Line  Hits  Source code
1 : // Copyright 2013 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #include "src/compiler/backend/code-generator.h"
6 :
7 : #include "src/address-map.h"
8 : #include "src/assembler-inl.h"
9 : #include "src/base/adapters.h"
10 : #include "src/compiler/backend/code-generator-impl.h"
11 : #include "src/compiler/linkage.h"
12 : #include "src/compiler/pipeline.h"
13 : #include "src/compiler/wasm-compiler.h"
14 : #include "src/counters.h"
15 : #include "src/eh-frame.h"
16 : #include "src/frames.h"
17 : #include "src/macro-assembler-inl.h"
18 : #include "src/objects/smi.h"
19 : #include "src/optimized-compilation-info.h"
20 : #include "src/string-constants.h"
21 :
22 : namespace v8 {
23 : namespace internal {
24 : namespace compiler {
25 :
26 : class CodeGenerator::JumpTable final : public ZoneObject {
27 : public:
28 : JumpTable(JumpTable* next, Label** targets, size_t target_count)
29 304 : : next_(next), targets_(targets), target_count_(target_count) {}
30 :
31 : Label* label() { return &label_; }
32 : JumpTable* next() const { return next_; }
33 : Label** targets() const { return targets_; }
34 : size_t target_count() const { return target_count_; }
35 :
36 : private:
37 : Label label_;
38 : JumpTable* const next_;
39 : Label** const targets_;
40 : size_t const target_count_;
41 : };
42 :
43 2949572 : CodeGenerator::CodeGenerator(
44 : Zone* codegen_zone, Frame* frame, Linkage* linkage,
45 29433090 : InstructionSequence* code, OptimizedCompilationInfo* info, Isolate* isolate,
46 : base::Optional<OsrHelper> osr_helper, int start_source_position,
47 : JumpOptimizationInfo* jump_opt, PoisoningMitigationLevel poisoning_level,
48 17697285 : const AssemblerOptions& options, int32_t builtin_index)
49 : : zone_(codegen_zone),
50 : isolate_(isolate),
51 : frame_access_state_(nullptr),
52 : linkage_(linkage),
53 : code_(code),
54 : unwinding_info_writer_(zone()),
55 : info_(info),
56 2949341 : labels_(zone()->NewArray<Label>(code->InstructionBlockCount())),
57 : current_block_(RpoNumber::Invalid()),
58 : start_source_position_(start_source_position),
59 : current_source_position_(SourcePosition::Unknown()),
60 : tasm_(isolate, options, CodeObjectRequired::kNo),
61 : resolver_(this),
62 : safepoints_(zone()),
63 : handlers_(zone()),
64 : deoptimization_exits_(zone()),
65 : deoptimization_states_(zone()),
66 : deoptimization_literals_(zone()),
67 : translations_(zone()),
68 : caller_registers_saved_(false),
69 : jump_tables_(nullptr),
70 : ools_(nullptr),
71 : osr_helper_(std::move(osr_helper)),
72 : osr_pc_offset_(-1),
73 : optimized_out_literal_id_(-1),
74 : source_position_table_builder_(
75 : SourcePositionTableBuilder::RECORD_SOURCE_POSITIONS),
76 : protected_instructions_(zone()),
77 : result_(kSuccess),
78 : poisoning_level_(poisoning_level),
79 : block_starts_(zone()),
80 23596015 : instr_starts_(zone()) {
81 47068638 : for (int i = 0; i < code->InstructionBlockCount(); ++i) {
82 20584737 : new (&labels_[i]) Label;
83 : }
84 2949582 : CreateFrameAccessState(frame);
85 2949430 : CHECK_EQ(info->is_osr(), osr_helper_.has_value());
86 : tasm_.set_jump_optimization_info(jump_opt);
87 : Code::Kind code_kind = info->code_kind();
88 5898860 : if (code_kind == Code::WASM_FUNCTION ||
89 2949430 : code_kind == Code::WASM_TO_JS_FUNCTION ||
90 7778789 : code_kind == Code::WASM_INTERPRETER_ENTRY ||
91 120568 : (Builtins::IsBuiltinId(builtin_index) &&
92 120568 : Builtins::IsWasmRuntimeStub(builtin_index))) {
93 : tasm_.set_abort_hard(true);
94 : }
95 : tasm_.set_builtin_index(builtin_index);
96 2949430 : }
97 :
98 503549 : bool CodeGenerator::wasm_runtime_exception_support() const {
99 : DCHECK_NOT_NULL(info_);
100 1007098 : return info_->wasm_runtime_exception_support();
101 : }
102 :
103 456259 : void CodeGenerator::AddProtectedInstructionLanding(uint32_t instr_offset,
104 : uint32_t landing_offset) {
105 912553 : protected_instructions_.push_back({instr_offset, landing_offset});
106 456294 : }
107 :
108 5899080 : void CodeGenerator::CreateFrameAccessState(Frame* frame) {
109 2949565 : FinishFrame(frame);
110 2949512 : frame_access_state_ = new (zone()) FrameAccessState(frame);
111 2949512 : }
112 :
113 2736616 : CodeGenerator::CodeGenResult CodeGenerator::AssembleDeoptimizerCall(
114 2736625 : int deoptimization_id, SourcePosition pos) {
115 2736616 : if (deoptimization_id > Deoptimizer::kMaxNumberOfEntries) {
116 : return kTooManyDeoptimizationBailouts;
117 : }
118 :
119 2736596 : DeoptimizeKind deopt_kind = GetDeoptimizationKind(deoptimization_id);
120 : DeoptimizeReason deoptimization_reason =
121 2736610 : GetDeoptimizationReason(deoptimization_id);
122 : Address deopt_entry =
123 2736626 : Deoptimizer::GetDeoptimizationEntry(tasm()->isolate(), deopt_kind);
124 2736625 : if (info()->is_source_positions_enabled()) {
125 88312 : tasm()->RecordDeoptReason(deoptimization_reason, pos, deoptimization_id);
126 : }
127 2736625 : tasm()->CallForDeoptimization(deopt_entry, deoptimization_id);
128 2736602 : return kSuccess;
129 : }
130 :
131 55927469 : void CodeGenerator::AssembleCode() {
132 2949474 : OptimizedCompilationInfo* info = this->info();
133 :
134 : // Open a frame scope to indicate that there is a frame on the stack. The
135 : // MANUAL indicates that the scope shouldn't actually generate code to set up
136 : // the frame (that is done in AssemblePrologue).
137 2949467 : FrameScope frame_scope(tasm(), StackFrame::MANUAL);
138 :
139 2949448 : if (info->is_source_positions_enabled()) {
140 11212 : AssembleSourcePosition(start_source_position());
141 : }
142 :
143 : // Check that {kJavaScriptCallCodeStartRegister} has been set correctly.
144 5898948 : if (FLAG_debug_code && (info->code_kind() == Code::OPTIMIZED_FUNCTION ||
145 : info->code_kind() == Code::BYTECODE_HANDLER)) {
146 108 : tasm()->RecordComment("-- Prologue: check code start register --");
147 108 : AssembleCodeStartRegisterCheck();
148 : }
149 :
150 : // We want to bail out only from JS functions, which are the only ones
151 : // that are optimized.
152 2949474 : if (info->IsOptimizing()) {
153 : DCHECK(linkage()->GetIncomingDescriptor()->IsJSFunctionCall());
154 456087 : tasm()->RecordComment("-- Prologue: check for deoptimization --");
155 456099 : BailoutIfDeoptimized();
156 : }
157 :
158 2949490 : InitializeSpeculationPoison();
159 :
160 : // Define deoptimization literals for all inlined functions.
161 : DCHECK_EQ(0u, deoptimization_literals_.size());
162 5965765 : for (OptimizedCompilationInfo::InlinedFunctionHolder& inlined :
163 : info->inlined_functions()) {
164 66979 : if (!inlined.shared_info.equals(info->shared_info())) {
165 : int index = DefineDeoptimizationLiteral(
166 66539 : DeoptimizationLiteral(inlined.shared_info));
167 : inlined.RegisterInlinedFunctionId(index);
168 : }
169 : }
170 2949393 : inlined_function_count_ = deoptimization_literals_.size();
171 :
172 : // Define deoptimization literals for all BytecodeArrays to which we might
173 : // deopt to ensure they are strongly held by the optimized code.
174 2949393 : if (info->has_bytecode_array()) {
175 456088 : DefineDeoptimizationLiteral(DeoptimizationLiteral(info->bytecode_array()));
176 : }
177 5965743 : for (OptimizedCompilationInfo::InlinedFunctionHolder& inlined :
178 : info->inlined_functions()) {
179 66979 : DefineDeoptimizationLiteral(DeoptimizationLiteral(inlined.bytecode_array));
180 : }
181 :
182 : unwinding_info_writer_.SetNumberOfInstructionBlocks(
183 2949382 : code()->InstructionBlockCount());
184 :
185 2949183 : if (info->trace_turbo_json_enabled()) {
186 27 : block_starts_.assign(code()->instruction_blocks().size(), -1);
187 6 : instr_starts_.assign(code()->instructions().size(), -1);
188 : }
189 :
190 : // Assemble instructions in assembly order.
191 67659911 : for (const InstructionBlock* block : code()->ao_blocks()) {
192 : // Align loop headers on 16-byte boundaries.
193 20584553 : if (block->ShouldAlign() && !tasm()->jump_optimization_info()) {
194 58855 : tasm()->Align(16);
195 : }
196 20584612 : if (info->trace_turbo_json_enabled()) {
197 57 : block_starts_[block->rpo_number().ToInt()] = tasm()->pc_offset();
198 : }
199 : // Bind a label for a block.
200 20584612 : current_block_ = block->rpo_number();
201 41169224 : unwinding_info_writer_.BeginInstructionBlock(tasm()->pc_offset(), block);
202 20584565 : if (FLAG_code_comments) {
203 1895 : std::ostringstream buffer;
204 1895 : buffer << "-- B" << block->rpo_number().ToInt() << " start";
205 1895 : if (block->IsDeferred()) buffer << " (deferred)";
206 1895 : if (!block->needs_frame()) buffer << " (no frame)";
207 1895 : if (block->must_construct_frame()) buffer << " (construct frame)";
208 1895 : if (block->must_deconstruct_frame()) buffer << " (deconstruct frame)";
209 :
210 1895 : if (block->IsLoopHeader()) {
211 19 : buffer << " (loop up to " << block->loop_end().ToInt() << ")";
212 : }
213 1895 : if (block->loop_header().IsValid()) {
214 777 : buffer << " (in loop " << block->loop_header().ToInt() << ")";
215 : }
216 1895 : buffer << " --";
217 3790 : tasm()->RecordComment(buffer.str().c_str());
218 : }
219 :
220 41169130 : frame_access_state()->MarkHasFrame(block->needs_frame());
221 :
222 20584630 : tasm()->bind(GetLabel(current_block_));
223 :
224 20584690 : TryInsertBranchPoisoning(block);
225 :
226 20584596 : if (block->must_construct_frame()) {
227 2961051 : AssembleConstructFrame();
228 : // We need to set up the root register after we assemble the prologue, to
229 : // avoid clobbering callee saved registers in case of C linkage and
230 : // using the roots.
231 : // TODO(mtrofin): investigate how we can avoid doing this repeatedly.
232 2960992 : if (linkage()->GetIncomingDescriptor()->InitializeRootRegister()) {
233 1110310 : tasm()->InitializeRootRegister();
234 : }
235 : }
236 :
237 : if (FLAG_enable_embedded_constant_pool && !block->needs_frame()) {
238 : ConstantPoolUnavailableScope constant_pool_unavailable(tasm());
239 : result_ = AssembleBlock(block);
240 : } else {
241 20584537 : result_ = AssembleBlock(block);
242 : }
243 20584802 : if (result_ != kSuccess) return;
244 20584848 : unwinding_info_writer_.EndInstructionBlock(block);
245 : }
246 :
247 : // Assemble all out-of-line code.
248 2949434 : if (ools_) {
249 173563 : tasm()->RecordComment("-- Out of line code --");
250 1047522 : for (OutOfLineCode* ool = ools_; ool; ool = ool->next()) {
251 873907 : tasm()->bind(ool->entry());
252 873924 : ool->Generate();
253 873933 : if (ool->exit()->is_bound()) tasm()->jmp(ool->exit());
254 : }
255 : }
256 :
257 : // This nop operation is needed to ensure that the trampoline is not
258 : // confused with the pc of the call before deoptimization.
259 : // The test regress/regress-259 is an example of where we need it.
260 2949486 : tasm()->nop();
261 :
262 : // Assemble deoptimization exits.
263 : int last_updated = 0;
264 11016268 : for (DeoptimizationExit* exit : deoptimization_exits_) {
265 2688998 : tasm()->bind(exit->label());
266 2689019 : int trampoline_pc = tasm()->pc_offset();
267 : int deoptimization_id = exit->deoptimization_id();
268 5037727 : DeoptimizationState* ds = deoptimization_states_[deoptimization_id];
269 :
270 2689008 : if (ds->kind() == DeoptimizeKind::kLazy) {
271 : last_updated = safepoints()->UpdateDeoptimizationInfo(
272 2348719 : ds->pc_offset(), trampoline_pc, last_updated);
273 : }
274 2689038 : result_ = AssembleDeoptimizerCall(deoptimization_id, exit->pos());
275 2689022 : if (result_ != kSuccess) return;
276 : }
277 :
278 2949253 : FinishCode();
279 :
280 : // Emit the jump tables.
281 2949246 : if (jump_tables_) {
282 302 : tasm()->Align(kSystemPointerSize);
283 910 : for (JumpTable* table = jump_tables_; table; table = table->next()) {
284 304 : tasm()->bind(table->label());
285 304 : AssembleJumpTable(table->targets(), table->target_count());
286 : }
287 : }
288 :
289 : // The PerfJitLogger logs code up until here, excluding the safepoint
290 : // table. Resolve the unwinding info now so it is aware of the same code size
291 : // as reported by perf.
292 2949246 : unwinding_info_writer_.Finish(tasm()->pc_offset());
293 :
294 2949246 : safepoints()->Emit(tasm(), frame()->GetTotalFrameSlotCount());
295 :
296 : // Emit the exception handler table.
297 3158474 : if (!handlers_.empty()) {
298 : handler_table_offset_ = HandlerTable::EmitReturnTableStart(
299 19715 : tasm(), static_cast<int>(handlers_.size()));
300 418210 : for (size_t i = 0; i < handlers_.size(); ++i) {
301 : HandlerTable::EmitReturnEntry(tasm(), handlers_[i].pc_offset,
302 189390 : handlers_[i].handler->pos());
303 : }
304 : }
305 :
306 2949369 : result_ = kSuccess;
307 : }
308 :
309 34600015 : void CodeGenerator::TryInsertBranchPoisoning(const InstructionBlock* block) {
310 : // See if our predecessor was a basic block terminated by a branch_and_poison
311 : // instruction. If yes, then perform the masking based on the flags.
312 27154260 : if (block->PredecessorCount() != 1) return;
313 14015242 : RpoNumber pred_rpo = (block->predecessors())[0];
314 14015242 : const InstructionBlock* pred = code()->InstructionBlockAt(pred_rpo);
315 14015173 : if (pred->code_start() == pred->code_end()) return;
316 28030440 : Instruction* instr = code()->InstructionAt(pred->code_end() - 1);
317 14015223 : FlagsMode mode = FlagsModeField::decode(instr->opcode());
318 14015223 : switch (mode) {
319 : case kFlags_branch_and_poison: {
320 : BranchInfo branch;
321 0 : RpoNumber target = ComputeBranchInfo(&branch, instr);
322 0 : if (!target.IsValid()) {
323 : // Non-trivial branch, add the masking code.
324 0 : FlagsCondition condition = branch.condition;
325 0 : if (branch.false_label == GetLabel(block->rpo_number())) {
326 : condition = NegateFlagsCondition(condition);
327 : }
328 0 : AssembleBranchPoisoning(condition, instr);
329 : }
330 : break;
331 : }
332 : case kFlags_deoptimize_and_poison: {
333 0 : UNREACHABLE();
334 : break;
335 : }
336 : default:
337 : break;
338 : }
339 : }
340 :
341 129881 : void CodeGenerator::AssembleArchBinarySearchSwitchRange(
342 : Register input, RpoNumber def_block, std::pair<int32_t, Label*>* begin,
343 : std::pair<int32_t, Label*>* end) {
344 129881 : if (end - begin < kBinarySearchSwitchMinimalCases) {
345 302595 : while (begin != end) {
346 217901 : tasm()->JumpIfEqual(input, begin->first, begin->second);
347 217901 : ++begin;
348 : }
349 84694 : AssembleArchJump(def_block);
350 214575 : return;
351 : }
352 45187 : auto middle = begin + (end - begin) / 2;
353 45187 : Label less_label;
354 45187 : tasm()->JumpIfLessThan(input, middle->first, &less_label);
355 45187 : AssembleArchBinarySearchSwitchRange(input, def_block, middle, end);
356 45187 : tasm()->bind(&less_label);
357 45187 : AssembleArchBinarySearchSwitchRange(input, def_block, begin, middle);
358 : }
359 :
360 1068801 : OwnedVector<byte> CodeGenerator::GetSourcePositionTable() {
361 1068801 : return source_position_table_builder_.ToSourcePositionTableVector();
362 : }
363 :
364 : OwnedVector<trap_handler::ProtectedInstructionData>
365 1068740 : CodeGenerator::GetProtectedInstructions() {
366 : return OwnedVector<trap_handler::ProtectedInstructionData>::Of(
367 1068740 : protected_instructions_);
368 : }
369 :
370 15046097 : MaybeHandle<Code> CodeGenerator::FinalizeCode() {
371 1880744 : if (result_ != kSuccess) {
372 9 : tasm()->AbortedCodeGeneration();
373 9 : return MaybeHandle<Code>();
374 : }
375 :
376 : // Allocate the source position table.
377 : Handle<ByteArray> source_positions =
378 1880735 : source_position_table_builder_.ToSourcePositionTable(isolate());
379 :
380 : // Allocate deoptimization data.
381 1880756 : Handle<DeoptimizationData> deopt_data = GenerateDeoptimizationData();
382 :
383 : // Allocate and install the code.
384 1880761 : CodeDesc desc;
385 1880761 : tasm()->GetCode(isolate(), &desc);
386 1880773 : if (unwinding_info_writer_.eh_frame_writer()) {
387 23 : unwinding_info_writer_.eh_frame_writer()->GetEhFrame(&desc);
388 : }
389 :
390 : MaybeHandle<Code> maybe_code = isolate()->factory()->TryNewCode(
391 : desc, info()->code_kind(), Handle<Object>(), info()->builtin_index(),
392 : source_positions, deopt_data, kMovable, true,
393 1880773 : frame()->GetTotalFrameSlotCount(), safepoints()->GetCodeOffset(),
394 11284613 : handler_table_offset_);
395 :
396 : Handle<Code> code;
397 1880775 : if (!maybe_code.ToHandle(&code)) {
398 0 : tasm()->AbortedCodeGeneration();
399 0 : return MaybeHandle<Code>();
400 : }
401 :
402 : isolate()->counters()->total_compiled_code_size()->Increment(
403 1880776 : code->raw_instruction_size());
404 :
405 1880848 : LOG_CODE_EVENT(isolate(),
406 : CodeLinePosInfoRecordEvent(code->raw_instruction_start(),
407 : *source_positions));
408 :
409 1880775 : return code;
410 : }
411 :
412 15330829 : bool CodeGenerator::IsNextInAssemblyOrder(RpoNumber block) const {
413 : return code()
414 : ->InstructionBlockAt(current_block_)
415 15330891 : ->ao_number()
416 30661756 : .IsNext(code()->InstructionBlockAt(block)->ao_number());
417 : }
418 :
419 5452950 : void CodeGenerator::RecordSafepoint(ReferenceMap* references,
420 : Safepoint::Kind kind, int arguments,
421 5452817 : Safepoint::DeoptMode deopt_mode) {
422 : Safepoint safepoint =
423 5452950 : safepoints()->DefineSafepoint(tasm(), kind, arguments, deopt_mode);
424 : int stackSlotToSpillSlotDelta =
425 5452817 : frame()->GetTotalFrameSlotCount() - frame()->GetSpillSlotCount();
426 81882793 : for (const InstructionOperand& operand : references->reference_operands()) {
427 70976842 : if (operand.IsStackSlot()) {
428 : int index = LocationOperand::cast(operand).index();
429 : DCHECK_LE(0, index);
430 : // We might index values in the fixed part of the frame (i.e. the
431 : // closure pointer or the context pointer); these are not spill slots
432 : // and therefore don't work with the SafepointTable currently, but
433 : // we also don't need to worry about them, since the GC has special
434 : // knowledge about those fields anyway.
435 13998158 : if (index < stackSlotToSpillSlotDelta) continue;
436 13998004 : safepoint.DefinePointerSlot(index);
437 56978684 : } else if (operand.IsRegister() && (kind & Safepoint::kWithRegisters)) {
438 0 : Register reg = LocationOperand::cast(operand).GetRegister();
439 0 : safepoint.DefinePointerRegister(reg);
440 : }
441 : }
442 5453134 : }
443 :
444 7819111 : bool CodeGenerator::IsMaterializableFromRoot(Handle<HeapObject> object,
445 14600237 : RootIndex* index_return) {
446 : const CallDescriptor* incoming_descriptor =
447 : linkage()->GetIncomingDescriptor();
448 7819111 : if (incoming_descriptor->flags() & CallDescriptor::kCanUseRoots) {
449 8404983 : return isolate()->roots_table().IsRootHandle(object, index_return) &&
450 8404983 : RootsTable::IsImmortalImmovable(*index_return);
451 : }
452 : return false;
453 : }
454 :
455 20584543 : CodeGenerator::CodeGenResult CodeGenerator::AssembleBlock(
456 235739094 : const InstructionBlock* block) {
457 170882534 : for (int i = block->code_start(); i < block->code_end(); ++i) {
458 64856642 : if (info()->trace_turbo_json_enabled()) {
459 189 : instr_starts_[i] = tasm()->pc_offset();
460 : }
461 : Instruction* instr = code()->InstructionAt(i);
462 64856919 : CodeGenResult result = AssembleInstruction(instr, block);
463 64856676 : if (result != kSuccess) return result;
464 : }
465 : return kSuccess;
466 : }
467 :
468 35937 : bool CodeGenerator::IsValidPush(InstructionOperand source,
469 : CodeGenerator::PushTypeFlags push_type) {
470 35937 : if (source.IsImmediate() &&
471 : ((push_type & CodeGenerator::kImmediatePush) != 0)) {
472 : return true;
473 : }
474 59323 : if (source.IsRegister() &&
475 : ((push_type & CodeGenerator::kRegisterPush) != 0)) {
476 : return true;
477 : }
478 24120 : if (source.IsStackSlot() &&
479 : ((push_type & CodeGenerator::kStackSlotPush) != 0)) {
480 : return true;
481 : }
482 982 : return false;
483 : }
484 :
485 118307 : void CodeGenerator::GetPushCompatibleMoves(Instruction* instr,
486 : PushTypeFlags push_type,
487 : ZoneVector<MoveOperands*>* pushes) {
488 171137 : pushes->clear();
489 320761 : for (int i = Instruction::FIRST_GAP_POSITION;
490 : i <= Instruction::LAST_GAP_POSITION; ++i) {
491 : Instruction::GapPosition inner_pos =
492 : static_cast<Instruction::GapPosition>(i);
493 : ParallelMove* parallel_move = instr->GetParallelMove(inner_pos);
494 219534 : if (parallel_move != nullptr) {
495 495184 : for (auto move : *parallel_move) {
496 242800 : InstructionOperand source = move->source();
497 242800 : InstructionOperand destination = move->destination();
498 : int first_push_compatible_index =
499 : V8_TARGET_ARCH_STORES_RETURN_ADDRESS_ON_STACK ? 1 : 0;
500 : // If there are any moves from slots that will be overridden by pushes,
501 : // then the full gap resolver must be used since optimization with
502 : // pushes doesn't participate in the parallel move and might clobber
503 : // values needed for the gap resolve.
504 272012 : if (source.IsStackSlot() && LocationOperand::cast(source).index() >=
505 : first_push_compatible_index) {
506 : pushes->clear();
507 118307 : return;
508 : }
509 : // TODO(danno): Right now, only consider moves from the FIRST gap for
510 : // pushes. Theoretically, we could extract pushes for both gaps (there
511 : // are cases where this happens), but the logic for that would also have
512 : // to check to make sure that non-memory inputs to the pushes from the
513 : // LAST gap don't get clobbered in the FIRST gap.
514 225720 : if (i == Instruction::FIRST_GAP_POSITION) {
515 279527 : if (destination.IsStackSlot() &&
516 : LocationOperand::cast(destination).index() >=
517 : first_push_compatible_index) {
518 : int index = LocationOperand::cast(destination).index();
519 35937 : if (IsValidPush(source, push_type)) {
520 34955 : if (index >= static_cast<int>(pushes->size())) {
521 34955 : pushes->resize(index + 1);
522 : }
523 69910 : (*pushes)[index] = move;
524 : }
525 : }
526 : }
527 : }
528 : }
529 : }
530 :
531 : // For now, only support a set of contiguous pushes at the end of the list.
532 : size_t push_count_upper_bound = pushes->size();
533 : size_t push_begin = push_count_upper_bound;
534 169019 : for (auto move : base::Reversed(*pushes)) {
535 39673 : if (move == nullptr) break;
536 28119 : push_begin--;
537 : }
538 101227 : size_t push_count = pushes->size() - push_begin;
539 : std::copy(pushes->begin() + push_begin,
540 : pushes->begin() + push_begin + push_count, pushes->begin());
541 101227 : pushes->resize(push_count);
542 : }
543 :
544 34754629 : CodeGenerator::MoveType::Type CodeGenerator::MoveType::InferMove(
545 : InstructionOperand* source, InstructionOperand* destination) {
546 34754629 : if (source->IsConstant()) {
547 18617660 : if (destination->IsAnyRegister()) {
548 : return MoveType::kConstantToRegister;
549 : } else {
550 : DCHECK(destination->IsAnyStackSlot());
551 46842 : return MoveType::kConstantToStack;
552 : }
553 : }
554 : DCHECK(LocationOperand::cast(source)->IsCompatible(
555 : LocationOperand::cast(destination)));
556 16136969 : if (source->IsAnyRegister()) {
557 8488031 : if (destination->IsAnyRegister()) {
558 : return MoveType::kRegisterToRegister;
559 : } else {
560 : DCHECK(destination->IsAnyStackSlot());
561 4349866 : return MoveType::kRegisterToStack;
562 : }
563 : } else {
564 : DCHECK(source->IsAnyStackSlot());
565 7648938 : if (destination->IsAnyRegister()) {
566 : return MoveType::kStackToRegister;
567 : } else {
568 : DCHECK(destination->IsAnyStackSlot());
569 47847 : return MoveType::kStackToStack;
570 : }
571 : }
572 : }
573 :
574 88887 : CodeGenerator::MoveType::Type CodeGenerator::MoveType::InferSwap(
575 : InstructionOperand* source, InstructionOperand* destination) {
576 : DCHECK(LocationOperand::cast(source)->IsCompatible(
577 : LocationOperand::cast(destination)));
578 88887 : if (source->IsAnyRegister()) {
579 84165 : if (destination->IsAnyRegister()) {
580 : return MoveType::kRegisterToRegister;
581 : } else {
582 : DCHECK(destination->IsAnyStackSlot());
583 8361 : return MoveType::kRegisterToStack;
584 : }
585 : } else {
586 : DCHECK(source->IsAnyStackSlot());
587 : DCHECK(destination->IsAnyStackSlot());
588 : return MoveType::kStackToStack;
589 : }
590 : }
591 :
592 5192305 : RpoNumber CodeGenerator::ComputeBranchInfo(BranchInfo* branch,
593 25954426 : Instruction* instr) {
594 : // Assemble a branch after this instruction.
595 : InstructionOperandConverter i(this, instr);
596 5192305 : RpoNumber true_rpo = i.InputRpo(instr->InputCount() - 2);
597 5192318 : RpoNumber false_rpo = i.InputRpo(instr->InputCount() - 1);
598 :
599 5192355 : if (true_rpo == false_rpo) {
600 2420 : return true_rpo;
601 : }
602 5189935 : FlagsCondition condition = FlagsConditionField::decode(instr->opcode());
603 5189935 : if (IsNextInAssemblyOrder(true_rpo)) {
604 : // true block is next, can fall through if condition negated.
605 : std::swap(true_rpo, false_rpo);
606 : condition = NegateFlagsCondition(condition);
607 : }
608 5189934 : branch->condition = condition;
609 5189934 : branch->true_label = GetLabel(true_rpo);
610 5189934 : branch->false_label = GetLabel(false_rpo);
611 5189934 : branch->fallthru = IsNextInAssemblyOrder(false_rpo);
612 : return RpoNumber::Invalid();
613 : }
614 :
615 64856818 : CodeGenerator::CodeGenResult CodeGenerator::AssembleInstruction(
616 199431493 : Instruction* instr, const InstructionBlock* block) {
617 : int first_unused_stack_slot;
618 64856818 : FlagsMode mode = FlagsModeField::decode(instr->opcode());
619 64856818 : if (mode != kFlags_trap) {
620 64809683 : AssembleSourcePosition(instr);
621 : }
622 : bool adjust_stack =
623 64857147 : GetSlotAboveSPBeforeTailCall(instr, &first_unused_stack_slot);
624 64857303 : if (adjust_stack) AssembleTailCallBeforeGap(instr, first_unused_stack_slot);
625 64857303 : AssembleGaps(instr);
626 64857321 : if (adjust_stack) AssembleTailCallAfterGap(instr, first_unused_stack_slot);
627 : DCHECK_IMPLIES(
628 : block->must_deconstruct_frame(),
629 : instr != code()->InstructionAt(block->last_instruction_index()) ||
630 : instr->IsRet() || instr->IsJump());
631 69721233 : if (instr->IsJump() && block->must_deconstruct_frame()) {
632 49843 : AssembleDeconstructFrame();
633 : }
634 : // Assemble architecture-specific code for the instruction.
635 64857484 : CodeGenResult result = AssembleArchInstruction(instr);
636 64856569 : if (result != kSuccess) return result;
637 :
638 64856684 : FlagsCondition condition = FlagsConditionField::decode(instr->opcode());
639 64856684 : switch (mode) {
640 : case kFlags_branch:
641 : case kFlags_branch_and_poison: {
642 : BranchInfo branch;
643 5192341 : RpoNumber target = ComputeBranchInfo(&branch, instr);
644 5192329 : if (target.IsValid()) {
645 : // redundant branch.
646 2420 : if (!IsNextInAssemblyOrder(target)) {
647 298 : AssembleArchJump(target);
648 : }
649 2420 : return kSuccess;
650 : }
651 : // Assemble architecture-specific branch.
652 5189909 : AssembleArchBranch(instr, &branch);
653 5189898 : break;
654 : }
655 : case kFlags_deoptimize:
656 : case kFlags_deoptimize_and_poison: {
657 : // Assemble a conditional eager deoptimization after this instruction.
658 : InstructionOperandConverter i(this, instr);
659 340295 : size_t frame_state_offset = MiscField::decode(instr->opcode());
660 : DeoptimizationExit* const exit =
661 340295 : AddDeoptimizationExit(instr, frame_state_offset);
662 340294 : Label continue_label;
663 : BranchInfo branch;
664 340294 : branch.condition = condition;
665 340294 : branch.true_label = exit->label();
666 340294 : branch.false_label = &continue_label;
667 340294 : branch.fallthru = true;
668 : // Assemble architecture-specific branch.
669 340294 : AssembleArchDeoptBranch(instr, &branch);
670 340293 : tasm()->bind(&continue_label);
671 340294 : if (mode == kFlags_deoptimize_and_poison) {
672 0 : AssembleBranchPoisoning(NegateFlagsCondition(branch.condition), instr);
673 : }
674 : break;
675 : }
676 : case kFlags_set: {
677 : // Assemble a boolean materialization after this instruction.
678 607884 : AssembleArchBoolean(instr, condition);
679 607881 : break;
680 : }
681 : case kFlags_trap: {
682 47284 : AssembleArchTrap(instr, condition);
683 47289 : break;
684 : }
685 : case kFlags_none: {
686 : break;
687 : }
688 : }
689 :
690 : // TODO(jarin) We should thread the flag through rather than set it.
691 64854242 : if (instr->IsCall()) {
692 : ResetSpeculationPoison();
693 : }
694 :
695 : return kSuccess;
696 : }
697 :
698 105283397 : void CodeGenerator::AssembleSourcePosition(Instruction* instr) {
699 64933198 : SourcePosition source_position = SourcePosition::Unknown();
700 125912019 : if (instr->IsNop() && instr->AreMovesRedundant()) return;
701 40350199 : if (!code()->GetSourcePosition(instr, &source_position)) return;
702 3954767 : AssembleSourcePosition(source_position);
703 : }
704 :
705 3966872 : void CodeGenerator::AssembleSourcePosition(SourcePosition source_position) {
706 3965958 : if (source_position == current_source_position_) return;
707 3222671 : current_source_position_ = source_position;
708 3222671 : if (!source_position.IsKnown()) return;
709 3222695 : source_position_table_builder_.AddPosition(tasm()->pc_offset(),
710 3222695 : source_position, false);
711 3222727 : if (FLAG_code_comments) {
712 : OptimizedCompilationInfo* info = this->info();
713 914 : if (info->IsNotOptimizedFunctionOrWasmFunction()) return;
714 914 : std::ostringstream buffer;
715 914 : buffer << "-- ";
716 : // Turbolizer only needs the source position, as it can reconstruct
717 : // the inlining stack from other information.
718 1828 : if (info->trace_turbo_json_enabled() || !tasm()->isolate() ||
719 914 : tasm()->isolate()->concurrent_recompilation_enabled()) {
720 126 : buffer << source_position;
721 : } else {
722 : AllowHeapAllocation allocation;
723 : AllowHandleAllocation handles;
724 : AllowHandleDereference deref;
725 1576 : buffer << source_position.InliningStack(info);
726 : }
727 914 : buffer << " --";
728 1828 : tasm()->RecordComment(buffer.str().c_str());
729 : }
730 : }
731 :
732 64975452 : bool CodeGenerator::GetSlotAboveSPBeforeTailCall(Instruction* instr,
733 : int* slot) {
734 64857160 : if (instr->IsTailCall()) {
735 : InstructionOperandConverter g(this, instr);
736 118292 : *slot = g.InputInt32(instr->InputCount() - 1);
737 : return true;
738 : } else {
739 : return false;
740 : }
741 : }
742 :
743 464361 : StubCallMode CodeGenerator::DetermineStubCallMode() const {
744 464361 : Code::Kind code_kind = info()->code_kind();
745 464361 : return (code_kind == Code::WASM_FUNCTION ||
746 : code_kind == Code::WASM_TO_JS_FUNCTION)
747 : ? StubCallMode::kCallWasmRuntimeStub
748 464361 : : StubCallMode::kCallCodeObject;
749 : }
750 :
751 64857066 : void CodeGenerator::AssembleGaps(Instruction* instr) {
752 194571034 : for (int i = Instruction::FIRST_GAP_POSITION;
753 : i <= Instruction::LAST_GAP_POSITION; i++) {
754 : Instruction::GapPosition inner_pos =
755 : static_cast<Instruction::GapPosition>(i);
756 : ParallelMove* move = instr->GetParallelMove(inner_pos);
757 129713690 : if (move != nullptr) resolver()->Resolve(move);
758 : }
759 64857344 : }
760 :
761 : namespace {
762 :
763 455989 : Handle<PodArray<InliningPosition>> CreateInliningPositions(
764 : OptimizedCompilationInfo* info, Isolate* isolate) {
765 536300 : const OptimizedCompilationInfo::InlinedFunctionList& inlined_functions =
766 : info->inlined_functions();
767 455989 : if (inlined_functions.size() == 0) {
768 : return Handle<PodArray<InliningPosition>>::cast(
769 442532 : isolate->factory()->empty_byte_array());
770 : }
771 : Handle<PodArray<InliningPosition>> inl_positions =
772 : PodArray<InliningPosition>::New(
773 13457 : isolate, static_cast<int>(inlined_functions.size()), TENURED);
774 160622 : for (size_t i = 0; i < inlined_functions.size(); ++i) {
775 66854 : inl_positions->set(static_cast<int>(i), inlined_functions[i].position);
776 : }
777 13457 : return inl_positions;
778 : }
779 :
780 : } // namespace
781 :
782 6977180 : Handle<DeoptimizationData> CodeGenerator::GenerateDeoptimizationData() {
783 455990 : OptimizedCompilationInfo* info = this->info();
784 1880749 : int deopt_count = static_cast<int>(deoptimization_states_.size());
785 3305524 : if (deopt_count == 0 && !info->is_osr()) {
786 1424773 : return DeoptimizationData::Empty(isolate());
787 : }
788 : Handle<DeoptimizationData> data =
789 455976 : DeoptimizationData::New(isolate(), deopt_count, TENURED);
790 :
791 : Handle<ByteArray> translation_array =
792 455968 : translations_.CreateByteArray(isolate()->factory());
793 :
794 911974 : data->SetTranslationByteArray(*translation_array);
795 : data->SetInlinedFunctionCount(
796 455989 : Smi::FromInt(static_cast<int>(inlined_function_count_)));
797 : data->SetOptimizationId(Smi::FromInt(info->optimization_id()));
798 :
799 455990 : if (info->has_shared_info()) {
800 911980 : data->SetSharedFunctionInfo(*info->shared_info());
801 : } else {
802 0 : data->SetSharedFunctionInfo(Smi::kZero);
803 : }
804 :
805 : Handle<FixedArray> literals = isolate()->factory()->NewFixedArray(
806 911980 : static_cast<int>(deoptimization_literals_.size()), TENURED);
807 4607448 : for (unsigned i = 0; i < deoptimization_literals_.size(); i++) {
808 1847735 : Handle<Object> object = deoptimization_literals_[i].Reify(isolate());
809 3695477 : literals->set(i, *object);
810 : }
811 911980 : data->SetLiteralArray(*literals);
812 :
813 : Handle<PodArray<InliningPosition>> inl_pos =
814 455990 : CreateInliningPositions(info, isolate());
815 911979 : data->SetInliningPositions(*inl_pos);
816 :
817 455990 : if (info->is_osr()) {
818 : DCHECK_LE(0, osr_pc_offset_);
819 4917 : data->SetOsrBytecodeOffset(Smi::FromInt(info_->osr_offset().ToInt()));
820 4917 : data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_));
821 : } else {
822 : BailoutId osr_offset = BailoutId::None();
823 : data->SetOsrBytecodeOffset(Smi::FromInt(osr_offset.ToInt()));
824 : data->SetOsrPcOffset(Smi::FromInt(-1));
825 : }
826 :
827 : // Populate deoptimization entries.
828 2702439 : for (int i = 0; i < deopt_count; i++) {
829 8107317 : DeoptimizationState* deoptimization_state = deoptimization_states_[i];
830 5404881 : data->SetBytecodeOffset(i, deoptimization_state->bailout_id());
831 2702440 : CHECK(deoptimization_state);
832 : data->SetTranslationIndex(
833 2702441 : i, Smi::FromInt(deoptimization_state->translation_id()));
834 2702440 : data->SetPc(i, Smi::FromInt(deoptimization_state->pc_offset()));
835 : }
836 :
837 455990 : return data;
838 : }
839 :
840 304 : Label* CodeGenerator::AddJumpTable(Label** targets, size_t target_count) {
841 608 : jump_tables_ = new (zone()) JumpTable(jump_tables_, targets, target_count);
842 304 : return jump_tables_->label();
843 : }
844 :
845 13648663 : void CodeGenerator::RecordCallPosition(Instruction* instr) {
846 5329492 : CallDescriptor::Flags flags(MiscField::decode(instr->opcode()));
847 :
848 5329492 : bool needs_frame_state = (flags & CallDescriptor::kNeedsFrameState);
849 :
850 : RecordSafepoint(
851 : instr->reference_map(), Safepoint::kSimple, 0,
852 10658984 : needs_frame_state ? Safepoint::kLazyDeopt : Safepoint::kNoLazyDeopt);
853 :
854 5329589 : if (flags & CallDescriptor::kHasExceptionHandler) {
855 : InstructionOperandConverter i(this, instr);
856 189392 : RpoNumber handler_rpo = i.InputRpo(instr->InputCount() - 1);
857 568175 : handlers_.push_back({GetLabel(handler_rpo), tasm()->pc_offset()});
858 : }
859 :
860 5329588 : if (needs_frame_state) {
861 : MarkLazyDeoptSite();
862 : // If the frame state is present, it starts at argument 2 - after
863 : // the code address and the poison-alias index.
864 : size_t frame_state_offset = 2;
865 : FrameStateDescriptor* descriptor =
866 2610901 : GetDeoptimizationEntry(instr, frame_state_offset).descriptor();
867 2610899 : int pc_offset = tasm()->pc_offset();
868 : int deopt_state_id = BuildTranslation(instr, pc_offset, frame_state_offset,
869 2610899 : descriptor->state_combine());
870 :
871 : DeoptimizationExit* const exit = new (zone())
872 5221794 : DeoptimizationExit(deopt_state_id, current_source_position_);
873 2610899 : deoptimization_exits_.push_back(exit);
874 2610890 : safepoints()->RecordLazyDeoptimizationIndex(deopt_state_id);
875 : }
876 5329590 : }
877 :
878 7528171 : int CodeGenerator::DefineDeoptimizationLiteral(DeoptimizationLiteral literal) {
879 7528171 : int result = static_cast<int>(deoptimization_literals_.size());
880 138502198 : for (unsigned i = 0; i < deoptimization_literals_.size(); ++i) {
881 67402728 : if (deoptimization_literals_[i] == literal) return i;
882 : }
883 1848392 : deoptimization_literals_.push_back(literal);
884 1848372 : return result;
885 : }
886 :
887 5609645 : DeoptimizationEntry const& CodeGenerator::GetDeoptimizationEntry(
888 5609673 : Instruction* instr, size_t frame_state_offset) {
889 : InstructionOperandConverter i(this, instr);
890 5609645 : int const state_id = i.InputInt32(frame_state_offset);
891 5609673 : return code()->GetDeoptimizationEntry(state_id);
892 : }
893 :
894 2736595 : DeoptimizeKind CodeGenerator::GetDeoptimizationKind(
895 : int deoptimization_id) const {
896 : size_t const index = static_cast<size_t>(deoptimization_id);
897 : DCHECK_LT(index, deoptimization_states_.size());
898 2736611 : return deoptimization_states_[index]->kind();
899 : }
900 :
901 2736623 : DeoptimizeReason CodeGenerator::GetDeoptimizationReason(
902 : int deoptimization_id) const {
903 : size_t const index = static_cast<size_t>(deoptimization_id);
904 : DCHECK_LT(index, deoptimization_states_.size());
905 2736625 : return deoptimization_states_[index]->reason();
906 : }
907 :
908 42352866 : void CodeGenerator::TranslateStateValueDescriptor(
909 42388067 : StateValueDescriptor* desc, StateValueList* nested,
910 455911 : Translation* translation, InstructionOperandIterator* iter) {
911 : // Note:
912 : // If translation is null, we just skip the relevant instruction operands.
913 42352866 : if (desc->IsNested()) {
914 92285 : if (translation != nullptr) {
915 92285 : translation->BeginCapturedObject(static_cast<int>(nested->size()));
916 : }
917 739590 : for (auto field : *nested) {
918 : TranslateStateValueDescriptor(field.desc, field.nested, translation,
919 555020 : iter);
920 : }
921 42260581 : } else if (desc->IsArgumentsElements()) {
922 6054 : if (translation != nullptr) {
923 6054 : translation->ArgumentsElements(desc->arguments_type());
924 : }
925 42254527 : } else if (desc->IsArgumentsLength()) {
926 6336 : if (translation != nullptr) {
927 6336 : translation->ArgumentsLength(desc->arguments_type());
928 : }
929 42248191 : } else if (desc->IsDuplicate()) {
930 22811 : if (translation != nullptr) {
931 22811 : translation->DuplicateObject(static_cast<int>(desc->id()));
932 : }
933 42225380 : } else if (desc->IsPlain()) {
934 20409587 : InstructionOperand* op = iter->Advance();
935 20409587 : if (translation != nullptr) {
936 : AddTranslationForOperand(translation, iter->instruction(), op,
937 20409666 : desc->type());
938 : }
939 : } else {
940 : DCHECK(desc->IsOptimizedOut());
941 21815793 : if (translation != nullptr) {
942 21815811 : if (optimized_out_literal_id_ == -1) {
943 : optimized_out_literal_id_ = DefineDeoptimizationLiteral(
944 455911 : DeoptimizationLiteral(isolate()->factory()->optimized_out()));
945 : }
946 21815806 : translation->StoreLiteral(optimized_out_literal_id_);
947 : }
948 : }
949 42352558 : }
950 :
951 3358297 : void CodeGenerator::TranslateFrameStateDescriptorOperands(
952 : FrameStateDescriptor* desc, InstructionOperandIterator* iter,
953 : Translation* translation) {
954 : size_t index = 0;
955 : StateValueList* values = desc->GetStateValueDescriptors();
956 48513744 : for (StateValueList::iterator it = values->begin(); it != values->end();
957 : ++it, ++index) {
958 41797446 : TranslateStateValueDescriptor((*it).desc, (*it).nested, translation, iter);
959 : }
960 : DCHECK_EQ(desc->GetSize(), index);
961 3358001 : }
962 :
963 3357934 : void CodeGenerator::BuildTranslationForFrameStateDescriptor(
964 11886931 : FrameStateDescriptor* descriptor, InstructionOperandIterator* iter,
965 2765 : Translation* translation, OutputFrameStateCombine state_combine) {
966 : // Outer-most state must be added to translation first.
967 3357934 : if (descriptor->outer_state() != nullptr) {
968 : BuildTranslationForFrameStateDescriptor(descriptor->outer_state(), iter,
969 359181 : translation, state_combine);
970 : }
971 :
972 : Handle<SharedFunctionInfo> shared_info;
973 3357959 : if (!descriptor->shared_info().ToHandle(&shared_info)) {
974 2765 : if (!info()->has_shared_info()) {
975 3357983 : return; // Stub with no SharedFunctionInfo.
976 : }
977 : shared_info = info()->shared_info();
978 : }
979 : int shared_info_id =
980 3357959 : DefineDeoptimizationLiteral(DeoptimizationLiteral(shared_info));
981 :
982 3357971 : switch (descriptor->type()) {
983 : case FrameStateType::kInterpretedFunction: {
984 : int return_offset = 0;
985 : int return_count = 0;
986 3239710 : if (!state_combine.IsOutputIgnored()) {
987 1813051 : return_offset = static_cast<int>(state_combine.GetOffsetToPokeAt());
988 3626102 : return_count = static_cast<int>(iter->instruction()->OutputCount());
989 : }
990 : translation->BeginInterpretedFrame(
991 : descriptor->bailout_id(), shared_info_id,
992 : static_cast<unsigned int>(descriptor->locals_count() + 1),
993 3239710 : return_offset, return_count);
994 3239734 : break;
995 : }
996 : case FrameStateType::kArgumentsAdaptor:
997 : translation->BeginArgumentsAdaptorFrame(
998 : shared_info_id,
999 76831 : static_cast<unsigned int>(descriptor->parameters_count()));
1000 76831 : break;
1001 : case FrameStateType::kConstructStub:
1002 : DCHECK(descriptor->bailout_id().IsValidForConstructStub());
1003 : translation->BeginConstructStubFrame(
1004 : descriptor->bailout_id(), shared_info_id,
1005 26689 : static_cast<unsigned int>(descriptor->parameters_count() + 1));
1006 26689 : break;
1007 : case FrameStateType::kBuiltinContinuation: {
1008 2765 : BailoutId bailout_id = descriptor->bailout_id();
1009 : int parameter_count =
1010 : static_cast<unsigned int>(descriptor->parameters_count());
1011 : translation->BeginBuiltinContinuationFrame(bailout_id, shared_info_id,
1012 2765 : parameter_count);
1013 : break;
1014 : }
1015 : case FrameStateType::kJavaScriptBuiltinContinuation: {
1016 11703 : BailoutId bailout_id = descriptor->bailout_id();
1017 : int parameter_count =
1018 : static_cast<unsigned int>(descriptor->parameters_count());
1019 : translation->BeginJavaScriptBuiltinContinuationFrame(
1020 11703 : bailout_id, shared_info_id, parameter_count);
1021 : break;
1022 : }
1023 : case FrameStateType::kJavaScriptBuiltinContinuationWithCatch: {
1024 277 : BailoutId bailout_id = descriptor->bailout_id();
1025 : int parameter_count =
1026 : static_cast<unsigned int>(descriptor->parameters_count());
1027 : translation->BeginJavaScriptBuiltinContinuationWithCatchFrame(
1028 277 : bailout_id, shared_info_id, parameter_count);
1029 : break;
1030 : }
1031 : }
1032 :
1033 3357995 : TranslateFrameStateDescriptorOperands(descriptor, iter, translation);
1034 : }
1035 :
1036 2998764 : int CodeGenerator::BuildTranslation(Instruction* instr, int pc_offset,
1037 : size_t frame_state_offset,
1038 5997579 : OutputFrameStateCombine state_combine) {
1039 5997579 : DeoptimizationEntry const& entry =
1040 2998764 : GetDeoptimizationEntry(instr, frame_state_offset);
1041 : FrameStateDescriptor* const descriptor = entry.descriptor();
1042 2998780 : frame_state_offset++;
1043 :
1044 2998780 : int update_feedback_count = entry.feedback().IsValid() ? 1 : 0;
1045 : Translation translation(&translations_,
1046 2998791 : static_cast<int>(descriptor->GetFrameCount()),
1047 2998780 : static_cast<int>(descriptor->GetJSFrameCount()),
1048 5997584 : update_feedback_count, zone());
1049 2998795 : if (entry.feedback().IsValid()) {
1050 : DeoptimizationLiteral literal =
1051 : DeoptimizationLiteral(entry.feedback().vector());
1052 47806 : int literal_id = DefineDeoptimizationLiteral(literal);
1053 47806 : translation.AddUpdateFeedback(literal_id, entry.feedback().slot().ToInt());
1054 : }
1055 : InstructionOperandIterator iter(instr, frame_state_offset);
1056 : BuildTranslationForFrameStateDescriptor(descriptor, &iter, &translation,
1057 2998795 : state_combine);
1058 :
1059 2998799 : int deoptimization_id = static_cast<int>(deoptimization_states_.size());
1060 :
1061 : deoptimization_states_.push_back(new (zone()) DeoptimizationState(
1062 2998799 : descriptor->bailout_id(), translation.index(), pc_offset, entry.kind(),
1063 5997580 : entry.reason()));
1064 :
1065 2998777 : return deoptimization_id;
1066 : }
1067 :
1068 20409837 : void CodeGenerator::AddTranslationForOperand(Translation* translation,
1069 : Instruction* instr,
1070 : InstructionOperand* op,
1071 6097402 : MachineType type) {
1072 20409837 : if (op->IsStackSlot()) {
1073 13472332 : if (type.representation() == MachineRepresentation::kBit) {
1074 18407 : translation->StoreBoolStackSlot(LocationOperand::cast(op)->index());
1075 40361522 : } else if (type == MachineType::Int8() || type == MachineType::Int16() ||
1076 : type == MachineType::Int32()) {
1077 464191 : translation->StoreInt32StackSlot(LocationOperand::cast(op)->index());
1078 38969176 : } else if (type == MachineType::Uint8() || type == MachineType::Uint16() ||
1079 : type == MachineType::Uint32()) {
1080 9560 : translation->StoreUint32StackSlot(LocationOperand::cast(op)->index());
1081 12980174 : } else if (type == MachineType::Int64()) {
1082 265 : translation->StoreInt64StackSlot(LocationOperand::cast(op)->index());
1083 : } else {
1084 12979909 : CHECK_EQ(MachineRepresentation::kTagged, type.representation());
1085 12979909 : translation->StoreStackSlot(LocationOperand::cast(op)->index());
1086 : }
1087 6937505 : } else if (op->IsFPStackSlot()) {
1088 234019 : if (type.representation() == MachineRepresentation::kFloat64) {
1089 233377 : translation->StoreDoubleStackSlot(LocationOperand::cast(op)->index());
1090 : } else {
1091 642 : CHECK_EQ(MachineRepresentation::kFloat32, type.representation());
1092 642 : translation->StoreFloatStackSlot(LocationOperand::cast(op)->index());
1093 : }
1094 6703486 : } else if (op->IsRegister()) {
1095 : InstructionOperandConverter converter(this, instr);
1096 538649 : if (type.representation() == MachineRepresentation::kBit) {
1097 4676 : translation->StoreBoolRegister(converter.ToRegister(op));
1098 1600499 : } else if (type == MachineType::Int8() || type == MachineType::Int16() ||
1099 : type == MachineType::Int32()) {
1100 39645 : translation->StoreInt32Register(converter.ToRegister(op));
1101 1482983 : } else if (type == MachineType::Uint8() || type == MachineType::Uint16() ||
1102 : type == MachineType::Uint32()) {
1103 1326 : translation->StoreUint32Register(converter.ToRegister(op));
1104 493002 : } else if (type == MachineType::Int64()) {
1105 14 : translation->StoreInt64Register(converter.ToRegister(op));
1106 : } else {
1107 492988 : CHECK_EQ(MachineRepresentation::kTagged, type.representation());
1108 492988 : translation->StoreRegister(converter.ToRegister(op));
1109 : }
1110 6164837 : } else if (op->IsFPRegister()) {
1111 : InstructionOperandConverter converter(this, instr);
1112 68030 : if (type.representation() == MachineRepresentation::kFloat64) {
1113 67796 : translation->StoreDoubleRegister(converter.ToDoubleRegister(op));
1114 : } else {
1115 234 : CHECK_EQ(MachineRepresentation::kFloat32, type.representation());
1116 234 : translation->StoreFloatRegister(converter.ToFloatRegister(op));
1117 : }
1118 : } else {
1119 6096807 : CHECK(op->IsImmediate());
1120 : InstructionOperandConverter converter(this, instr);
1121 6096807 : Constant constant = converter.ToConstant(op);
1122 : DeoptimizationLiteral literal;
1123 6096823 : switch (constant.type()) {
1124 : case Constant::kInt32:
1125 13820 : if (type.representation() == MachineRepresentation::kTagged) {
1126 : // When pointers are 4 bytes, we can use int32 constants to represent
1127 : // Smis.
1128 : DCHECK_EQ(4, kSystemPointerSize);
1129 12 : Smi smi(static_cast<Address>(constant.ToInt32()));
1130 : DCHECK(smi->IsSmi());
1131 6 : literal = DeoptimizationLiteral(smi->value());
1132 13814 : } else if (type.representation() == MachineRepresentation::kBit) {
1133 1168 : if (constant.ToInt32() == 0) {
1134 : literal =
1135 231 : DeoptimizationLiteral(isolate()->factory()->false_value());
1136 : } else {
1137 : DCHECK_EQ(1, constant.ToInt32());
1138 353 : literal = DeoptimizationLiteral(isolate()->factory()->true_value());
1139 : }
1140 : } else {
1141 : DCHECK(type == MachineType::Int32() ||
1142 : type == MachineType::Uint32() ||
1143 : type.representation() == MachineRepresentation::kWord32 ||
1144 : type.representation() == MachineRepresentation::kNone);
1145 : DCHECK(type.representation() != MachineRepresentation::kNone ||
1146 : constant.ToInt32() == FrameStateDescriptor::kImpossibleValue);
1147 13230 : if (type == MachineType::Uint32()) {
1148 : literal = DeoptimizationLiteral(
1149 804 : static_cast<uint32_t>(constant.ToInt32()));
1150 : } else {
1151 25656 : literal = DeoptimizationLiteral(constant.ToInt32());
1152 : }
1153 : }
1154 : break;
1155 : case Constant::kInt64:
1156 : DCHECK_EQ(8, kSystemPointerSize);
1157 504446 : if (type.representation() == MachineRepresentation::kWord64) {
1158 : literal =
1159 277 : DeoptimizationLiteral(static_cast<double>(constant.ToInt64()));
1160 : } else {
1161 : // When pointers are 8 bytes, we can use int64 constants to represent
1162 : // Smis.
1163 : DCHECK_EQ(MachineRepresentation::kTagged, type.representation());
1164 : Smi smi(static_cast<Address>(constant.ToInt64()));
1165 : DCHECK(smi->IsSmi());
1166 504169 : literal = DeoptimizationLiteral(smi->value());
1167 : }
1168 : break;
1169 : case Constant::kFloat32:
1170 : DCHECK(type.representation() == MachineRepresentation::kFloat32 ||
1171 : type.representation() == MachineRepresentation::kTagged);
1172 0 : literal = DeoptimizationLiteral(constant.ToFloat32());
1173 0 : break;
1174 : case Constant::kFloat64:
1175 : DCHECK(type.representation() == MachineRepresentation::kFloat64 ||
1176 : type.representation() == MachineRepresentation::kTagged);
1177 90274 : literal = DeoptimizationLiteral(constant.ToFloat64().value());
1178 90274 : break;
1179 : case Constant::kHeapObject:
1180 : DCHECK_EQ(MachineRepresentation::kTagged, type.representation());
1181 5486204 : literal = DeoptimizationLiteral(constant.ToHeapObject());
1182 5486199 : break;
1183 : case Constant::kDelayedStringConstant:
1184 : DCHECK_EQ(MachineRepresentation::kTagged, type.representation());
1185 2079 : literal = DeoptimizationLiteral(constant.ToDelayedStringConstant());
1186 2079 : break;
1187 : default:
1188 0 : UNREACHABLE();
1189 : }
1190 6096818 : if (literal.object().equals(info()->closure())) {
1191 3015908 : translation->StoreJSFrameFunction();
1192 : } else {
1193 3080910 : int literal_id = DefineDeoptimizationLiteral(literal);
1194 3080889 : translation->StoreLiteral(literal_id);
1195 : }
1196 : }
1197 20409745 : }
1198 :
1199 0 : void CodeGenerator::MarkLazyDeoptSite() {
1200 5221802 : last_lazy_deopt_pc_ = tasm()->pc_offset();
1201 0 : }
1202 :
1203 340293 : DeoptimizationExit* CodeGenerator::AddDeoptimizationExit(
1204 340297 : Instruction* instr, size_t frame_state_offset) {
1205 : int const deoptimization_id = BuildTranslation(
1206 340293 : instr, -1, frame_state_offset, OutputFrameStateCombine::Ignore());
1207 :
1208 : DeoptimizationExit* const exit = new (zone())
1209 680593 : DeoptimizationExit(deoptimization_id, current_source_position_);
1210 340296 : deoptimization_exits_.push_back(exit);
1211 340294 : return exit;
1212 : }
1213 :
1214 2949375 : void CodeGenerator::InitializeSpeculationPoison() {
1215 5898750 : if (poisoning_level_ == PoisoningMitigationLevel::kDontPoison) return;
1216 :
1217 : // Initialize {kSpeculationPoisonRegister} either by comparing the expected
1218 : // with the actual call target, or by unconditionally using {-1} initially.
1219 : // Masking register arguments with it only makes sense in the first case.
1220 0 : if (info()->called_with_code_start_register()) {
1221 0 : tasm()->RecordComment("-- Prologue: generate speculation poison --");
1222 0 : GenerateSpeculationPoisonFromCodeStartRegister();
1223 0 : if (info()->is_poisoning_register_arguments()) {
1224 0 : AssembleRegisterArgumentPoisoning();
1225 : }
1226 : } else {
1227 : ResetSpeculationPoison();
1228 : }
1229 : }
1230 :
1231 4917 : void CodeGenerator::ResetSpeculationPoison() {
1232 5364137 : if (poisoning_level_ != PoisoningMitigationLevel::kDontPoison) {
1233 0 : tasm()->ResetSpeculationPoisonRegister();
1234 : }
1235 4917 : }
1236 :
1237 1747758 : OutOfLineCode::OutOfLineCode(CodeGenerator* gen)
1238 1747758 : : frame_(gen->frame()), tasm_(gen->tasm()), next_(gen->ools_) {
1239 873879 : gen->ools_ = this;
1240 873879 : }
1241 :
1242 : OutOfLineCode::~OutOfLineCode() = default;
1243 :
1244 1847734 : Handle<Object> DeoptimizationLiteral::Reify(Isolate* isolate) const {
1245 1847734 : switch (kind_) {
1246 : case DeoptimizationLiteralKind::kObject: {
1247 1777596 : return object_;
1248 : }
1249 : case DeoptimizationLiteralKind::kNumber: {
1250 69509 : return isolate->factory()->NewNumber(number_);
1251 : }
1252 : case DeoptimizationLiteralKind::kString: {
1253 629 : return string_->AllocateStringConstant(isolate);
1254 : }
1255 : }
1256 0 : UNREACHABLE();
1257 : }
1258 :
1259 : } // namespace compiler
1260 : } // namespace internal
1261 183867 : } // namespace v8
|