// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/backend/code-generator.h"

#include "src/address-map.h"
#include "src/assembler-inl.h"
#include "src/base/adapters.h"
#include "src/compiler/backend/code-generator-impl.h"
#include "src/compiler/linkage.h"
#include "src/compiler/pipeline.h"
#include "src/compiler/wasm-compiler.h"
#include "src/counters.h"
#include "src/eh-frame.h"
#include "src/frames.h"
#include "src/log.h"
#include "src/macro-assembler-inl.h"
#include "src/objects/smi.h"
#include "src/optimized-compilation-info.h"
#include "src/string-constants.h"

namespace v8 {
namespace internal {
namespace compiler {

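// A jump table allocated in the codegen zone. Tables form a singly linked
// chain through next_; AddJumpTable() prepends to the chain and AssembleCode()
// walks it to emit all tables after the instruction stream.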
class CodeGenerator::JumpTable final : public ZoneObject {
 public:
  JumpTable(JumpTable* next, Label** targets, size_t target_count)
      : next_(next), targets_(targets), target_count_(target_count) {}

  Label* label() { return &label_; }
  JumpTable* next() const { return next_; }
  Label** targets() const { return targets_; }
  size_t target_count() const { return target_count_; }

 private:
  Label label_;
  JumpTable* const next_;
  Label** const targets_;
  size_t const target_count_;
};

CodeGenerator::CodeGenerator(
    Zone* codegen_zone, Frame* frame, Linkage* linkage,
    InstructionSequence* instructions, OptimizedCompilationInfo* info,
    Isolate* isolate, base::Optional<OsrHelper> osr_helper,
    int start_source_position, JumpOptimizationInfo* jump_opt,
    PoisoningMitigationLevel poisoning_level, const AssemblerOptions& options,
    int32_t builtin_index, std::unique_ptr<AssemblerBuffer> buffer)
    : zone_(codegen_zone),
      isolate_(isolate),
      frame_access_state_(nullptr),
      linkage_(linkage),
      instructions_(instructions),
      unwinding_info_writer_(zone()),
      info_(info),
      labels_(zone()->NewArray<Label>(instructions->InstructionBlockCount())),
      current_block_(RpoNumber::Invalid()),
      start_source_position_(start_source_position),
      current_source_position_(SourcePosition::Unknown()),
      tasm_(isolate, options, CodeObjectRequired::kNo, std::move(buffer)),
      resolver_(this),
      safepoints_(zone()),
      handlers_(zone()),
      deoptimization_exits_(zone()),
      deoptimization_states_(zone()),
      deoptimization_literals_(zone()),
      translations_(zone()),
      caller_registers_saved_(false),
      jump_tables_(nullptr),
      ools_(nullptr),
      osr_helper_(std::move(osr_helper)),
      osr_pc_offset_(-1),
      optimized_out_literal_id_(-1),
      source_position_table_builder_(
          SourcePositionTableBuilder::RECORD_SOURCE_POSITIONS),
      protected_instructions_(zone()),
      result_(kSuccess),
      poisoning_level_(poisoning_level),
      block_starts_(zone()),
      instr_starts_(zone()) {
  for (int i = 0; i < instructions->InstructionBlockCount(); ++i) {
    new (&labels_[i]) Label;
  }
  CreateFrameAccessState(frame);
  CHECK_EQ(info->is_osr(), osr_helper_.has_value());
  tasm_.set_jump_optimization_info(jump_opt);
  Code::Kind code_kind = info->code_kind();
  if (code_kind == Code::WASM_FUNCTION ||
      code_kind == Code::WASM_TO_JS_FUNCTION ||
      code_kind == Code::WASM_INTERPRETER_ENTRY ||
      (Builtins::IsBuiltinId(builtin_index) &&
       Builtins::IsWasmRuntimeStub(builtin_index))) {
    tasm_.set_abort_hard(true);
  }
  tasm_.set_builtin_index(builtin_index);
}

bool CodeGenerator::wasm_runtime_exception_support() const {
  DCHECK_NOT_NULL(info_);
  return info_->wasm_runtime_exception_support();
}

void CodeGenerator::AddProtectedInstructionLanding(uint32_t instr_offset,
                                                   uint32_t landing_offset) {
  protected_instructions_.push_back({instr_offset, landing_offset});
}

void CodeGenerator::CreateFrameAccessState(Frame* frame) {
  FinishFrame(frame);
  frame_access_state_ = new (zone()) FrameAccessState(frame);
}

CodeGenerator::CodeGenResult CodeGenerator::AssembleDeoptimizerCall(
    int deoptimization_id, SourcePosition pos) {
  if (deoptimization_id > Deoptimizer::kMaxNumberOfEntries) {
    return kTooManyDeoptimizationBailouts;
  }

  DeoptimizeKind deopt_kind = GetDeoptimizationKind(deoptimization_id);
  DeoptimizeReason deoptimization_reason =
      GetDeoptimizationReason(deoptimization_id);
  Address deopt_entry =
      Deoptimizer::GetDeoptimizationEntry(tasm()->isolate(), deopt_kind);
  if (info()->is_source_positions_enabled()) {
    tasm()->RecordDeoptReason(deoptimization_reason, pos, deoptimization_id);
  }
  tasm()->CallForDeoptimization(deopt_entry, deoptimization_id);
  return kSuccess;
}

void CodeGenerator::MaybeEmitOutOfLineConstantPool() {
  tasm()->MaybeEmitOutOfLineConstantPool();
}

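// Drives assembly for the whole instruction sequence: prologue checks,
// per-block instruction assembly in assembly order, out-of-line code,
// deoptimization exits, jump tables, the safepoint table, and the exception
// handler table, in that order.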
void CodeGenerator::AssembleCode() {
  OptimizedCompilationInfo* info = this->info();

  // Open a frame scope to indicate that there is a frame on the stack. The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done in AssemblePrologue).
  FrameScope frame_scope(tasm(), StackFrame::MANUAL);

  if (info->is_source_positions_enabled()) {
    AssembleSourcePosition(start_source_position());
  }

  // Check that {kJavaScriptCallCodeStartRegister} has been set correctly.
  if (FLAG_debug_code && (info->code_kind() == Code::OPTIMIZED_FUNCTION ||
                          info->code_kind() == Code::BYTECODE_HANDLER)) {
    tasm()->RecordComment("-- Prologue: check code start register --");
    AssembleCodeStartRegisterCheck();
  }

  // We want to bail out only from JS functions, which are the only ones
  // that are optimized.
  if (info->IsOptimizing()) {
    DCHECK(linkage()->GetIncomingDescriptor()->IsJSFunctionCall());
    tasm()->RecordComment("-- Prologue: check for deoptimization --");
    BailoutIfDeoptimized();
  }

  InitializeSpeculationPoison();

  // Define deoptimization literals for all inlined functions.
  DCHECK_EQ(0u, deoptimization_literals_.size());
  for (OptimizedCompilationInfo::InlinedFunctionHolder& inlined :
       info->inlined_functions()) {
    if (!inlined.shared_info.equals(info->shared_info())) {
      int index = DefineDeoptimizationLiteral(
          DeoptimizationLiteral(inlined.shared_info));
      inlined.RegisterInlinedFunctionId(index);
    }
  }
  inlined_function_count_ = deoptimization_literals_.size();

  // Define deoptimization literals for all BytecodeArrays to which we might
  // deopt to ensure they are strongly held by the optimized code.
  if (info->has_bytecode_array()) {
    DefineDeoptimizationLiteral(DeoptimizationLiteral(info->bytecode_array()));
  }
  for (OptimizedCompilationInfo::InlinedFunctionHolder& inlined :
       info->inlined_functions()) {
    DefineDeoptimizationLiteral(DeoptimizationLiteral(inlined.bytecode_array));
  }

  unwinding_info_writer_.SetNumberOfInstructionBlocks(
      instructions()->InstructionBlockCount());

  if (info->trace_turbo_json_enabled()) {
    block_starts_.assign(instructions()->instruction_blocks().size(), -1);
    instr_starts_.assign(instructions()->instructions().size(), -1);
  }

  // Assemble instructions in assembly order.
  for (const InstructionBlock* block : instructions()->ao_blocks()) {
    // Align loop headers on vendor recommended boundaries.
    if (block->ShouldAlign() && !tasm()->jump_optimization_info()) {
      tasm()->CodeTargetAlign();
    }
    if (info->trace_turbo_json_enabled()) {
      block_starts_[block->rpo_number().ToInt()] = tasm()->pc_offset();
    }
    // Bind a label for a block.
    current_block_ = block->rpo_number();
    unwinding_info_writer_.BeginInstructionBlock(tasm()->pc_offset(), block);
    if (FLAG_code_comments) {
      std::ostringstream buffer;
      buffer << "-- B" << block->rpo_number().ToInt() << " start";
      if (block->IsDeferred()) buffer << " (deferred)";
      if (!block->needs_frame()) buffer << " (no frame)";
      if (block->must_construct_frame()) buffer << " (construct frame)";
      if (block->must_deconstruct_frame()) buffer << " (deconstruct frame)";

      if (block->IsLoopHeader()) {
        buffer << " (loop up to " << block->loop_end().ToInt() << ")";
      }
      if (block->loop_header().IsValid()) {
        buffer << " (in loop " << block->loop_header().ToInt() << ")";
      }
      buffer << " --";
      tasm()->RecordComment(buffer.str().c_str());
    }

    frame_access_state()->MarkHasFrame(block->needs_frame());

    tasm()->bind(GetLabel(current_block_));

    TryInsertBranchPoisoning(block);

    if (block->must_construct_frame()) {
      AssembleConstructFrame();
      // We need to set up the root register after we assemble the prologue,
      // to avoid clobbering callee saved registers in case of C linkage and
      // using the roots.
      // TODO(mtrofin): investigate how we can avoid doing this repeatedly.
      if (linkage()->GetIncomingDescriptor()->InitializeRootRegister()) {
        tasm()->InitializeRootRegister();
      }
    }

    if (FLAG_enable_embedded_constant_pool && !block->needs_frame()) {
      ConstantPoolUnavailableScope constant_pool_unavailable(tasm());
      result_ = AssembleBlock(block);
    } else {
      result_ = AssembleBlock(block);
    }
    if (result_ != kSuccess) return;
    unwinding_info_writer_.EndInstructionBlock(block);
  }

  // Assemble all out-of-line code.
  if (ools_) {
    tasm()->RecordComment("-- Out of line code --");
    for (OutOfLineCode* ool = ools_; ool; ool = ool->next()) {
      tasm()->bind(ool->entry());
      ool->Generate();
      if (ool->exit()->is_bound()) tasm()->jmp(ool->exit());
    }
  }

  // This nop operation is needed to ensure that the trampoline is not
  // confused with the pc of the call before deoptimization.
  // The test regress/regress-259 is an example of where we need it.
  tasm()->nop();

  // Assemble deoptimization exits.
  int last_updated = 0;
  for (DeoptimizationExit* exit : deoptimization_exits_) {
    tasm()->bind(exit->label());
    int trampoline_pc = tasm()->pc_offset();
    int deoptimization_id = exit->deoptimization_id();
    DeoptimizationState* ds = deoptimization_states_[deoptimization_id];

    if (ds->kind() == DeoptimizeKind::kLazy) {
      last_updated = safepoints()->UpdateDeoptimizationInfo(
          ds->pc_offset(), trampoline_pc, last_updated);
    }
    result_ = AssembleDeoptimizerCall(deoptimization_id, exit->pos());
    if (result_ != kSuccess) return;
  }

  // TODO(jgruber): Move all inlined metadata generation into a new,
  // architecture-independent version of FinishCode. Currently, this includes
  // the safepoint table, handler table, constant pool, and code comments, in
  // that order.
  FinishCode();

  // Emit the jump tables.
  if (jump_tables_) {
    tasm()->Align(kSystemPointerSize);
    for (JumpTable* table = jump_tables_; table; table = table->next()) {
      tasm()->bind(table->label());
      AssembleJumpTable(table->targets(), table->target_count());
    }
  }

  // The PerfJitLogger logs code up until here, excluding the safepoint
  // table. Resolve the unwinding info now so it is aware of the same code
  // size as reported by perf.
  unwinding_info_writer_.Finish(tasm()->pc_offset());

  safepoints()->Emit(tasm(), frame()->GetTotalFrameSlotCount());

  // Emit the exception handler table.
  if (!handlers_.empty()) {
    handler_table_offset_ = HandlerTable::EmitReturnTableStart(
        tasm(), static_cast<int>(handlers_.size()));
    for (size_t i = 0; i < handlers_.size(); ++i) {
      HandlerTable::EmitReturnEntry(tasm(), handlers_[i].pc_offset,
                                    handlers_[i].handler->pos());
    }
  }

  tasm()->MaybeEmitOutOfLineConstantPool();
  tasm()->FinalizeJumpOptimizationInfo();

  result_ = kSuccess;
}

void CodeGenerator::TryInsertBranchPoisoning(const InstructionBlock* block) {
  // See if our predecessor was a basic block terminated by a
  // branch_and_poison instruction. If yes, then perform the masking based on
  // the flags.
  if (block->PredecessorCount() != 1) return;
  RpoNumber pred_rpo = (block->predecessors())[0];
  const InstructionBlock* pred = instructions()->InstructionBlockAt(pred_rpo);
  if (pred->code_start() == pred->code_end()) return;
  Instruction* instr = instructions()->InstructionAt(pred->code_end() - 1);
  FlagsMode mode = FlagsModeField::decode(instr->opcode());
  switch (mode) {
    case kFlags_branch_and_poison: {
      BranchInfo branch;
      RpoNumber target = ComputeBranchInfo(&branch, instr);
      if (!target.IsValid()) {
        // Non-trivial branch, add the masking code.
        FlagsCondition condition = branch.condition;
        if (branch.false_label == GetLabel(block->rpo_number())) {
          condition = NegateFlagsCondition(condition);
        }
        AssembleBranchPoisoning(condition, instr);
      }
      break;
    }
    case kFlags_deoptimize_and_poison: {
      UNREACHABLE();
      break;
    }
    default:
      break;
  }
}

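// Emits a binary search over the ordered (value, label) pairs in [begin, end):
// ranges smaller than kBinarySearchSwitchMinimalCases fall back to a linear
// chain of JumpIfEqual checks ending in a jump to the default block; larger
// ranges split at the middle with JumpIfLessThan and recurse on each half.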
void CodeGenerator::AssembleArchBinarySearchSwitchRange(
    Register input, RpoNumber def_block, std::pair<int32_t, Label*>* begin,
    std::pair<int32_t, Label*>* end) {
  if (end - begin < kBinarySearchSwitchMinimalCases) {
    while (begin != end) {
      tasm()->JumpIfEqual(input, begin->first, begin->second);
      ++begin;
    }
    AssembleArchJump(def_block);
    return;
  }
  auto middle = begin + (end - begin) / 2;
  Label less_label;
  tasm()->JumpIfLessThan(input, middle->first, &less_label);
  AssembleArchBinarySearchSwitchRange(input, def_block, middle, end);
  tasm()->bind(&less_label);
  AssembleArchBinarySearchSwitchRange(input, def_block, begin, middle);
}

OwnedVector<byte> CodeGenerator::GetSourcePositionTable() {
  return source_position_table_builder_.ToSourcePositionTableVector();
}

OwnedVector<trap_handler::ProtectedInstructionData>
CodeGenerator::GetProtectedInstructions() {
  return OwnedVector<trap_handler::ProtectedInstructionData>::Of(
      protected_instructions_);
}

MaybeHandle<Code> CodeGenerator::FinalizeCode() {
  if (result_ != kSuccess) {
    tasm()->AbortedCodeGeneration();
    return MaybeHandle<Code>();
  }

  // Allocate the source position table.
  Handle<ByteArray> source_positions =
      source_position_table_builder_.ToSourcePositionTable(isolate());

  // Allocate deoptimization data.
  Handle<DeoptimizationData> deopt_data = GenerateDeoptimizationData();

  // Allocate and install the code.
  CodeDesc desc;
  tasm()->GetCode(isolate(), &desc, safepoints(), handler_table_offset_);

#if defined(V8_OS_WIN_X64)
  if (Builtins::IsBuiltinId(info_->builtin_index())) {
    isolate_->SetBuiltinUnwindData(info_->builtin_index(),
                                   tasm()->GetUnwindInfo());
  }
#endif

  if (unwinding_info_writer_.eh_frame_writer()) {
    unwinding_info_writer_.eh_frame_writer()->GetEhFrame(&desc);
  }

  MaybeHandle<Code> maybe_code = isolate()->factory()->TryNewCode(
      desc, info()->code_kind(), Handle<Object>(), info()->builtin_index(),
      source_positions, deopt_data, kMovable, true,
      frame()->GetTotalFrameSlotCount());

  Handle<Code> code;
  if (!maybe_code.ToHandle(&code)) {
    tasm()->AbortedCodeGeneration();
    return MaybeHandle<Code>();
  }

  isolate()->counters()->total_compiled_code_size()->Increment(
      code->raw_instruction_size());

  LOG_CODE_EVENT(isolate(),
                 CodeLinePosInfoRecordEvent(code->raw_instruction_start(),
                                            *source_positions));

  return code;
}

bool CodeGenerator::IsNextInAssemblyOrder(RpoNumber block) const {
  return instructions()
      ->InstructionBlockAt(current_block_)
      ->ao_number()
      .IsNext(instructions()->InstructionBlockAt(block)->ao_number());
}

void CodeGenerator::RecordSafepoint(ReferenceMap* references,
                                    Safepoint::Kind kind,
                                    Safepoint::DeoptMode deopt_mode) {
  Safepoint safepoint = safepoints()->DefineSafepoint(tasm(), kind, deopt_mode);
  int stackSlotToSpillSlotDelta =
      frame()->GetTotalFrameSlotCount() - frame()->GetSpillSlotCount();
  for (const InstructionOperand& operand : references->reference_operands()) {
    if (operand.IsStackSlot()) {
      int index = LocationOperand::cast(operand).index();
      DCHECK_LE(0, index);
      // We might index values in the fixed part of the frame (i.e. the
      // closure pointer or the context pointer); these are not spill slots
      // and therefore don't work with the SafepointTable currently, but
      // we also don't need to worry about them, since the GC has special
      // knowledge about those fields anyway.
      if (index < stackSlotToSpillSlotDelta) continue;
      safepoint.DefinePointerSlot(index);
    } else if (operand.IsRegister() && (kind & Safepoint::kWithRegisters)) {
      Register reg = LocationOperand::cast(operand).GetRegister();
      safepoint.DefinePointerRegister(reg);
    }
  }
}

bool CodeGenerator::IsMaterializableFromRoot(Handle<HeapObject> object,
                                             RootIndex* index_return) {
  const CallDescriptor* incoming_descriptor =
      linkage()->GetIncomingDescriptor();
  if (incoming_descriptor->flags() & CallDescriptor::kCanUseRoots) {
    return isolate()->roots_table().IsRootHandle(object, index_return) &&
           RootsTable::IsImmortalImmovable(*index_return);
  }
  return false;
}

CodeGenerator::CodeGenResult CodeGenerator::AssembleBlock(
    const InstructionBlock* block) {
  for (int i = block->code_start(); i < block->code_end(); ++i) {
    if (info()->trace_turbo_json_enabled()) {
      instr_starts_[i] = tasm()->pc_offset();
    }
    Instruction* instr = instructions()->InstructionAt(i);
    CodeGenResult result = AssembleInstruction(instr, block);
    if (result != kSuccess) return result;
  }
  return kSuccess;
}

bool CodeGenerator::IsValidPush(InstructionOperand source,
                                CodeGenerator::PushTypeFlags push_type) {
  if (source.IsImmediate() &&
      ((push_type & CodeGenerator::kImmediatePush) != 0)) {
    return true;
  }
  if (source.IsRegister() &&
      ((push_type & CodeGenerator::kRegisterPush) != 0)) {
    return true;
  }
  if (source.IsStackSlot() &&
      ((push_type & CodeGenerator::kStackSlotPush) != 0)) {
    return true;
  }
  return false;
}

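// Scans the instruction's gap moves for moves that can instead be emitted as
// pushes during a tail call. Only moves from the FIRST gap position whose
// destination is a push-compatible stack slot are collected; if any move
// reads from such a slot, the optimization is abandoned entirely. Finally,
// only the trailing contiguous run of collected moves is kept.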
void CodeGenerator::GetPushCompatibleMoves(Instruction* instr,
                                           PushTypeFlags push_type,
                                           ZoneVector<MoveOperands*>* pushes) {
  pushes->clear();
  for (int i = Instruction::FIRST_GAP_POSITION;
       i <= Instruction::LAST_GAP_POSITION; ++i) {
    Instruction::GapPosition inner_pos =
        static_cast<Instruction::GapPosition>(i);
    ParallelMove* parallel_move = instr->GetParallelMove(inner_pos);
    if (parallel_move != nullptr) {
      for (auto move : *parallel_move) {
        InstructionOperand source = move->source();
        InstructionOperand destination = move->destination();
        int first_push_compatible_index =
            V8_TARGET_ARCH_STORES_RETURN_ADDRESS_ON_STACK ? 1 : 0;
        // If there are any moves from slots that will be overridden by
        // pushes, then the full gap resolver must be used, since moves that
        // are optimized into pushes don't participate in the parallel move
        // and might clobber values needed for the gap resolve.
        if (source.IsStackSlot() && LocationOperand::cast(source).index() >=
                                        first_push_compatible_index) {
          pushes->clear();
          return;
        }
        // TODO(danno): Right now, only consider moves from the FIRST gap for
        // pushes. Theoretically, we could extract pushes for both gaps (there
        // are cases where this happens), but the logic for that would also
        // have to check to make sure that non-memory inputs to the pushes
        // from the LAST gap don't get clobbered in the FIRST gap.
        if (i == Instruction::FIRST_GAP_POSITION) {
          if (destination.IsStackSlot() &&
              LocationOperand::cast(destination).index() >=
                  first_push_compatible_index) {
            int index = LocationOperand::cast(destination).index();
            if (IsValidPush(source, push_type)) {
              if (index >= static_cast<int>(pushes->size())) {
                pushes->resize(index + 1);
              }
              (*pushes)[index] = move;
            }
          }
        }
      }
    }
  }

  // For now, only support a set of contiguous pushes at the end of the list.
  size_t push_count_upper_bound = pushes->size();
  size_t push_begin = push_count_upper_bound;
  for (auto move : base::Reversed(*pushes)) {
    if (move == nullptr) break;
    push_begin--;
  }
  size_t push_count = pushes->size() - push_begin;
  std::copy(pushes->begin() + push_begin,
            pushes->begin() + push_begin + push_count, pushes->begin());
  pushes->resize(push_count);
}

CodeGenerator::MoveType::Type CodeGenerator::MoveType::InferMove(
    InstructionOperand* source, InstructionOperand* destination) {
  if (source->IsConstant()) {
    if (destination->IsAnyRegister()) {
      return MoveType::kConstantToRegister;
    } else {
      DCHECK(destination->IsAnyStackSlot());
      return MoveType::kConstantToStack;
    }
  }
  DCHECK(LocationOperand::cast(source)->IsCompatible(
      LocationOperand::cast(destination)));
  if (source->IsAnyRegister()) {
    if (destination->IsAnyRegister()) {
      return MoveType::kRegisterToRegister;
    } else {
      DCHECK(destination->IsAnyStackSlot());
      return MoveType::kRegisterToStack;
    }
  } else {
    DCHECK(source->IsAnyStackSlot());
    if (destination->IsAnyRegister()) {
      return MoveType::kStackToRegister;
    } else {
      DCHECK(destination->IsAnyStackSlot());
      return MoveType::kStackToStack;
    }
  }
}

CodeGenerator::MoveType::Type CodeGenerator::MoveType::InferSwap(
    InstructionOperand* source, InstructionOperand* destination) {
  DCHECK(LocationOperand::cast(source)->IsCompatible(
      LocationOperand::cast(destination)));
  if (source->IsAnyRegister()) {
    if (destination->IsAnyRegister()) {
      return MoveType::kRegisterToRegister;
    } else {
      DCHECK(destination->IsAnyStackSlot());
      return MoveType::kRegisterToStack;
    }
  } else {
    DCHECK(source->IsAnyStackSlot());
    DCHECK(destination->IsAnyStackSlot());
    return MoveType::kStackToStack;
  }
}

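// Decodes the targets and condition of a branch instruction. If both targets
// are the same block, that block is returned so the caller can treat the
// branch as redundant. Otherwise, when the true block immediately follows in
// assembly order, the targets are swapped and the condition negated so the
// branch can fall through to it, and RpoNumber::Invalid() is returned.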
RpoNumber CodeGenerator::ComputeBranchInfo(BranchInfo* branch,
                                           Instruction* instr) {
  // Assemble a branch after this instruction.
  InstructionOperandConverter i(this, instr);
  RpoNumber true_rpo = i.InputRpo(instr->InputCount() - 2);
  RpoNumber false_rpo = i.InputRpo(instr->InputCount() - 1);

  if (true_rpo == false_rpo) {
    return true_rpo;
  }
  FlagsCondition condition = FlagsConditionField::decode(instr->opcode());
  if (IsNextInAssemblyOrder(true_rpo)) {
    // true block is next, can fall through if condition negated.
    std::swap(true_rpo, false_rpo);
    condition = NegateFlagsCondition(condition);
  }
  branch->condition = condition;
  branch->true_label = GetLabel(true_rpo);
  branch->false_label = GetLabel(false_rpo);
  branch->fallthru = IsNextInAssemblyOrder(false_rpo);
  return RpoNumber::Invalid();
}

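// Assembles a single instruction: records its source position, resolves its
// gap moves (with stack adjustment around tail calls), emits the
// architecture-specific code, and then materializes the instruction's flags
// continuation as a branch, an eager deoptimization check, a boolean value,
// or a trap.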
CodeGenerator::CodeGenResult CodeGenerator::AssembleInstruction(
    Instruction* instr, const InstructionBlock* block) {
  int first_unused_stack_slot;
  FlagsMode mode = FlagsModeField::decode(instr->opcode());
  if (mode != kFlags_trap) {
    AssembleSourcePosition(instr);
  }
  bool adjust_stack =
      GetSlotAboveSPBeforeTailCall(instr, &first_unused_stack_slot);
  if (adjust_stack) AssembleTailCallBeforeGap(instr, first_unused_stack_slot);
  AssembleGaps(instr);
  if (adjust_stack) AssembleTailCallAfterGap(instr, first_unused_stack_slot);
  DCHECK_IMPLIES(
      block->must_deconstruct_frame(),
      instr != instructions()->InstructionAt(block->last_instruction_index()) ||
          instr->IsRet() || instr->IsJump());
  if (instr->IsJump() && block->must_deconstruct_frame()) {
    AssembleDeconstructFrame();
  }
  // Assemble architecture-specific code for the instruction.
  CodeGenResult result = AssembleArchInstruction(instr);
  if (result != kSuccess) return result;

  FlagsCondition condition = FlagsConditionField::decode(instr->opcode());
  switch (mode) {
    case kFlags_branch:
    case kFlags_branch_and_poison: {
      BranchInfo branch;
      RpoNumber target = ComputeBranchInfo(&branch, instr);
      if (target.IsValid()) {
        // redundant branch.
        if (!IsNextInAssemblyOrder(target)) {
          AssembleArchJump(target);
        }
        return kSuccess;
      }
      // Assemble architecture-specific branch.
      AssembleArchBranch(instr, &branch);
      break;
    }
    case kFlags_deoptimize:
    case kFlags_deoptimize_and_poison: {
      // Assemble a conditional eager deoptimization after this instruction.
      InstructionOperandConverter i(this, instr);
      size_t frame_state_offset = MiscField::decode(instr->opcode());
      DeoptimizationExit* const exit =
          AddDeoptimizationExit(instr, frame_state_offset);
      Label continue_label;
      BranchInfo branch;
      branch.condition = condition;
      branch.true_label = exit->label();
      branch.false_label = &continue_label;
      branch.fallthru = true;
      // Assemble architecture-specific branch.
      AssembleArchDeoptBranch(instr, &branch);
      tasm()->bind(&continue_label);
      if (mode == kFlags_deoptimize_and_poison) {
        AssembleBranchPoisoning(NegateFlagsCondition(branch.condition), instr);
      }
      break;
    }
    case kFlags_set: {
      // Assemble a boolean materialization after this instruction.
      AssembleArchBoolean(instr, condition);
      break;
    }
    case kFlags_trap: {
      AssembleArchTrap(instr, condition);
      break;
    }
    case kFlags_none: {
      break;
    }
  }

  // TODO(jarin) We should thread the flag through rather than set it.
  if (instr->IsCall()) {
    ResetSpeculationPoison();
  }

  return kSuccess;
}

void CodeGenerator::AssembleSourcePosition(Instruction* instr) {
  SourcePosition source_position = SourcePosition::Unknown();
  if (instr->IsNop() && instr->AreMovesRedundant()) return;
  if (!instructions()->GetSourcePosition(instr, &source_position)) return;
  AssembleSourcePosition(source_position);
}

void CodeGenerator::AssembleSourcePosition(SourcePosition source_position) {
  if (source_position == current_source_position_) return;
  current_source_position_ = source_position;
  if (!source_position.IsKnown()) return;
  source_position_table_builder_.AddPosition(tasm()->pc_offset(),
                                             source_position, false);
  if (FLAG_code_comments) {
    OptimizedCompilationInfo* info = this->info();
    if (info->IsNotOptimizedFunctionOrWasmFunction()) return;
    std::ostringstream buffer;
    buffer << "-- ";
    // Turbolizer only needs the source position, as it can reconstruct
    // the inlining stack from other information.
    if (info->trace_turbo_json_enabled() || !tasm()->isolate() ||
        tasm()->isolate()->concurrent_recompilation_enabled()) {
      buffer << source_position;
    } else {
      AllowHeapAllocation allocation;
      AllowHandleAllocation handles;
      AllowHandleDereference deref;
      buffer << source_position.InliningStack(info);
    }
    buffer << " --";
    tasm()->RecordComment(buffer.str().c_str());
  }
}

bool CodeGenerator::GetSlotAboveSPBeforeTailCall(Instruction* instr,
                                                 int* slot) {
  if (instr->IsTailCall()) {
    InstructionOperandConverter g(this, instr);
    *slot = g.InputInt32(instr->InputCount() - 1);
    return true;
  } else {
    return false;
  }
}

StubCallMode CodeGenerator::DetermineStubCallMode() const {
  Code::Kind code_kind = info()->code_kind();
  return (code_kind == Code::WASM_FUNCTION ||
          code_kind == Code::WASM_TO_JS_FUNCTION)
             ? StubCallMode::kCallWasmRuntimeStub
             : StubCallMode::kCallCodeObject;
}

void CodeGenerator::AssembleGaps(Instruction* instr) {
  for (int i = Instruction::FIRST_GAP_POSITION;
       i <= Instruction::LAST_GAP_POSITION; i++) {
    Instruction::GapPosition inner_pos =
        static_cast<Instruction::GapPosition>(i);
    ParallelMove* move = instr->GetParallelMove(inner_pos);
    if (move != nullptr) resolver()->Resolve(move);
  }
}

namespace {

Handle<PodArray<InliningPosition>> CreateInliningPositions(
    OptimizedCompilationInfo* info, Isolate* isolate) {
  const OptimizedCompilationInfo::InlinedFunctionList& inlined_functions =
      info->inlined_functions();
  if (inlined_functions.size() == 0) {
    return Handle<PodArray<InliningPosition>>::cast(
        isolate->factory()->empty_byte_array());
  }
  Handle<PodArray<InliningPosition>> inl_positions =
      PodArray<InliningPosition>::New(
          isolate, static_cast<int>(inlined_functions.size()),
          AllocationType::kOld);
  for (size_t i = 0; i < inlined_functions.size(); ++i) {
    inl_positions->set(static_cast<int>(i), inlined_functions[i].position);
  }
  return inl_positions;
}

}  // namespace

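// Packages the collected deoptimization state into a DeoptimizationData
// object: the translation byte array, inlined function count, literal array,
// inlining positions, OSR offsets, and one (bytecode offset, translation
// index, pc) triple per deoptimization point.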
Handle<DeoptimizationData> CodeGenerator::GenerateDeoptimizationData() {
  OptimizedCompilationInfo* info = this->info();
  int deopt_count = static_cast<int>(deoptimization_states_.size());
  if (deopt_count == 0 && !info->is_osr()) {
    return DeoptimizationData::Empty(isolate());
  }
  Handle<DeoptimizationData> data =
      DeoptimizationData::New(isolate(), deopt_count, AllocationType::kOld);

  Handle<ByteArray> translation_array =
      translations_.CreateByteArray(isolate()->factory());

  data->SetTranslationByteArray(*translation_array);
  data->SetInlinedFunctionCount(
      Smi::FromInt(static_cast<int>(inlined_function_count_)));
  data->SetOptimizationId(Smi::FromInt(info->optimization_id()));

  if (info->has_shared_info()) {
    data->SetSharedFunctionInfo(*info->shared_info());
  } else {
    data->SetSharedFunctionInfo(Smi::kZero);
  }

  Handle<FixedArray> literals = isolate()->factory()->NewFixedArray(
      static_cast<int>(deoptimization_literals_.size()), AllocationType::kOld);
  for (unsigned i = 0; i < deoptimization_literals_.size(); i++) {
    Handle<Object> object = deoptimization_literals_[i].Reify(isolate());
    literals->set(i, *object);
  }
  data->SetLiteralArray(*literals);

  Handle<PodArray<InliningPosition>> inl_pos =
      CreateInliningPositions(info, isolate());
  data->SetInliningPositions(*inl_pos);

  if (info->is_osr()) {
    DCHECK_LE(0, osr_pc_offset_);
    data->SetOsrBytecodeOffset(Smi::FromInt(info_->osr_offset().ToInt()));
    data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_));
  } else {
    BailoutId osr_offset = BailoutId::None();
    data->SetOsrBytecodeOffset(Smi::FromInt(osr_offset.ToInt()));
    data->SetOsrPcOffset(Smi::FromInt(-1));
  }

  // Populate deoptimization entries.
  for (int i = 0; i < deopt_count; i++) {
    DeoptimizationState* deoptimization_state = deoptimization_states_[i];
    data->SetBytecodeOffset(i, deoptimization_state->bailout_id());
    CHECK(deoptimization_state);
    data->SetTranslationIndex(
        i, Smi::FromInt(deoptimization_state->translation_id()));
    data->SetPc(i, Smi::FromInt(deoptimization_state->pc_offset()));
  }

  return data;
}

Label* CodeGenerator::AddJumpTable(Label** targets, size_t target_count) {
  jump_tables_ = new (zone()) JumpTable(jump_tables_, targets, target_count);
  return jump_tables_->label();
}

void CodeGenerator::RecordCallPosition(Instruction* instr) {
  CallDescriptor::Flags flags(MiscField::decode(instr->opcode()));

  bool needs_frame_state = (flags & CallDescriptor::kNeedsFrameState);

  RecordSafepoint(
      instr->reference_map(), Safepoint::kSimple,
      needs_frame_state ? Safepoint::kLazyDeopt : Safepoint::kNoLazyDeopt);

  if (flags & CallDescriptor::kHasExceptionHandler) {
    InstructionOperandConverter i(this, instr);
    RpoNumber handler_rpo = i.InputRpo(instr->InputCount() - 1);
    handlers_.push_back({GetLabel(handler_rpo), tasm()->pc_offset()});
  }

  if (needs_frame_state) {
    MarkLazyDeoptSite();
    // If the frame state is present, it starts at argument 2 - after
    // the code address and the poison-alias index.
    size_t frame_state_offset = 2;
    FrameStateDescriptor* descriptor =
        GetDeoptimizationEntry(instr, frame_state_offset).descriptor();
    int pc_offset = tasm()->pc_offset();
    int deopt_state_id = BuildTranslation(instr, pc_offset, frame_state_offset,
                                          descriptor->state_combine());

    DeoptimizationExit* const exit = new (zone())
        DeoptimizationExit(deopt_state_id, current_source_position_);
    deoptimization_exits_.push_back(exit);
    safepoints()->RecordLazyDeoptimizationIndex(deopt_state_id);
  }
}

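// Deduplicates deoptimization literals: a linear scan returns the index of an
// equal existing literal; otherwise the literal is appended and its new index
// returned.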
int CodeGenerator::DefineDeoptimizationLiteral(DeoptimizationLiteral literal) {
  int result = static_cast<int>(deoptimization_literals_.size());
  for (unsigned i = 0; i < deoptimization_literals_.size(); ++i) {
    if (deoptimization_literals_[i] == literal) return i;
  }
  deoptimization_literals_.push_back(literal);
  return result;
}

DeoptimizationEntry const& CodeGenerator::GetDeoptimizationEntry(
    Instruction* instr, size_t frame_state_offset) {
  InstructionOperandConverter i(this, instr);
  int const state_id = i.InputInt32(frame_state_offset);
  return instructions()->GetDeoptimizationEntry(state_id);
}

DeoptimizeKind CodeGenerator::GetDeoptimizationKind(
    int deoptimization_id) const {
  size_t const index = static_cast<size_t>(deoptimization_id);
  DCHECK_LT(index, deoptimization_states_.size());
  return deoptimization_states_[index]->kind();
}

DeoptimizeReason CodeGenerator::GetDeoptimizationReason(
    int deoptimization_id) const {
  size_t const index = static_cast<size_t>(deoptimization_id);
  DCHECK_LT(index, deoptimization_states_.size());
  return deoptimization_states_[index]->reason();
}

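// Recursively serializes one state value into the translation: nested
// descriptors become captured objects, duplicates become back-references to
// earlier objects, plain values consume the next instruction operand, and
// optimized-out values share a single cached "optimized out" literal.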
void CodeGenerator::TranslateStateValueDescriptor(
    StateValueDescriptor* desc, StateValueList* nested,
    Translation* translation, InstructionOperandIterator* iter) {
  // Note:
  // If translation is null, we just skip the relevant instruction operands.
  if (desc->IsNested()) {
    if (translation != nullptr) {
      translation->BeginCapturedObject(static_cast<int>(nested->size()));
    }
    for (auto field : *nested) {
      TranslateStateValueDescriptor(field.desc, field.nested, translation,
                                    iter);
    }
  } else if (desc->IsArgumentsElements()) {
    if (translation != nullptr) {
      translation->ArgumentsElements(desc->arguments_type());
    }
  } else if (desc->IsArgumentsLength()) {
    if (translation != nullptr) {
      translation->ArgumentsLength(desc->arguments_type());
    }
  } else if (desc->IsDuplicate()) {
    if (translation != nullptr) {
      translation->DuplicateObject(static_cast<int>(desc->id()));
    }
  } else if (desc->IsPlain()) {
    InstructionOperand* op = iter->Advance();
    if (translation != nullptr) {
      AddTranslationForOperand(translation, iter->instruction(), op,
                               desc->type());
    }
  } else {
    DCHECK(desc->IsOptimizedOut());
    if (translation != nullptr) {
      if (optimized_out_literal_id_ == -1) {
        optimized_out_literal_id_ = DefineDeoptimizationLiteral(
            DeoptimizationLiteral(isolate()->factory()->optimized_out()));
      }
      translation->StoreLiteral(optimized_out_literal_id_);
    }
  }
}

void CodeGenerator::TranslateFrameStateDescriptorOperands(
    FrameStateDescriptor* desc, InstructionOperandIterator* iter,
    Translation* translation) {
  size_t index = 0;
  StateValueList* values = desc->GetStateValueDescriptors();
  for (StateValueList::iterator it = values->begin(); it != values->end();
       ++it, ++index) {
    TranslateStateValueDescriptor((*it).desc, (*it).nested, translation, iter);
  }
  DCHECK_EQ(desc->GetSize(), index);
}

void CodeGenerator::BuildTranslationForFrameStateDescriptor(
    FrameStateDescriptor* descriptor, InstructionOperandIterator* iter,
    Translation* translation, OutputFrameStateCombine state_combine) {
  // Outer-most state must be added to translation first.
  if (descriptor->outer_state() != nullptr) {
    BuildTranslationForFrameStateDescriptor(descriptor->outer_state(), iter,
                                            translation, state_combine);
  }

  Handle<SharedFunctionInfo> shared_info;
  if (!descriptor->shared_info().ToHandle(&shared_info)) {
    if (!info()->has_shared_info()) {
      return;  // Stub with no SharedFunctionInfo.
    }
    shared_info = info()->shared_info();
  }
  int shared_info_id =
      DefineDeoptimizationLiteral(DeoptimizationLiteral(shared_info));

  switch (descriptor->type()) {
    case FrameStateType::kInterpretedFunction: {
      int return_offset = 0;
      int return_count = 0;
      if (!state_combine.IsOutputIgnored()) {
        return_offset = static_cast<int>(state_combine.GetOffsetToPokeAt());
        return_count = static_cast<int>(iter->instruction()->OutputCount());
      }
      translation->BeginInterpretedFrame(
          descriptor->bailout_id(), shared_info_id,
          static_cast<unsigned int>(descriptor->locals_count() + 1),
          return_offset, return_count);
      break;
    }
    case FrameStateType::kArgumentsAdaptor:
      translation->BeginArgumentsAdaptorFrame(
          shared_info_id,
          static_cast<unsigned int>(descriptor->parameters_count()));
      break;
    case FrameStateType::kConstructStub:
      DCHECK(descriptor->bailout_id().IsValidForConstructStub());
      translation->BeginConstructStubFrame(
          descriptor->bailout_id(), shared_info_id,
          static_cast<unsigned int>(descriptor->parameters_count() + 1));
      break;
    case FrameStateType::kBuiltinContinuation: {
      BailoutId bailout_id = descriptor->bailout_id();
      int parameter_count =
          static_cast<unsigned int>(descriptor->parameters_count());
      translation->BeginBuiltinContinuationFrame(bailout_id, shared_info_id,
                                                 parameter_count);
      break;
    }
    case FrameStateType::kJavaScriptBuiltinContinuation: {
      BailoutId bailout_id = descriptor->bailout_id();
      int parameter_count =
          static_cast<unsigned int>(descriptor->parameters_count());
      translation->BeginJavaScriptBuiltinContinuationFrame(
          bailout_id, shared_info_id, parameter_count);
      break;
    }
    case FrameStateType::kJavaScriptBuiltinContinuationWithCatch: {
      BailoutId bailout_id = descriptor->bailout_id();
      int parameter_count =
          static_cast<unsigned int>(descriptor->parameters_count());
      translation->BeginJavaScriptBuiltinContinuationWithCatchFrame(
          bailout_id, shared_info_id, parameter_count);
      break;
    }
  }

  TranslateFrameStateDescriptorOperands(descriptor, iter, translation);
}

int CodeGenerator::BuildTranslation(Instruction* instr, int pc_offset,
                                    size_t frame_state_offset,
                                    OutputFrameStateCombine state_combine) {
  DeoptimizationEntry const& entry =
      GetDeoptimizationEntry(instr, frame_state_offset);
  FrameStateDescriptor* const descriptor = entry.descriptor();
  frame_state_offset++;

  int update_feedback_count = entry.feedback().IsValid() ? 1 : 0;
  Translation translation(&translations_,
                          static_cast<int>(descriptor->GetFrameCount()),
                          static_cast<int>(descriptor->GetJSFrameCount()),
                          update_feedback_count, zone());
  if (entry.feedback().IsValid()) {
    DeoptimizationLiteral literal =
        DeoptimizationLiteral(entry.feedback().vector());
    int literal_id = DefineDeoptimizationLiteral(literal);
    translation.AddUpdateFeedback(literal_id, entry.feedback().slot().ToInt());
  }
  InstructionOperandIterator iter(instr, frame_state_offset);
  BuildTranslationForFrameStateDescriptor(descriptor, &iter, &translation,
                                          state_combine);

  int deoptimization_id = static_cast<int>(deoptimization_states_.size());

  deoptimization_states_.push_back(new (zone()) DeoptimizationState(
      descriptor->bailout_id(), translation.index(), pc_offset, entry.kind(),
      entry.reason()));

  return deoptimization_id;
}

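// Maps a single instruction operand to a translation opcode, dispatching on
// the operand's location (stack slot, FP stack slot, register, FP register,
// or immediate) and on the machine type of the stored value. Immediates are
// reified as deoptimization literals, with the closure itself stored via
// StoreJSFrameFunction.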
void CodeGenerator::AddTranslationForOperand(Translation* translation,
                                             Instruction* instr,
                                             InstructionOperand* op,
                                             MachineType type) {
  if (op->IsStackSlot()) {
    if (type.representation() == MachineRepresentation::kBit) {
      translation->StoreBoolStackSlot(LocationOperand::cast(op)->index());
    } else if (type == MachineType::Int8() || type == MachineType::Int16() ||
               type == MachineType::Int32()) {
      translation->StoreInt32StackSlot(LocationOperand::cast(op)->index());
    } else if (type == MachineType::Uint8() || type == MachineType::Uint16() ||
               type == MachineType::Uint32()) {
      translation->StoreUint32StackSlot(LocationOperand::cast(op)->index());
    } else if (type == MachineType::Int64()) {
      translation->StoreInt64StackSlot(LocationOperand::cast(op)->index());
    } else {
#if defined(V8_COMPRESS_POINTERS)
      CHECK(MachineRepresentation::kTagged == type.representation() ||
            MachineRepresentation::kCompressed == type.representation());
#else
      CHECK(MachineRepresentation::kTagged == type.representation());
#endif
      translation->StoreStackSlot(LocationOperand::cast(op)->index());
    }
  } else if (op->IsFPStackSlot()) {
    if (type.representation() == MachineRepresentation::kFloat64) {
      translation->StoreDoubleStackSlot(LocationOperand::cast(op)->index());
    } else {
      CHECK_EQ(MachineRepresentation::kFloat32, type.representation());
      translation->StoreFloatStackSlot(LocationOperand::cast(op)->index());
    }
  } else if (op->IsRegister()) {
    InstructionOperandConverter converter(this, instr);
    if (type.representation() == MachineRepresentation::kBit) {
      translation->StoreBoolRegister(converter.ToRegister(op));
    } else if (type == MachineType::Int8() || type == MachineType::Int16() ||
               type == MachineType::Int32()) {
      translation->StoreInt32Register(converter.ToRegister(op));
    } else if (type == MachineType::Uint8() || type == MachineType::Uint16() ||
               type == MachineType::Uint32()) {
      translation->StoreUint32Register(converter.ToRegister(op));
    } else if (type == MachineType::Int64()) {
      translation->StoreInt64Register(converter.ToRegister(op));
    } else {
#if defined(V8_COMPRESS_POINTERS)
      CHECK(MachineRepresentation::kTagged == type.representation() ||
            MachineRepresentation::kCompressed == type.representation());
#else
      CHECK(MachineRepresentation::kTagged == type.representation());
#endif
      translation->StoreRegister(converter.ToRegister(op));
    }
  } else if (op->IsFPRegister()) {
    InstructionOperandConverter converter(this, instr);
    if (type.representation() == MachineRepresentation::kFloat64) {
      translation->StoreDoubleRegister(converter.ToDoubleRegister(op));
    } else {
      CHECK_EQ(MachineRepresentation::kFloat32, type.representation());
      translation->StoreFloatRegister(converter.ToFloatRegister(op));
    }
  } else {
    CHECK(op->IsImmediate());
    InstructionOperandConverter converter(this, instr);
    Constant constant = converter.ToConstant(op);
    DeoptimizationLiteral literal;
    switch (constant.type()) {
      case Constant::kInt32:
        if (type.representation() == MachineRepresentation::kTagged) {
          // When pointers are 4 bytes, we can use int32 constants to
          // represent Smis.
          DCHECK_EQ(4, kSystemPointerSize);
          Smi smi(static_cast<Address>(constant.ToInt32()));
          DCHECK(smi->IsSmi());
          literal = DeoptimizationLiteral(smi->value());
        } else if (type.representation() == MachineRepresentation::kBit) {
          if (constant.ToInt32() == 0) {
            literal =
                DeoptimizationLiteral(isolate()->factory()->false_value());
          } else {
            DCHECK_EQ(1, constant.ToInt32());
            literal =
                DeoptimizationLiteral(isolate()->factory()->true_value());
          }
        } else {
          DCHECK(type == MachineType::Int32() ||
                 type == MachineType::Uint32() ||
                 type.representation() == MachineRepresentation::kWord32 ||
                 type.representation() == MachineRepresentation::kNone);
          DCHECK(type.representation() != MachineRepresentation::kNone ||
                 constant.ToInt32() == FrameStateDescriptor::kImpossibleValue);
          if (type == MachineType::Uint32()) {
            literal = DeoptimizationLiteral(
                static_cast<uint32_t>(constant.ToInt32()));
          } else {
            literal = DeoptimizationLiteral(constant.ToInt32());
          }
        }
        break;
      case Constant::kInt64:
        DCHECK_EQ(8, kSystemPointerSize);
        if (type.representation() == MachineRepresentation::kWord64) {
          literal =
              DeoptimizationLiteral(static_cast<double>(constant.ToInt64()));
        } else {
          // When pointers are 8 bytes, we can use int64 constants to
          // represent Smis.
          DCHECK_EQ(MachineRepresentation::kTagged, type.representation());
          Smi smi(static_cast<Address>(constant.ToInt64()));
          DCHECK(smi->IsSmi());
          literal = DeoptimizationLiteral(smi->value());
        }
        break;
      case Constant::kFloat32:
        DCHECK(type.representation() == MachineRepresentation::kFloat32 ||
               type.representation() == MachineRepresentation::kTagged);
        literal = DeoptimizationLiteral(constant.ToFloat32());
        break;
      case Constant::kFloat64:
        DCHECK(type.representation() == MachineRepresentation::kFloat64 ||
               type.representation() == MachineRepresentation::kTagged);
        literal = DeoptimizationLiteral(constant.ToFloat64().value());
        break;
      case Constant::kHeapObject:
        DCHECK_EQ(MachineRepresentation::kTagged, type.representation());
        literal = DeoptimizationLiteral(constant.ToHeapObject());
        break;
      case Constant::kDelayedStringConstant:
        DCHECK_EQ(MachineRepresentation::kTagged, type.representation());
        literal = DeoptimizationLiteral(constant.ToDelayedStringConstant());
        break;
      default:
        UNREACHABLE();
    }
    if (literal.object().equals(info()->closure())) {
      translation->StoreJSFrameFunction();
    } else {
      int literal_id = DefineDeoptimizationLiteral(literal);
      translation->StoreLiteral(literal_id);
    }
  }
}

void CodeGenerator::MarkLazyDeoptSite() {
  last_lazy_deopt_pc_ = tasm()->pc_offset();
}

DeoptimizationExit* CodeGenerator::AddDeoptimizationExit(
    Instruction* instr, size_t frame_state_offset) {
  int const deoptimization_id = BuildTranslation(
      instr, -1, frame_state_offset, OutputFrameStateCombine::Ignore());

  DeoptimizationExit* const exit = new (zone())
      DeoptimizationExit(deoptimization_id, current_source_position_);
  deoptimization_exits_.push_back(exit);
  return exit;
}

void CodeGenerator::InitializeSpeculationPoison() {
  if (poisoning_level_ == PoisoningMitigationLevel::kDontPoison) return;

  // Initialize {kSpeculationPoisonRegister} either by comparing the expected
  // with the actual call target, or by unconditionally using {-1} initially.
  // Masking register arguments with it only makes sense in the first case.
  if (info()->called_with_code_start_register()) {
    tasm()->RecordComment("-- Prologue: generate speculation poison --");
    GenerateSpeculationPoisonFromCodeStartRegister();
    if (info()->is_poisoning_register_arguments()) {
      AssembleRegisterArgumentPoisoning();
    }
  } else {
    ResetSpeculationPoison();
  }
}

void CodeGenerator::ResetSpeculationPoison() {
  if (poisoning_level_ != PoisoningMitigationLevel::kDontPoison) {
    tasm()->ResetSpeculationPoisonRegister();
  }
}

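// Out-of-line code fragments register themselves with the code generator on
// construction by linking into the singly linked ools_ list, which
// AssembleCode() emits after the main instruction stream.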
OutOfLineCode::OutOfLineCode(CodeGenerator* gen)
    : frame_(gen->frame()), tasm_(gen->tasm()), next_(gen->ools_) {
  gen->ools_ = this;
}

OutOfLineCode::~OutOfLineCode() = default;

Handle<Object> DeoptimizationLiteral::Reify(Isolate* isolate) const {
  switch (kind_) {
    case DeoptimizationLiteralKind::kObject: {
      return object_;
    }
    case DeoptimizationLiteralKind::kNumber: {
      return isolate->factory()->NewNumber(number_);
    }
    case DeoptimizationLiteralKind::kString: {
      return string_->AllocateStringConstant(isolate);
    }
  }
  UNREACHABLE();
}

}  // namespace compiler
}  // namespace internal
}  // namespace v8
|