Line data Source code
1 : // Copyright 2014 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #include "src/compiler/pipeline.h"
6 :
7 : #include <fstream> // NOLINT(readability/streams)
8 : #include <iostream>
9 : #include <memory>
10 : #include <sstream>
11 :
12 : #include "src/assembler-inl.h"
13 : #include "src/base/adapters.h"
14 : #include "src/base/optional.h"
15 : #include "src/base/platform/elapsed-timer.h"
16 : #include "src/bootstrapper.h"
17 : #include "src/code-tracer.h"
18 : #include "src/compiler.h"
19 : #include "src/compiler/backend/code-generator.h"
20 : #include "src/compiler/backend/frame-elider.h"
21 : #include "src/compiler/backend/instruction-selector.h"
22 : #include "src/compiler/backend/instruction.h"
23 : #include "src/compiler/backend/jump-threading.h"
24 : #include "src/compiler/backend/live-range-separator.h"
25 : #include "src/compiler/backend/move-optimizer.h"
26 : #include "src/compiler/backend/register-allocator-verifier.h"
27 : #include "src/compiler/backend/register-allocator.h"
28 : #include "src/compiler/basic-block-instrumentor.h"
29 : #include "src/compiler/branch-elimination.h"
30 : #include "src/compiler/bytecode-graph-builder.h"
31 : #include "src/compiler/checkpoint-elimination.h"
32 : #include "src/compiler/common-operator-reducer.h"
33 : #include "src/compiler/compilation-dependencies.h"
34 : #include "src/compiler/compiler-source-position-table.h"
35 : #include "src/compiler/constant-folding-reducer.h"
36 : #include "src/compiler/control-flow-optimizer.h"
37 : #include "src/compiler/dead-code-elimination.h"
38 : #include "src/compiler/effect-control-linearizer.h"
39 : #include "src/compiler/escape-analysis-reducer.h"
40 : #include "src/compiler/escape-analysis.h"
41 : #include "src/compiler/graph-trimmer.h"
42 : #include "src/compiler/graph-visualizer.h"
43 : #include "src/compiler/js-call-reducer.h"
44 : #include "src/compiler/js-context-specialization.h"
45 : #include "src/compiler/js-create-lowering.h"
46 : #include "src/compiler/js-generic-lowering.h"
47 : #include "src/compiler/js-heap-broker.h"
48 : #include "src/compiler/js-heap-copy-reducer.h"
49 : #include "src/compiler/js-inlining-heuristic.h"
50 : #include "src/compiler/js-intrinsic-lowering.h"
51 : #include "src/compiler/js-native-context-specialization.h"
52 : #include "src/compiler/js-typed-lowering.h"
53 : #include "src/compiler/load-elimination.h"
54 : #include "src/compiler/loop-analysis.h"
55 : #include "src/compiler/loop-peeling.h"
56 : #include "src/compiler/loop-variable-optimizer.h"
57 : #include "src/compiler/machine-graph-verifier.h"
58 : #include "src/compiler/machine-operator-reducer.h"
59 : #include "src/compiler/memory-optimizer.h"
60 : #include "src/compiler/node-origin-table.h"
61 : #include "src/compiler/osr.h"
62 : #include "src/compiler/pipeline-statistics.h"
63 : #include "src/compiler/redundancy-elimination.h"
64 : #include "src/compiler/schedule.h"
65 : #include "src/compiler/scheduler.h"
66 : #include "src/compiler/select-lowering.h"
67 : #include "src/compiler/serializer-for-background-compilation.h"
68 : #include "src/compiler/simplified-lowering.h"
69 : #include "src/compiler/simplified-operator-reducer.h"
70 : #include "src/compiler/simplified-operator.h"
71 : #include "src/compiler/store-store-elimination.h"
72 : #include "src/compiler/type-narrowing-reducer.h"
73 : #include "src/compiler/typed-optimization.h"
74 : #include "src/compiler/typer.h"
75 : #include "src/compiler/value-numbering-reducer.h"
76 : #include "src/compiler/verifier.h"
77 : #include "src/compiler/wasm-compiler.h"
78 : #include "src/compiler/zone-stats.h"
79 : #include "src/disassembler.h"
80 : #include "src/isolate-inl.h"
81 : #include "src/objects/shared-function-info.h"
82 : #include "src/optimized-compilation-info.h"
83 : #include "src/ostreams.h"
84 : #include "src/parsing/parse-info.h"
85 : #include "src/register-configuration.h"
86 : #include "src/utils.h"
87 : #include "src/wasm/function-body-decoder.h"
88 : #include "src/wasm/function-compiler.h"
89 : #include "src/wasm/wasm-engine.h"
90 :
91 : namespace v8 {
92 : namespace internal {
93 : namespace compiler {
94 :
95 : // Turbofan can only handle 2^16 control inputs. Since each control flow split
96 : // requires at least two bytes (jump and offset), we limit the bytecode size
97 : // to 128K bytes.
98 : const int kMaxBytecodeSizeForTurbofan = 128 * 1024;
99 :
100 : class PipelineData {
101 : public:
102 : // For main entry point.
103 950618 : PipelineData(ZoneStats* zone_stats, Isolate* isolate,
104 : OptimizedCompilationInfo* info,
105 : PipelineStatistics* pipeline_statistics)
106 : : isolate_(isolate),
107 : allocator_(isolate->allocator()),
108 : info_(info),
109 : debug_name_(info_->GetDebugName()),
110 : may_have_unverifiable_graph_(false),
111 : zone_stats_(zone_stats),
112 : pipeline_statistics_(pipeline_statistics),
113 : graph_zone_scope_(zone_stats_, ZONE_NAME),
114 475311 : graph_zone_(graph_zone_scope_.zone()),
115 : instruction_zone_scope_(zone_stats_, ZONE_NAME),
116 475311 : instruction_zone_(instruction_zone_scope_.zone()),
117 : codegen_zone_scope_(zone_stats_, ZONE_NAME),
118 475311 : codegen_zone_(codegen_zone_scope_.zone()),
119 : register_allocation_zone_scope_(zone_stats_, ZONE_NAME),
120 475310 : register_allocation_zone_(register_allocation_zone_scope_.zone()),
121 4277788 : assembler_options_(AssemblerOptions::Default(isolate)) {
122 : PhaseScope scope(pipeline_statistics, "init pipeline data");
123 950622 : graph_ = new (graph_zone_) Graph(graph_zone_);
124 950620 : source_positions_ = new (graph_zone_) SourcePositionTable(graph_);
125 : node_origins_ = info->trace_turbo_json_enabled()
126 2 : ? new (graph_zone_) NodeOriginTable(graph_)
127 950618 : : nullptr;
128 950620 : simplified_ = new (graph_zone_) SimplifiedOperatorBuilder(graph_zone_);
129 : machine_ = new (graph_zone_) MachineOperatorBuilder(
130 : graph_zone_, MachineType::PointerRepresentation(),
131 : InstructionSelector::SupportedMachineOperatorFlags(),
132 950621 : InstructionSelector::AlignmentRequirements());
133 950622 : common_ = new (graph_zone_) CommonOperatorBuilder(graph_zone_);
134 950617 : javascript_ = new (graph_zone_) JSOperatorBuilder(graph_zone_);
135 : jsgraph_ = new (graph_zone_)
136 950622 : JSGraph(isolate_, graph_, common_, javascript_, simplified_, machine_);
137 950618 : broker_ = new (info_->zone()) JSHeapBroker(isolate_, info_->zone());
138 : dependencies_ =
139 950615 : new (info_->zone()) CompilationDependencies(isolate_, info_->zone());
140 475306 : }
141 :
142 : // For WebAssembly compile entry point.
143 505451 : PipelineData(ZoneStats* zone_stats, wasm::WasmEngine* wasm_engine,
144 1516575 : OptimizedCompilationInfo* info, MachineGraph* mcgraph,
145 : PipelineStatistics* pipeline_statistics,
146 : SourcePositionTable* source_positions,
147 : NodeOriginTable* node_origins,
148 : const AssemblerOptions& assembler_options)
149 : : isolate_(nullptr),
150 : wasm_engine_(wasm_engine),
151 505451 : allocator_(wasm_engine->allocator()),
152 : info_(info),
153 : debug_name_(info_->GetDebugName()),
154 : may_have_unverifiable_graph_(false),
155 : zone_stats_(zone_stats),
156 : pipeline_statistics_(pipeline_statistics),
157 : graph_zone_scope_(zone_stats_, ZONE_NAME),
158 505540 : graph_zone_(graph_zone_scope_.zone()),
159 : graph_(mcgraph->graph()),
160 : source_positions_(source_positions),
161 : node_origins_(node_origins),
162 : machine_(mcgraph->machine()),
163 : common_(mcgraph->common()),
164 : mcgraph_(mcgraph),
165 : instruction_zone_scope_(zone_stats_, ZONE_NAME),
166 505525 : instruction_zone_(instruction_zone_scope_.zone()),
167 : codegen_zone_scope_(zone_stats_, ZONE_NAME),
168 505552 : codegen_zone_(codegen_zone_scope_.zone()),
169 : register_allocation_zone_scope_(zone_stats_, ZONE_NAME),
170 505484 : register_allocation_zone_(register_allocation_zone_scope_.zone()),
171 5055120 : assembler_options_(assembler_options) {}
172 :
173 : // For CodeStubAssembler and machine graph testing entry point.
174 1189064 : PipelineData(ZoneStats* zone_stats, OptimizedCompilationInfo* info,
175 1189064 : Isolate* isolate, Graph* graph, Schedule* schedule,
176 : SourcePositionTable* source_positions,
177 : NodeOriginTable* node_origins, JumpOptimizationInfo* jump_opt,
178 : const AssemblerOptions& assembler_options)
179 : : isolate_(isolate),
180 : allocator_(isolate->allocator()),
181 : info_(info),
182 : debug_name_(info_->GetDebugName()),
183 : zone_stats_(zone_stats),
184 : graph_zone_scope_(zone_stats_, ZONE_NAME),
185 1189066 : graph_zone_(graph_zone_scope_.zone()),
186 : graph_(graph),
187 : source_positions_(source_positions),
188 : node_origins_(node_origins),
189 : schedule_(schedule),
190 : instruction_zone_scope_(zone_stats_, ZONE_NAME),
191 1189066 : instruction_zone_(instruction_zone_scope_.zone()),
192 : codegen_zone_scope_(zone_stats_, ZONE_NAME),
193 1189066 : codegen_zone_(codegen_zone_scope_.zone()),
194 : register_allocation_zone_scope_(zone_stats_, ZONE_NAME),
195 1189065 : register_allocation_zone_(register_allocation_zone_scope_.zone()),
196 : jump_optimization_info_(jump_opt),
197 8323457 : assembler_options_(assembler_options) {
198 2378132 : simplified_ = new (graph_zone_) SimplifiedOperatorBuilder(graph_zone_);
199 : machine_ = new (graph_zone_) MachineOperatorBuilder(
200 : graph_zone_, MachineType::PointerRepresentation(),
201 : InstructionSelector::SupportedMachineOperatorFlags(),
202 2378131 : InstructionSelector::AlignmentRequirements());
203 2378130 : common_ = new (graph_zone_) CommonOperatorBuilder(graph_zone_);
204 2378132 : javascript_ = new (graph_zone_) JSOperatorBuilder(graph_zone_);
205 : jsgraph_ = new (graph_zone_)
206 2378130 : JSGraph(isolate_, graph_, common_, javascript_, simplified_, machine_);
207 1189065 : }
208 :
209 : // For register allocation testing entry point.
210 42 : PipelineData(ZoneStats* zone_stats, OptimizedCompilationInfo* info,
211 84 : Isolate* isolate, InstructionSequence* sequence)
212 : : isolate_(isolate),
213 : allocator_(isolate->allocator()),
214 : info_(info),
215 : debug_name_(info_->GetDebugName()),
216 : zone_stats_(zone_stats),
217 : graph_zone_scope_(zone_stats_, ZONE_NAME),
218 : instruction_zone_scope_(zone_stats_, ZONE_NAME),
219 : instruction_zone_(sequence->zone()),
220 : sequence_(sequence),
221 : codegen_zone_scope_(zone_stats_, ZONE_NAME),
222 42 : codegen_zone_(codegen_zone_scope_.zone()),
223 : register_allocation_zone_scope_(zone_stats_, ZONE_NAME),
224 42 : register_allocation_zone_(register_allocation_zone_scope_.zone()),
225 294 : assembler_options_(AssemblerOptions::Default(isolate)) {}
226 :
227 2169771 : ~PipelineData() {
228 : // Must happen before zones are destroyed.
229 2169771 : delete code_generator_;
230 2169948 : code_generator_ = nullptr;
231 2169948 : DeleteTyper();
232 :
233 2169901 : DeleteRegisterAllocationZone();
234 2169914 : DeleteInstructionZone();
235 2169921 : DeleteCodegenZone();
236 2169979 : DeleteGraphZone();
237 2169972 : }
238 :
239 : Isolate* isolate() const { return isolate_; }
240 : AccountingAllocator* allocator() const { return allocator_; }
241 : OptimizedCompilationInfo* info() const { return info_; }
242 : ZoneStats* zone_stats() const { return zone_stats_; }
243 : CompilationDependencies* dependencies() const { return dependencies_; }
244 : PipelineStatistics* pipeline_statistics() { return pipeline_statistics_; }
245 : OsrHelper* osr_helper() { return &(*osr_helper_); }
246 : bool compilation_failed() const { return compilation_failed_; }
247 9 : void set_compilation_failed() { compilation_failed_ = true; }
248 :
249 : bool verify_graph() const { return verify_graph_; }
250 132952 : void set_verify_graph(bool value) { verify_graph_ = value; }
251 :
252 : MaybeHandle<Code> code() { return code_; }
253 : void set_code(MaybeHandle<Code> code) {
254 : DCHECK(code_.is_null());
255 1579173 : code_ = code;
256 : }
257 :
258 : CodeGenerator* code_generator() const { return code_generator_; }
259 :
260 : // RawMachineAssembler generally produces graphs which cannot be verified.
261 : bool MayHaveUnverifiableGraph() const { return may_have_unverifiable_graph_; }
262 :
263 : Zone* graph_zone() const { return graph_zone_; }
264 : Graph* graph() const { return graph_; }
265 : SourcePositionTable* source_positions() const { return source_positions_; }
266 : NodeOriginTable* node_origins() const { return node_origins_; }
267 : MachineOperatorBuilder* machine() const { return machine_; }
268 : CommonOperatorBuilder* common() const { return common_; }
269 : JSOperatorBuilder* javascript() const { return javascript_; }
270 : JSGraph* jsgraph() const { return jsgraph_; }
271 : MachineGraph* mcgraph() const { return mcgraph_; }
272 913461 : Handle<Context> native_context() const {
273 1826923 : return handle(info()->native_context(), isolate());
274 : }
275 : Handle<JSGlobalObject> global_object() const {
276 : return handle(info()->global_object(), isolate());
277 : }
278 :
279 : JSHeapBroker* broker() const { return broker_; }
280 :
281 : Schedule* schedule() const { return schedule_; }
282 : void set_schedule(Schedule* schedule) {
283 : DCHECK(!schedule_);
284 1903494 : schedule_ = schedule;
285 : }
286 : void reset_schedule() { schedule_ = nullptr; }
287 :
288 : Zone* instruction_zone() const { return instruction_zone_; }
289 : Zone* codegen_zone() const { return codegen_zone_; }
290 : InstructionSequence* sequence() const { return sequence_; }
291 : Frame* frame() const { return frame_; }
292 :
293 : Zone* register_allocation_zone() const { return register_allocation_zone_; }
294 : RegisterAllocationData* register_allocation_data() const {
295 : return register_allocation_data_;
296 : }
297 :
298 : BasicBlockProfiler::Data* profiler_data() const { return profiler_data_; }
299 : void set_profiler_data(BasicBlockProfiler::Data* profiler_data) {
300 8 : profiler_data_ = profiler_data;
301 : }
302 :
303 : std::string const& source_position_output() const {
304 : return source_position_output_;
305 : }
306 : void set_source_position_output(std::string const& source_position_output) {
307 2 : source_position_output_ = source_position_output;
308 : }
309 :
310 : JumpOptimizationInfo* jump_optimization_info() const {
311 : return jump_optimization_info_;
312 : }
313 :
314 : const AssemblerOptions& assembler_options() const {
315 : return assembler_options_;
316 : }
317 :
318 30 : CodeTracer* GetCodeTracer() const {
319 60 : return wasm_engine_ == nullptr ? isolate_->GetCodeTracer()
320 60 : : wasm_engine_->GetCodeTracer();
321 : }
322 :
323 913394 : Typer* CreateTyper() {
324 : DCHECK_NULL(typer_);
325 913394 : typer_ = new Typer(broker(), typer_flags_, graph());
326 456697 : return typer_;
327 : }
328 :
329 : void AddTyperFlag(Typer::Flag flag) {
330 : DCHECK_NULL(typer_);
331 : typer_flags_ |= flag;
332 : }
333 :
334 2626581 : void DeleteTyper() {
335 2626581 : delete typer_;
336 2626583 : typer_ = nullptr;
337 2626583 : }
338 :
339 4311485 : void DeleteGraphZone() {
340 8622994 : if (graph_zone_ == nullptr) return;
341 2169906 : graph_zone_scope_.Destroy();
342 2169930 : graph_zone_ = nullptr;
343 2169930 : graph_ = nullptr;
344 2169930 : source_positions_ = nullptr;
345 2169930 : node_origins_ = nullptr;
346 2169930 : simplified_ = nullptr;
347 2169930 : machine_ = nullptr;
348 2169930 : common_ = nullptr;
349 2169930 : javascript_ = nullptr;
350 2169930 : jsgraph_ = nullptr;
351 2169930 : mcgraph_ = nullptr;
352 2169930 : schedule_ = nullptr;
353 : }
354 :
355 4311374 : void DeleteInstructionZone() {
356 8622825 : if (instruction_zone_ == nullptr) return;
357 2169904 : instruction_zone_scope_.Destroy();
358 2169981 : instruction_zone_ = nullptr;
359 2169981 : sequence_ = nullptr;
360 : }
361 :
362 2169923 : void DeleteCodegenZone() {
363 4339908 : if (codegen_zone_ == nullptr) return;
364 2169918 : codegen_zone_scope_.Destroy();
365 2169980 : codegen_zone_ = nullptr;
366 2169980 : dependencies_ = nullptr;
367 2169980 : broker_ = nullptr;
368 2169980 : frame_ = nullptr;
369 : }
370 :
371 4311390 : void DeleteRegisterAllocationZone() {
372 8622845 : if (register_allocation_zone_ == nullptr) return;
373 2169918 : register_allocation_zone_scope_.Destroy();
374 2169983 : register_allocation_zone_ = nullptr;
375 2169983 : register_allocation_data_ = nullptr;
376 : }
377 :
378 4282894 : void InitializeInstructionSequence(const CallDescriptor* call_descriptor) {
379 : DCHECK_NULL(sequence_);
380 : InstructionBlocks* instruction_blocks =
381 : InstructionSequence::InstructionBlocksFor(instruction_zone(),
382 2141354 : schedule());
383 : sequence_ = new (instruction_zone())
384 2141416 : InstructionSequence(isolate(), instruction_zone(), instruction_blocks);
385 4282766 : if (call_descriptor && call_descriptor->RequiresFrameAsIncoming()) {
386 2040086 : sequence_->instruction_blocks()[0]->mark_needs_frame();
387 : } else {
388 : DCHECK_EQ(0u, call_descriptor->CalleeSavedFPRegisters());
389 : DCHECK_EQ(0u, call_descriptor->CalleeSavedRegisters());
390 : }
391 2141468 : }
392 :
393 4282805 : void InitializeFrameData(CallDescriptor* call_descriptor) {
394 : DCHECK_NULL(frame_);
395 : int fixed_frame_size = 0;
396 2141448 : if (call_descriptor != nullptr) {
397 2141429 : fixed_frame_size = call_descriptor->CalculateFixedFrameSize();
398 : }
399 2141612 : frame_ = new (codegen_zone()) Frame(fixed_frame_size);
400 2141527 : }
401 :
402 2141296 : void InitializeRegisterAllocationData(const RegisterConfiguration* config,
403 2141296 : CallDescriptor* call_descriptor) {
404 : DCHECK_NULL(register_allocation_data_);
405 : register_allocation_data_ = new (register_allocation_zone())
406 : RegisterAllocationData(config, register_allocation_zone(), frame(),
407 2141611 : sequence(), debug_name());
408 2141568 : }
409 :
410 4991 : void InitializeOsrHelper() {
411 : DCHECK(!osr_helper_.has_value());
412 4991 : osr_helper_.emplace(info());
413 : }
414 :
415 : void set_start_source_position(int position) {
416 : DCHECK_EQ(start_source_position_, kNoSourcePosition);
417 453364 : start_source_position_ = position;
418 : }
419 :
420 2141408 : void InitializeCodeGenerator(Linkage* linkage,
421 2141408 : std::unique_ptr<AssemblerBuffer> buffer) {
422 : DCHECK_NULL(code_generator_);
423 :
424 : code_generator_ = new CodeGenerator(
425 : codegen_zone(), frame(), linkage, sequence(), info(), isolate(),
426 : osr_helper_, start_source_position_, jump_optimization_info_,
427 2141408 : info()->GetPoisoningMitigationLevel(), assembler_options_,
428 6424160 : info_->builtin_index(), std::move(buffer));
429 2141344 : }
430 :
431 6149381 : void BeginPhaseKind(const char* phase_kind_name) {
432 6149381 : if (pipeline_statistics() != nullptr) {
433 0 : pipeline_statistics()->BeginPhaseKind(phase_kind_name);
434 : }
435 : }
436 :
437 2598307 : void EndPhaseKind() {
438 2598307 : if (pipeline_statistics() != nullptr) {
439 0 : pipeline_statistics()->EndPhaseKind();
440 : }
441 : }
442 :
443 : const char* debug_name() const { return debug_name_.get(); }
444 :
445 : private:
446 : Isolate* const isolate_;
447 : wasm::WasmEngine* const wasm_engine_ = nullptr;
448 : AccountingAllocator* const allocator_;
449 : OptimizedCompilationInfo* const info_;
450 : std::unique_ptr<char[]> debug_name_;
451 : bool may_have_unverifiable_graph_ = true;
452 : ZoneStats* const zone_stats_;
453 : PipelineStatistics* pipeline_statistics_ = nullptr;
454 : bool compilation_failed_ = false;
455 : bool verify_graph_ = false;
456 : int start_source_position_ = kNoSourcePosition;
457 : base::Optional<OsrHelper> osr_helper_;
458 : MaybeHandle<Code> code_;
459 : CodeGenerator* code_generator_ = nullptr;
460 : Typer* typer_ = nullptr;
461 : Typer::Flags typer_flags_ = Typer::kNoFlags;
462 :
463 : // All objects in the following group of fields are allocated in graph_zone_.
464 : // They are all set to nullptr when the graph_zone_ is destroyed.
465 : ZoneStats::Scope graph_zone_scope_;
466 : Zone* graph_zone_ = nullptr;
467 : Graph* graph_ = nullptr;
468 : SourcePositionTable* source_positions_ = nullptr;
469 : NodeOriginTable* node_origins_ = nullptr;
470 : SimplifiedOperatorBuilder* simplified_ = nullptr;
471 : MachineOperatorBuilder* machine_ = nullptr;
472 : CommonOperatorBuilder* common_ = nullptr;
473 : JSOperatorBuilder* javascript_ = nullptr;
474 : JSGraph* jsgraph_ = nullptr;
475 : MachineGraph* mcgraph_ = nullptr;
476 : Schedule* schedule_ = nullptr;
477 :
478 : // All objects in the following group of fields are allocated in
479 : // instruction_zone_. They are all set to nullptr when the instruction_zone_
480 : // is destroyed.
481 : ZoneStats::Scope instruction_zone_scope_;
482 : Zone* instruction_zone_;
483 : InstructionSequence* sequence_ = nullptr;
484 :
485 : // All objects in the following group of fields are allocated in
486 : // codegen_zone_. They are all set to nullptr when the codegen_zone_
487 : // is destroyed.
488 : ZoneStats::Scope codegen_zone_scope_;
489 : Zone* codegen_zone_;
490 : CompilationDependencies* dependencies_ = nullptr;
491 : JSHeapBroker* broker_ = nullptr;
492 : Frame* frame_ = nullptr;
493 :
494 : // All objects in the following group of fields are allocated in
495 : // register_allocation_zone_. They are all set to nullptr when the zone is
496 : // destroyed.
497 : ZoneStats::Scope register_allocation_zone_scope_;
498 : Zone* register_allocation_zone_;
499 : RegisterAllocationData* register_allocation_data_ = nullptr;
500 :
501 : // Basic block profiling support.
502 : BasicBlockProfiler::Data* profiler_data_ = nullptr;
503 :
504 : // Source position output for --trace-turbo.
505 : std::string source_position_output_;
506 :
507 : JumpOptimizationInfo* jump_optimization_info_ = nullptr;
508 : AssemblerOptions assembler_options_;
509 :
510 : DISALLOW_COPY_AND_ASSIGN(PipelineData);
511 : };
512 :
513 : class PipelineImpl final {
514 : public:
515 2169954 : explicit PipelineImpl(PipelineData* data) : data_(data) {}
516 :
517 : // Helpers for executing pipeline phases.
518 : template <typename Phase, typename... Args>
519 : void Run(Args&&... args);
520 :
521 : // Step A. Run the graph creation and initial optimization passes.
522 : bool CreateGraph();
523 :
524 : // B. Run the concurrent optimization passes.
525 : bool OptimizeGraph(Linkage* linkage);
526 :
527 : // Substep B.1. Produce a scheduled graph.
528 : void ComputeScheduledGraph();
529 :
530 : // Substep B.2. Select instructions from a scheduled graph.
531 : bool SelectInstructions(Linkage* linkage);
532 :
533 : // Step C. Run the code assembly pass.
534 : void AssembleCode(Linkage* linkage,
535 : std::unique_ptr<AssemblerBuffer> buffer = {});
536 :
537 : // Step D. Run the code finalization pass.
538 : MaybeHandle<Code> FinalizeCode(bool retire_broker = true);
539 :
540 : // Step E. Install any code dependencies.
541 : bool CommitDependencies(Handle<Code> code);
542 :
543 : void VerifyGeneratedCodeIsIdempotent();
544 : void RunPrintAndVerify(const char* phase, bool untyped = false);
545 : bool SelectInstructionsAndAssemble(CallDescriptor* call_descriptor);
546 : MaybeHandle<Code> GenerateCode(CallDescriptor* call_descriptor);
547 : void AllocateRegisters(const RegisterConfiguration* config,
548 : CallDescriptor* call_descriptor, bool run_verifier);
549 :
550 : OptimizedCompilationInfo* info() const;
551 : Isolate* isolate() const;
552 : CodeGenerator* code_generator() const;
553 :
554 : private:
555 : PipelineData* const data_;
556 : };
557 :
558 : namespace {
559 :
560 0 : void PrintFunctionSource(OptimizedCompilationInfo* info, Isolate* isolate,
561 : int source_id, Handle<SharedFunctionInfo> shared) {
562 0 : if (!shared->script()->IsUndefined(isolate)) {
563 0 : Handle<Script> script(Script::cast(shared->script()), isolate);
564 :
565 0 : if (!script->source()->IsUndefined(isolate)) {
566 0 : CodeTracer::Scope tracing_scope(isolate->GetCodeTracer());
567 0 : Object source_name = script->name();
568 0 : OFStream os(tracing_scope.file());
569 0 : os << "--- FUNCTION SOURCE (";
570 0 : if (source_name->IsString()) {
571 0 : os << String::cast(source_name)->ToCString().get() << ":";
572 : }
573 0 : os << shared->DebugName()->ToCString().get() << ") id{";
574 0 : os << info->optimization_id() << "," << source_id << "} start{";
575 0 : os << shared->StartPosition() << "} ---\n";
576 : {
577 : DisallowHeapAllocation no_allocation;
578 0 : int start = shared->StartPosition();
579 0 : int len = shared->EndPosition() - start;
580 : SubStringRange source(String::cast(script->source()), no_allocation,
581 : start, len);
582 0 : for (const auto& c : source) {
583 0 : os << AsReversiblyEscapedUC16(c);
584 : }
585 : }
586 :
587 0 : os << "\n--- END ---\n";
588 : }
589 : }
590 0 : }
591 :
592 : // Print information for the given inlining: which function was inlined and
593 : // where the inlining occurred.
594 0 : void PrintInlinedFunctionInfo(
595 0 : OptimizedCompilationInfo* info, Isolate* isolate, int source_id,
596 : int inlining_id, const OptimizedCompilationInfo::InlinedFunctionHolder& h) {
597 0 : CodeTracer::Scope tracing_scope(isolate->GetCodeTracer());
598 0 : OFStream os(tracing_scope.file());
599 0 : os << "INLINE (" << h.shared_info->DebugName()->ToCString().get() << ") id{"
600 0 : << info->optimization_id() << "," << source_id << "} AS " << inlining_id
601 0 : << " AT ";
602 0 : const SourcePosition position = h.position.position;
603 0 : if (position.IsKnown()) {
604 0 : os << "<" << position.InliningId() << ":" << position.ScriptOffset() << ">";
605 : } else {
606 0 : os << "<?>";
607 : }
608 : os << std::endl;
609 0 : }
610 :
611 : // Print the source of all functions that participated in this optimizing
612 : // compilation. For inlined functions print source position of their inlining.
613 0 : void PrintParticipatingSource(OptimizedCompilationInfo* info,
614 : Isolate* isolate) {
615 : AllowDeferredHandleDereference allow_deference_for_print_code;
616 :
617 0 : SourceIdAssigner id_assigner(info->inlined_functions().size());
618 0 : PrintFunctionSource(info, isolate, -1, info->shared_info());
619 0 : const auto& inlined = info->inlined_functions();
620 0 : for (unsigned id = 0; id < inlined.size(); id++) {
621 0 : const int source_id = id_assigner.GetIdFor(inlined[id].shared_info);
622 0 : PrintFunctionSource(info, isolate, source_id, inlined[id].shared_info);
623 0 : PrintInlinedFunctionInfo(info, isolate, source_id, id, inlined[id]);
624 0 : }
625 0 : }
626 :
627 : // Print the code after compiling it.
628 1579163 : void PrintCode(Isolate* isolate, Handle<Code> code,
629 : OptimizedCompilationInfo* info) {
630 1579163 : if (FLAG_print_opt_source && info->IsOptimizing()) {
631 0 : PrintParticipatingSource(info, isolate);
632 : }
633 :
634 : #ifdef ENABLE_DISASSEMBLER
635 : AllowDeferredHandleDereference allow_deference_for_print_code;
636 : bool print_code =
637 : FLAG_print_code ||
638 : (info->IsOptimizing() && FLAG_print_opt_code &&
639 : info->shared_info()->PassesFilter(FLAG_print_opt_code_filter));
640 : if (print_code) {
641 : std::unique_ptr<char[]> debug_name = info->GetDebugName();
642 : CodeTracer::Scope tracing_scope(isolate->GetCodeTracer());
643 : OFStream os(tracing_scope.file());
644 :
645 : // Print the source code if available.
646 : bool print_source = code->kind() == Code::OPTIMIZED_FUNCTION;
647 : if (print_source) {
648 : Handle<SharedFunctionInfo> shared = info->shared_info();
649 : if (shared->script()->IsScript() &&
650 : !Script::cast(shared->script())->source()->IsUndefined(isolate)) {
651 : os << "--- Raw source ---\n";
652 : StringCharacterStream stream(
653 : String::cast(Script::cast(shared->script())->source()),
654 : shared->StartPosition());
655 : // fun->end_position() points to the last character in the stream. We
656 : // need to compensate by adding one to calculate the length.
657 : int source_len = shared->EndPosition() - shared->StartPosition() + 1;
658 : for (int i = 0; i < source_len; i++) {
659 : if (stream.HasMore()) {
660 : os << AsReversiblyEscapedUC16(stream.GetNext());
661 : }
662 : }
663 : os << "\n\n";
664 : }
665 : }
666 : if (info->IsOptimizing()) {
667 : os << "--- Optimized code ---\n"
668 : << "optimization_id = " << info->optimization_id() << "\n";
669 : } else {
670 : os << "--- Code ---\n";
671 : }
672 : if (print_source) {
673 : Handle<SharedFunctionInfo> shared = info->shared_info();
674 : os << "source_position = " << shared->StartPosition() << "\n";
675 : }
676 : code->Disassemble(debug_name.get(), os);
677 : os << "--- End code ---\n";
678 : }
679 : #endif // ENABLE_DISASSEMBLER
680 1579163 : }
681 :
682 2360132 : void TraceSchedule(OptimizedCompilationInfo* info, PipelineData* data,
683 : Schedule* schedule, const char* phase_name) {
684 2360129 : if (info->trace_turbo_json_enabled()) {
685 : AllowHandleDereference allow_deref;
686 3 : TurboJsonFile json_of(info, std::ios_base::app);
687 3 : json_of << "{\"name\":\"" << phase_name << "\",\"type\":\"schedule\""
688 3 : << ",\"data\":\"";
689 6 : std::stringstream schedule_stream;
690 3 : schedule_stream << *schedule;
691 : std::string schedule_string(schedule_stream.str());
692 6953 : for (const auto& c : schedule_string) {
693 13894 : json_of << AsEscapedUC16ForJSON(c);
694 : }
695 6 : json_of << "\"},\n";
696 : }
697 2360129 : if (info->trace_turbo_graph_enabled() || FLAG_trace_turbo_scheduler) {
698 : AllowHandleDereference allow_deref;
699 3 : CodeTracer::Scope tracing_scope(data->GetCodeTracer());
700 6 : OFStream os(tracing_scope.file());
701 3 : os << "-- Schedule --------------------------------------\n" << *schedule;
702 : }
703 2360129 : }
704 :
705 :
706 : class SourcePositionWrapper final : public Reducer {
707 : public:
708 : SourcePositionWrapper(Reducer* reducer, SourcePositionTable* table)
709 477614 : : reducer_(reducer), table_(table) {}
710 0 : ~SourcePositionWrapper() final = default;
711 :
712 1137 : const char* reducer_name() const override { return reducer_->reducer_name(); }
713 :
714 40861446 : Reduction Reduce(Node* node) final {
715 40861446 : SourcePosition const pos = table_->GetSourcePosition(node);
716 40861446 : SourcePositionTable::Scope position(table_, pos);
717 81722892 : return reducer_->Reduce(node);
718 : }
719 :
720 610053 : void Finalize() final { reducer_->Finalize(); }
721 :
722 : private:
723 : Reducer* const reducer_;
724 : SourcePositionTable* const table_;
725 :
726 : DISALLOW_COPY_AND_ASSIGN(SourcePositionWrapper);
727 : };
728 :
729 : class NodeOriginsWrapper final : public Reducer {
730 : public:
731 : NodeOriginsWrapper(Reducer* reducer, NodeOriginTable* table)
732 43 : : reducer_(reducer), table_(table) {}
733 0 : ~NodeOriginsWrapper() final = default;
734 :
735 1137 : const char* reducer_name() const override { return reducer_->reducer_name(); }
736 :
737 1137 : Reduction Reduce(Node* node) final {
738 1137 : NodeOriginTable::Scope position(table_, reducer_name(), node);
739 2274 : return reducer_->Reduce(node);
740 : }
741 :
742 51 : void Finalize() final { reducer_->Finalize(); }
743 :
744 : private:
745 : Reducer* const reducer_;
746 : NodeOriginTable* const table_;
747 :
748 : DISALLOW_COPY_AND_ASSIGN(NodeOriginsWrapper);
749 : };
750 :
751 41805002 : void AddReducer(PipelineData* data, GraphReducer* graph_reducer,
752 : Reducer* reducer) {
753 20424844 : if (data->info()->is_source_positions_enabled()) {
754 477614 : void* const buffer = data->graph_zone()->New(sizeof(SourcePositionWrapper));
755 : SourcePositionWrapper* const wrapper =
756 477614 : new (buffer) SourcePositionWrapper(reducer, data->source_positions());
757 : reducer = wrapper;
758 : }
759 20424844 : if (data->info()->trace_turbo_json_enabled()) {
760 43 : void* const buffer = data->graph_zone()->New(sizeof(NodeOriginsWrapper));
761 : NodeOriginsWrapper* const wrapper =
762 43 : new (buffer) NodeOriginsWrapper(reducer, data->node_origins());
763 : reducer = wrapper;
764 : }
765 :
766 20424844 : graph_reducer->AddReducer(reducer);
767 20424851 : }
768 :
// RAII scope around the execution of one pipeline phase. It opens, in order:
// a statistics PhaseScope (disabled by passing a null statistics object when
// |phase_name| is null), a temporary zone for the phase's allocations, and a
// node-origin phase scope so newly created nodes are attributed to the phase.
class PipelineRunScope {
 public:
  PipelineRunScope(PipelineData* data, const char* phase_name)
      : phase_scope_(
            phase_name == nullptr ? nullptr : data->pipeline_statistics(),
            phase_name),
        zone_scope_(data->zone_stats(), ZONE_NAME),
        origin_scope_(data->node_origins(), phase_name) {}

  // Temporary zone that lives exactly as long as this scope (i.e. the phase).
  Zone* zone() { return zone_scope_.zone(); }

 private:
  PhaseScope phase_scope_;
  ZoneStats::Scope zone_scope_;
  NodeOriginTable::PhaseScope origin_scope_;
};
785 :
786 475310 : PipelineStatistics* CreatePipelineStatistics(Handle<Script> script,
787 : OptimizedCompilationInfo* info,
788 : Isolate* isolate,
789 : ZoneStats* zone_stats) {
790 : PipelineStatistics* pipeline_statistics = nullptr;
791 :
792 475310 : if (FLAG_turbo_stats || FLAG_turbo_stats_nvp) {
793 : pipeline_statistics =
794 0 : new PipelineStatistics(info, isolate->GetTurboStatistics(), zone_stats);
795 0 : pipeline_statistics->BeginPhaseKind("initializing");
796 : }
797 :
798 475311 : if (info->trace_turbo_json_enabled()) {
799 1 : TurboJsonFile json_of(info, std::ios_base::trunc);
800 1 : json_of << "{\"function\" : ";
801 : JsonPrintFunctionSource(json_of, -1, info->GetDebugName(), script, isolate,
802 2 : info->shared_info());
803 1 : json_of << ",\n\"phases\":[";
804 : }
805 :
806 475311 : return pipeline_statistics;
807 : }
808 :
809 496304 : PipelineStatistics* CreatePipelineStatistics(
810 : wasm::WasmEngine* wasm_engine, wasm::FunctionBody function_body,
811 : const wasm::WasmModule* wasm_module, OptimizedCompilationInfo* info,
812 : ZoneStats* zone_stats) {
813 : PipelineStatistics* pipeline_statistics = nullptr;
814 :
815 496304 : if (FLAG_turbo_stats_wasm) {
816 : pipeline_statistics = new PipelineStatistics(
817 0 : info, wasm_engine->GetOrCreateTurboStatistics(), zone_stats);
818 0 : pipeline_statistics->BeginPhaseKind("initializing");
819 : }
820 :
821 496315 : if (info->trace_turbo_json_enabled()) {
822 0 : TurboJsonFile json_of(info, std::ios_base::trunc);
823 0 : std::unique_ptr<char[]> function_name = info->GetDebugName();
824 0 : json_of << "{\"function\":\"" << function_name.get() << "\", \"source\":\"";
825 0 : AccountingAllocator allocator;
826 0 : std::ostringstream disassembly;
827 : std::vector<int> source_positions;
828 : wasm::PrintRawWasmCode(&allocator, function_body, wasm_module,
829 0 : wasm::kPrintLocals, disassembly, &source_positions);
830 0 : for (const auto& c : disassembly.str()) {
831 0 : json_of << AsEscapedUC16ForJSON(c);
832 : }
833 0 : json_of << "\",\n\"sourceLineToBytecodePosition\" : [";
834 : bool insert_comma = false;
835 0 : for (auto val : source_positions) {
836 0 : if (insert_comma) {
837 0 : json_of << ", ";
838 : }
839 0 : json_of << val;
840 : insert_comma = true;
841 : }
842 0 : json_of << "],\n\"phases\":[";
843 : }
844 :
845 496315 : return pipeline_statistics;
846 : }
847 :
848 : } // namespace
849 :
// Compilation job driving the full TurboFan pipeline for one JSFunction:
// PrepareJobImpl builds the graph, ExecuteJobImpl optimizes and assembles,
// FinalizeJobImpl installs the code object.
class PipelineCompilationJob final : public OptimizedCompilationJob {
 public:
  PipelineCompilationJob(Isolate* isolate,
                         Handle<SharedFunctionInfo> shared_info,
                         Handle<JSFunction> function)
      // Note that the OptimizedCompilationInfo is not initialized at the time
      // we pass it to the CompilationJob constructor, but it is not
      // dereferenced there.
      : OptimizedCompilationJob(
            function->GetIsolate()->stack_guard()->real_climit(),
            &compilation_info_, "TurboFan"),
        zone_(function->GetIsolate()->allocator(), ZONE_NAME),
        zone_stats_(function->GetIsolate()->allocator()),
        compilation_info_(&zone_, function->GetIsolate(), shared_info,
                          function),
        pipeline_statistics_(CreatePipelineStatistics(
            handle(Script::cast(shared_info->script()), isolate),
            compilation_info(), function->GetIsolate(), &zone_stats_)),
        data_(&zone_stats_, function->GetIsolate(), compilation_info(),
              pipeline_statistics_.get()),
        pipeline_(&data_),
        linkage_(nullptr) {}

 protected:
  Status PrepareJobImpl(Isolate* isolate) final;
  Status ExecuteJobImpl() final;
  Status FinalizeJobImpl(Isolate* isolate) final;

  // Registers weak object to optimized code dependencies.
  void RegisterWeakObjectsInOptimizedCode(Handle<Code> code, Isolate* isolate);

 private:
  // NOTE: declaration order matters — members are constructed top to bottom
  // and several later members reference earlier ones (see the init list).
  Zone zone_;
  ZoneStats zone_stats_;
  OptimizedCompilationInfo compilation_info_;
  std::unique_ptr<PipelineStatistics> pipeline_statistics_;  // May be null.
  PipelineData data_;
  PipelineImpl pipeline_;
  Linkage* linkage_;  // Set in PrepareJobImpl; allocated in the info's zone.

  DISALLOW_COPY_AND_ASSIGN(PipelineCompilationJob);
};
892 :
// Main-thread preparation step: bails out on oversized functions, translates
// global flags into per-compilation settings, sets up the Linkage, and builds
// the initial graph.
PipelineCompilationJob::Status PipelineCompilationJob::PrepareJobImpl(
    Isolate* isolate) {
  // Refuse to optimize functions whose bytecode exceeds the TurboFan limit.
  if (compilation_info()->bytecode_array()->length() >
      kMaxBytecodeSizeForTurbofan) {
    return AbortOptimization(BailoutReason::kFunctionTooBig);
  }

  // Translate command-line flags into OptimizedCompilationInfo markers that
  // the pipeline phases consult later.
  if (!FLAG_always_opt) {
    compilation_info()->MarkAsBailoutOnUninitialized();
  }
  if (FLAG_turbo_loop_peeling) {
    compilation_info()->MarkAsLoopPeelingEnabled();
  }
  if (FLAG_turbo_inlining) {
    compilation_info()->MarkAsInliningEnabled();
  }
  if (FLAG_inline_accessors) {
    compilation_info()->MarkAsAccessorInliningEnabled();
  }

  // This is the bottleneck for computing and setting poisoning level in the
  // optimizing compiler.
  PoisoningMitigationLevel load_poisoning =
      PoisoningMitigationLevel::kDontPoison;
  if (FLAG_untrusted_code_mitigations) {
    // For full mitigations, this can be changed to
    // PoisoningMitigationLevel::kPoisonAll.
    load_poisoning = PoisoningMitigationLevel::kPoisonCriticalOnly;
  }
  compilation_info()->SetPoisoningMitigationLevel(load_poisoning);

  if (FLAG_turbo_allocation_folding) {
    compilation_info()->MarkAsAllocationFoldingEnabled();
  }

  // NOTE(review): the one-closure-cell map check presumably means this is the
  // only closure sharing the feedback cell, making context specialization
  // safe — confirm against the feedback-cell documentation.
  if (compilation_info()->closure()->raw_feedback_cell()->map() ==
      ReadOnlyRoots(isolate).one_closure_cell_map()) {
    compilation_info()->MarkAsFunctionContextSpecializing();
  }

  if (compilation_info()->is_source_positions_enabled()) {
    SharedFunctionInfo::EnsureSourcePositionsAvailable(
        isolate, compilation_info()->shared_info());
  }

  data_.set_start_source_position(
      compilation_info()->shared_info()->StartPosition());

  // The Linkage is placement-allocated in the compilation info's zone so it
  // outlives this method and is freed with the rest of the compilation.
  linkage_ = new (compilation_info()->zone()) Linkage(
      Linkage::ComputeIncoming(compilation_info()->zone(), compilation_info()));

  if (!pipeline_.CreateGraph()) {
    if (isolate->has_pending_exception()) return FAILED;  // Stack overflowed.
    return AbortOptimization(BailoutReason::kGraphBuildingFailed);
  }

  if (compilation_info()->is_osr()) data_.InitializeOsrHelper();

  // Make sure that we have generated the deopt entries code. This is in order
  // to avoid triggering the generation of deopt entries later during code
  // assembly.
  Deoptimizer::EnsureCodeForDeoptimizationEntries(isolate);

  return SUCCEEDED;
}
958 :
// Execution step: run graph optimization, then assemble machine code.
// Fails (without a bailout reason here) if optimization bails out.
PipelineCompilationJob::Status PipelineCompilationJob::ExecuteJobImpl() {
  if (!pipeline_.OptimizeGraph(linkage_)) return FAILED;
  pipeline_.AssembleCode(linkage_);
  return SUCCEEDED;
}
964 :
// Main-thread finalization: materialize the Code object, commit its
// compilation dependencies, and install it on the compilation info and the
// native context's optimized-code list.
PipelineCompilationJob::Status PipelineCompilationJob::FinalizeJobImpl(
    Isolate* isolate) {
  MaybeHandle<Code> maybe_code = pipeline_.FinalizeCode();
  Handle<Code> code;
  if (!maybe_code.ToHandle(&code)) {
    // No code and no recorded bailout reason means code generation itself
    // failed; report that explicitly. Otherwise keep the existing reason.
    if (compilation_info()->bailout_reason() == BailoutReason::kNoReason) {
      return AbortOptimization(BailoutReason::kCodeGenerationFailed);
    }
    return FAILED;
  }
  // A dependency changed between compile and commit; the function may be
  // optimized again later, so retry rather than abort.
  if (!pipeline_.CommitDependencies(code)) {
    return RetryOptimization(BailoutReason::kBailedOutDueToDependencyChange);
  }

  compilation_info()->SetCode(code);
  compilation_info()->native_context()->AddOptimizedCode(*code);
  RegisterWeakObjectsInOptimizedCode(code, isolate);
  return SUCCEEDED;
}
984 :
985 453149 : void PipelineCompilationJob::RegisterWeakObjectsInOptimizedCode(
986 : Handle<Code> code, Isolate* isolate) {
987 : DCHECK(code->is_optimized_code());
988 : std::vector<Handle<Map>> maps;
989 : {
990 : DisallowHeapAllocation no_gc;
991 : int const mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
992 5508298 : for (RelocIterator it(*code, mode_mask); !it.done(); it.next()) {
993 5055147 : RelocInfo::Mode mode = it.rinfo()->rmode();
994 10110296 : if (mode == RelocInfo::EMBEDDED_OBJECT &&
995 10110293 : code->IsWeakObjectInOptimizedCode(it.rinfo()->target_object())) {
996 2664987 : Handle<HeapObject> object(HeapObject::cast(it.rinfo()->target_object()),
997 : isolate);
998 5329974 : if (object->IsMap()) {
999 145645 : maps.push_back(Handle<Map>::cast(object));
1000 : }
1001 : }
1002 : }
1003 : }
1004 1051943 : for (Handle<Map> map : maps) {
1005 145645 : isolate->heap()->AddRetainedMap(map);
1006 : }
1007 453149 : code->set_can_have_weak_objects(true);
1008 453149 : }
1009 :
// Runs a single pipeline phase: opens a PipelineRunScope (statistics, a
// phase-local temporary zone, node-origin attribution) and invokes
// Phase::Run with the scope's zone plus any extra arguments.
template <typename Phase, typename... Args>
void PipelineImpl::Run(Args&&... args) {
  PipelineRunScope scope(this->data_, Phase::phase_name());
  Phase phase;
  phase.Run(this->data_, scope.zone(), std::forward<Args>(args)...);
}
1016 :
// Builds the initial TurboFan graph from the function's bytecode via
// BytecodeGraphBuilder.
struct GraphBuilderPhase {
  static const char* phase_name() { return "bytecode graph builder"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    JSTypeHintLowering::Flags flags = JSTypeHintLowering::kNoFlags;
    if (data->info()->is_bailout_on_uninitialized()) {
      flags |= JSTypeHintLowering::kBailoutOnUninitialized;
    }
    // The outermost function gets a call frequency of 1; inlinees are scaled
    // relative to it elsewhere.
    CallFrequency frequency = CallFrequency(1.0f);
    BytecodeGraphBuilder graph_builder(
        temp_zone, data->info()->bytecode_array(), data->info()->shared_info(),
        handle(data->info()->closure()->feedback_vector(), data->isolate()),
        data->info()->osr_offset(), data->jsgraph(), frequency,
        data->source_positions(), data->native_context(),
        SourcePosition::kNotInlined, flags, true,
        data->info()->is_analyze_environment_liveness());
    graph_builder.CreateGraph();
  }
};
1036 :
1037 : namespace {
1038 :
1039 26433 : Maybe<OuterContext> GetModuleContext(Handle<JSFunction> closure) {
1040 26433 : Context current = closure->context();
1041 : size_t distance = 0;
1042 78393 : while (!current->IsNativeContext()) {
1043 25532 : if (current->IsModuleContext()) {
1044 : return Just(
1045 : OuterContext(handle(current, current->GetIsolate()), distance));
1046 : }
1047 25527 : current = current->previous();
1048 25527 : distance++;
1049 : }
1050 : return Nothing<OuterContext>();
1051 : }
1052 :
1053 456730 : Maybe<OuterContext> ChooseSpecializationContext(
1054 : Isolate* isolate, OptimizedCompilationInfo* info) {
1055 456730 : if (info->is_function_context_specializing()) {
1056 : DCHECK(info->has_context());
1057 430297 : return Just(OuterContext(handle(info->context(), isolate), 0));
1058 : }
1059 26433 : return GetModuleContext(info->closure());
1060 : }
1061 :
1062 : } // anonymous namespace
1063 :
// Inlining phase: runs call reduction, context/native-context
// specialization, intrinsic lowering and the inlining heuristic together
// with general cleanup reducers over the graph.
struct InliningPhase {
  static const char* phase_name() { return "inlining"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    Isolate* isolate = data->isolate();
    OptimizedCompilationInfo* info = data->info();
    GraphReducer graph_reducer(temp_zone, data->graph(),
                               data->jsgraph()->Dead());
    DeadCodeElimination dead_code_elimination(&graph_reducer, data->graph(),
                                              data->common(), temp_zone);
    CheckpointElimination checkpoint_elimination(&graph_reducer);
    CommonOperatorReducer common_reducer(&graph_reducer, data->graph(),
                                         data->broker(), data->common(),
                                         data->machine(), temp_zone);
    JSCallReducer call_reducer(&graph_reducer, data->jsgraph(), data->broker(),
                               data->info()->is_bailout_on_uninitialized()
                                   ? JSCallReducer::kBailoutOnUninitialized
                                   : JSCallReducer::kNoFlags,
                               data->dependencies());
    JSContextSpecialization context_specialization(
        &graph_reducer, data->jsgraph(), data->broker(),
        ChooseSpecializationContext(isolate, data->info()),
        data->info()->is_function_context_specializing()
            ? data->info()->closure()
            : MaybeHandle<JSFunction>());
    JSNativeContextSpecialization::Flags flags =
        JSNativeContextSpecialization::kNoFlags;
    if (data->info()->is_accessor_inlining_enabled()) {
      flags |= JSNativeContextSpecialization::kAccessorInliningEnabled;
    }
    if (data->info()->is_bailout_on_uninitialized()) {
      flags |= JSNativeContextSpecialization::kBailoutOnUninitialized;
    }
    // Passing the OptimizedCompilationInfo's shared zone here as
    // JSNativeContextSpecialization allocates out-of-heap objects
    // that need to live until code generation.
    JSNativeContextSpecialization native_context_specialization(
        &graph_reducer, data->jsgraph(), data->broker(), flags,
        data->native_context(), data->dependencies(), temp_zone, info->zone());
    JSInliningHeuristic inlining(&graph_reducer,
                                 data->info()->is_inlining_enabled()
                                     ? JSInliningHeuristic::kGeneralInlining
                                     : JSInliningHeuristic::kRestrictedInlining,
                                 temp_zone, data->info(), data->jsgraph(),
                                 data->broker(), data->source_positions());
    JSIntrinsicLowering intrinsic_lowering(&graph_reducer, data->jsgraph());
    // NOTE: registration order below is deliberate; do not reorder without
    // understanding the interaction between these reducers.
    AddReducer(data, &graph_reducer, &dead_code_elimination);
    AddReducer(data, &graph_reducer, &checkpoint_elimination);
    AddReducer(data, &graph_reducer, &common_reducer);
    AddReducer(data, &graph_reducer, &native_context_specialization);
    AddReducer(data, &graph_reducer, &context_specialization);
    AddReducer(data, &graph_reducer, &intrinsic_lowering);
    AddReducer(data, &graph_reducer, &call_reducer);
    AddReducer(data, &graph_reducer, &inlining);
    graph_reducer.ReduceGraph();
  }
};
1121 :
1122 :
// Runs the Typer over the graph, seeded from the cached root nodes, with
// loop induction-variable analysis feeding the typing when enabled.
struct TyperPhase {
  static const char* phase_name() { return "typer"; }

  void Run(PipelineData* data, Zone* temp_zone, Typer* typer) {
    NodeVector roots(temp_zone);
    data->jsgraph()->GetCachedNodes(&roots);

    // Make sure we always type True and False. Needed for escape analysis.
    roots.push_back(data->jsgraph()->TrueConstant());
    roots.push_back(data->jsgraph()->FalseConstant());

    LoopVariableOptimizer induction_vars(data->jsgraph()->graph(),
                                         data->common(), temp_zone);
    if (FLAG_turbo_loop_variable) induction_vars.Run();
    typer->Run(roots, &induction_vars);
  }
};
1140 :
// Strips type information from all nodes: first from the cached roots, then
// from every reachable node via a graph-wide reduction.
struct UntyperPhase {
  static const char* phase_name() { return "untyper"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    // Local reducer that removes the type from any typed node it visits.
    class RemoveTypeReducer final : public Reducer {
     public:
      const char* reducer_name() const override { return "RemoveTypeReducer"; }
      Reduction Reduce(Node* node) final {
        if (NodeProperties::IsTyped(node)) {
          NodeProperties::RemoveType(node);
          return Changed(node);
        }
        return NoChange();
      }
    };

    // Cached nodes may not be reachable from the end node, so untype them
    // explicitly before the graph walk.
    NodeVector roots(temp_zone);
    data->jsgraph()->GetCachedNodes(&roots);
    for (Node* node : roots) {
      NodeProperties::RemoveType(node);
    }

    GraphReducer graph_reducer(temp_zone, data->graph(),
                               data->jsgraph()->Dead());
    RemoveTypeReducer remove_type_reducer;
    AddReducer(data, &graph_reducer, &remove_type_reducer);
    graph_reducer.ReduceGraph();
  }
};
1170 :
// Asks the broker to serialize the standard (well-known) objects so they are
// available off-heap during concurrent compilation.
struct SerializeStandardObjectsPhase {
  static const char* phase_name() { return "serialize standard objects"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    data->broker()->SerializeStandardObjects();
  }
};
1178 :
// Walks the graph with a JSHeapCopyReducer so heap data referenced by graph
// nodes is copied into the broker for use by the concurrent compile.
struct CopyMetadataForConcurrentCompilePhase {
  static const char* phase_name() { return "serialize metadata"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    GraphReducer graph_reducer(temp_zone, data->graph(),
                               data->jsgraph()->Dead());
    JSHeapCopyReducer heap_copy_reducer(data->broker());
    AddReducer(data, &graph_reducer, &heap_copy_reducer);
    graph_reducer.ReduceGraph();

    // Some nodes that are no longer in the graph might still be in the cache.
    NodeVector cached_nodes(temp_zone);
    data->jsgraph()->GetCachedNodes(&cached_nodes);
    for (Node* const node : cached_nodes) graph_reducer.ReduceNode(node);
  }
};
1195 :
1196 : // TODO(turbofan): Move all calls from CopyMetaDataForConcurrentCompilePhase
1197 : // here. Also all the calls to Serialize* methods that are currently sprinkled
1198 : // over inlining will move here as well.
// TODO(turbofan): Move all calls from CopyMetaDataForConcurrentCompilePhase
// here. Also all the calls to Serialize* methods that are currently sprinkled
// over inlining will move here as well.
// Runs the background-compilation serializer over the closure's bytecode.
struct SerializationPhase {
  static const char* phase_name() { return "serialize bytecode"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    SerializerForBackgroundCompilation serializer(data->broker(), temp_zone,
                                                  data->info()->closure());
    serializer.Run();
  }
};
1208 :
// Lowers JS-level operators using the type information computed by the
// typer, combined with create lowering, constant folding and cleanup
// reducers.
struct TypedLoweringPhase {
  static const char* phase_name() { return "typed lowering"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    GraphReducer graph_reducer(temp_zone, data->graph(),
                               data->jsgraph()->Dead());
    DeadCodeElimination dead_code_elimination(&graph_reducer, data->graph(),
                                              data->common(), temp_zone);
    JSCreateLowering create_lowering(&graph_reducer, data->dependencies(),
                                     data->jsgraph(), data->broker(),
                                     temp_zone);
    JSTypedLowering typed_lowering(&graph_reducer, data->jsgraph(),
                                   data->broker(), temp_zone);
    ConstantFoldingReducer constant_folding_reducer(
        &graph_reducer, data->jsgraph(), data->broker());
    TypedOptimization typed_optimization(&graph_reducer, data->dependencies(),
                                         data->jsgraph(), data->broker());
    SimplifiedOperatorReducer simple_reducer(&graph_reducer, data->jsgraph(),
                                             data->broker());
    CheckpointElimination checkpoint_elimination(&graph_reducer);
    CommonOperatorReducer common_reducer(&graph_reducer, data->graph(),
                                         data->broker(), data->common(),
                                         data->machine(), temp_zone);
    // NOTE: registration order below is deliberate.
    AddReducer(data, &graph_reducer, &dead_code_elimination);
    AddReducer(data, &graph_reducer, &create_lowering);
    AddReducer(data, &graph_reducer, &constant_folding_reducer);
    AddReducer(data, &graph_reducer, &typed_lowering);
    AddReducer(data, &graph_reducer, &typed_optimization);
    AddReducer(data, &graph_reducer, &simple_reducer);
    AddReducer(data, &graph_reducer, &checkpoint_elimination);
    AddReducer(data, &graph_reducer, &common_reducer);
    graph_reducer.ReduceGraph();
  }
};
1243 :
1244 :
// Runs escape analysis and then applies its results via the
// EscapeAnalysisReducer (scalar replacement of non-escaping allocations).
struct EscapeAnalysisPhase {
  static const char* phase_name() { return "escape analysis"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    EscapeAnalysis escape_analysis(data->jsgraph(), temp_zone);
    escape_analysis.ReduceGraph();
    GraphReducer reducer(temp_zone, data->graph(), data->jsgraph()->Dead());
    EscapeAnalysisReducer escape_reducer(&reducer, data->jsgraph(),
                                         escape_analysis.analysis_result(),
                                         temp_zone);
    AddReducer(data, &reducer, &escape_reducer);
    reducer.ReduceGraph();
    // TODO(tebbi): Turn this into a debug mode check once we have confidence.
    escape_reducer.VerifyReplacement();
  }
};
1261 :
// Representation selection and lowering of simplified operators to
// machine-level operators.
struct SimplifiedLoweringPhase {
  static const char* phase_name() { return "simplified lowering"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    SimplifiedLowering lowering(data->jsgraph(), data->broker(), temp_zone,
                                data->source_positions(), data->node_origins(),
                                data->info()->GetPoisoningMitigationLevel());
    lowering.LowerAllNodes();
  }
};
1272 :
// Peels the first iteration of inner loops. The graph is trimmed first
// because the loop finder expects a trimmed graph.
struct LoopPeelingPhase {
  static const char* phase_name() { return "loop peeling"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    GraphTrimmer trimmer(temp_zone, data->graph());
    NodeVector roots(temp_zone);
    data->jsgraph()->GetCachedNodes(&roots);
    trimmer.TrimGraph(roots.begin(), roots.end());

    LoopTree* loop_tree =
        LoopFinder::BuildLoopTree(data->jsgraph()->graph(), temp_zone);
    LoopPeeler(data->graph(), data->common(), loop_tree, temp_zone,
               data->source_positions(), data->node_origins())
        .PeelInnerLoopsOfTree();
  }
};
1289 :
// Removes LoopExit markers from the graph (used when loop peeling is off).
struct LoopExitEliminationPhase {
  static const char* phase_name() { return "loop exit elimination"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    LoopPeeler::EliminateLoopExits(data->graph(), temp_zone);
  }
};
1297 :
// Lowers remaining JS-level operators generically (e.g. to builtin or
// runtime calls) via JSGenericLowering.
struct GenericLoweringPhase {
  static const char* phase_name() { return "generic lowering"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    GraphReducer graph_reducer(temp_zone, data->graph(),
                               data->jsgraph()->Dead());
    JSGenericLowering generic_lowering(data->jsgraph());
    AddReducer(data, &graph_reducer, &generic_lowering);
    graph_reducer.ReduceGraph();
  }
};
1309 :
// Early machine-level cleanup: dead code elimination, redundancy
// elimination, machine/common operator reduction and value numbering.
struct EarlyOptimizationPhase {
  static const char* phase_name() { return "early optimization"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    GraphReducer graph_reducer(temp_zone, data->graph(),
                               data->jsgraph()->Dead());
    DeadCodeElimination dead_code_elimination(&graph_reducer, data->graph(),
                                              data->common(), temp_zone);
    SimplifiedOperatorReducer simple_reducer(&graph_reducer, data->jsgraph(),
                                             data->broker());
    RedundancyElimination redundancy_elimination(&graph_reducer, temp_zone);
    ValueNumberingReducer value_numbering(temp_zone, data->graph()->zone());
    MachineOperatorReducer machine_reducer(&graph_reducer, data->jsgraph());
    CommonOperatorReducer common_reducer(&graph_reducer, data->graph(),
                                         data->broker(), data->common(),
                                         data->machine(), temp_zone);
    // NOTE: registration order below is deliberate.
    AddReducer(data, &graph_reducer, &dead_code_elimination);
    AddReducer(data, &graph_reducer, &simple_reducer);
    AddReducer(data, &graph_reducer, &redundancy_elimination);
    AddReducer(data, &graph_reducer, &machine_reducer);
    AddReducer(data, &graph_reducer, &common_reducer);
    AddReducer(data, &graph_reducer, &value_numbering);
    graph_reducer.ReduceGraph();
  }
};
1335 :
// Runs the control-flow optimizer over the graph.
struct ControlFlowOptimizationPhase {
  static const char* phase_name() { return "control flow optimization"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    ControlFlowOptimizer optimizer(data->graph(), data->common(),
                                   data->machine(), temp_zone);
    optimizer.Optimize();
  }
};
1345 :
// Schedules the graph and wires low-level side effects into explicit
// effect/control chains, then cleans up any {Dead} nodes left behind.
struct EffectControlLinearizationPhase {
  static const char* phase_name() { return "effect linearization"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    {
      // The scheduler requires the graphs to be trimmed, so trim now.
      // TODO(jarin) Remove the trimming once the scheduler can handle untrimmed
      // graphs.
      GraphTrimmer trimmer(temp_zone, data->graph());
      NodeVector roots(temp_zone);
      data->jsgraph()->GetCachedNodes(&roots);
      trimmer.TrimGraph(roots.begin(), roots.end());

      // Schedule the graph without node splitting so that we can
      // fix the effect and control flow for nodes with low-level side
      // effects (such as changing representation to tagged or
      // 'floating' allocation regions.)
      Schedule* schedule = Scheduler::ComputeSchedule(temp_zone, data->graph(),
                                                      Scheduler::kTempSchedule);
      if (FLAG_turbo_verify) ScheduleVerifier::Run(schedule);
      TraceSchedule(data->info(), data, schedule,
                    "effect linearization schedule");

      // Array-index masking is only needed when poisoning mitigations are on.
      EffectControlLinearizer::MaskArrayIndexEnable mask_array_index =
          (data->info()->GetPoisoningMitigationLevel() !=
           PoisoningMitigationLevel::kDontPoison)
              ? EffectControlLinearizer::kMaskArrayIndex
              : EffectControlLinearizer::kDoNotMaskArrayIndex;
      // Post-pass for wiring the control/effects
      // - connect allocating representation changes into the control&effect
      //   chains and lower them,
      // - get rid of the region markers,
      // - introduce effect phis and rewire effects to get SSA again.
      EffectControlLinearizer linearizer(
          data->jsgraph(), schedule, temp_zone, data->source_positions(),
          data->node_origins(), mask_array_index);
      linearizer.Run();
    }
    {
      // The {EffectControlLinearizer} might leave {Dead} nodes behind, so we
      // run {DeadCodeElimination} to prune these parts of the graph.
      // Also, the following store-store elimination phase greatly benefits from
      // doing a common operator reducer and dead code elimination just before
      // it, to eliminate conditional deopts with a constant condition.
      GraphReducer graph_reducer(temp_zone, data->graph(),
                                 data->jsgraph()->Dead());
      DeadCodeElimination dead_code_elimination(&graph_reducer, data->graph(),
                                                data->common(), temp_zone);
      CommonOperatorReducer common_reducer(&graph_reducer, data->graph(),
                                           data->broker(), data->common(),
                                           data->machine(), temp_zone);
      AddReducer(data, &graph_reducer, &dead_code_elimination);
      AddReducer(data, &graph_reducer, &common_reducer);
      graph_reducer.ReduceGraph();
    }
  }
};
1403 :
// Removes redundant stores. The graph is trimmed first so the analysis only
// sees live nodes.
struct StoreStoreEliminationPhase {
  static const char* phase_name() { return "store-store elimination"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    GraphTrimmer trimmer(temp_zone, data->graph());
    NodeVector roots(temp_zone);
    data->jsgraph()->GetCachedNodes(&roots);
    trimmer.TrimGraph(roots.begin(), roots.end());

    StoreStoreElimination::Run(data->jsgraph(), temp_zone);
  }
};
1416 :
// Load elimination combined with branch/redundancy/dead-code elimination,
// type narrowing, constant folding and value numbering.
struct LoadEliminationPhase {
  static const char* phase_name() { return "load elimination"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    GraphReducer graph_reducer(temp_zone, data->graph(),
                               data->jsgraph()->Dead());
    BranchElimination branch_condition_elimination(&graph_reducer,
                                                   data->jsgraph(), temp_zone);
    DeadCodeElimination dead_code_elimination(&graph_reducer, data->graph(),
                                              data->common(), temp_zone);
    RedundancyElimination redundancy_elimination(&graph_reducer, temp_zone);
    LoadElimination load_elimination(&graph_reducer, data->jsgraph(),
                                     temp_zone);
    CheckpointElimination checkpoint_elimination(&graph_reducer);
    ValueNumberingReducer value_numbering(temp_zone, data->graph()->zone());
    CommonOperatorReducer common_reducer(&graph_reducer, data->graph(),
                                         data->broker(), data->common(),
                                         data->machine(), temp_zone);
    TypedOptimization typed_optimization(&graph_reducer, data->dependencies(),
                                         data->jsgraph(), data->broker());
    ConstantFoldingReducer constant_folding_reducer(
        &graph_reducer, data->jsgraph(), data->broker());
    TypeNarrowingReducer type_narrowing_reducer(&graph_reducer, data->jsgraph(),
                                                data->broker());
    // NOTE: registration order below is deliberate.
    AddReducer(data, &graph_reducer, &branch_condition_elimination);
    AddReducer(data, &graph_reducer, &dead_code_elimination);
    AddReducer(data, &graph_reducer, &redundancy_elimination);
    AddReducer(data, &graph_reducer, &load_elimination);
    AddReducer(data, &graph_reducer, &type_narrowing_reducer);
    AddReducer(data, &graph_reducer, &constant_folding_reducer);
    AddReducer(data, &graph_reducer, &typed_optimization);
    AddReducer(data, &graph_reducer, &checkpoint_elimination);
    AddReducer(data, &graph_reducer, &common_reducer);
    AddReducer(data, &graph_reducer, &value_numbering);
    graph_reducer.ReduceGraph();
  }
};
1454 :
// Lowers allocations and memory access operations, optionally folding
// adjacent allocations together.
struct MemoryOptimizationPhase {
  static const char* phase_name() { return "memory optimization"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    // The memory optimizer requires the graphs to be trimmed, so trim now.
    GraphTrimmer trimmer(temp_zone, data->graph());
    NodeVector roots(temp_zone);
    data->jsgraph()->GetCachedNodes(&roots);
    trimmer.TrimGraph(roots.begin(), roots.end());

    // Optimize allocations and load/store operations.
    MemoryOptimizer optimizer(
        data->jsgraph(), temp_zone, data->info()->GetPoisoningMitigationLevel(),
        data->info()->is_allocation_folding_enabled()
            ? MemoryOptimizer::AllocationFolding::kDoAllocationFolding
            : MemoryOptimizer::AllocationFolding::kDontAllocationFolding);
    optimizer.Optimize();
  }
};
1474 :
// Late machine-level cleanup pass: branch/dead-code elimination, machine and
// common operator reduction, select lowering and value numbering.
struct LateOptimizationPhase {
  static const char* phase_name() { return "late optimization"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    GraphReducer graph_reducer(temp_zone, data->graph(),
                               data->jsgraph()->Dead());
    BranchElimination branch_condition_elimination(&graph_reducer,
                                                   data->jsgraph(), temp_zone);
    DeadCodeElimination dead_code_elimination(&graph_reducer, data->graph(),
                                              data->common(), temp_zone);
    ValueNumberingReducer value_numbering(temp_zone, data->graph()->zone());
    MachineOperatorReducer machine_reducer(&graph_reducer, data->jsgraph());
    CommonOperatorReducer common_reducer(&graph_reducer, data->graph(),
                                         data->broker(), data->common(),
                                         data->machine(), temp_zone);
    SelectLowering select_lowering(data->jsgraph()->graph(),
                                   data->jsgraph()->common());
    // NOTE: registration order below is deliberate.
    AddReducer(data, &graph_reducer, &branch_condition_elimination);
    AddReducer(data, &graph_reducer, &dead_code_elimination);
    AddReducer(data, &graph_reducer, &machine_reducer);
    AddReducer(data, &graph_reducer, &common_reducer);
    AddReducer(data, &graph_reducer, &select_lowering);
    AddReducer(data, &graph_reducer, &value_numbering);
    graph_reducer.ReduceGraph();
  }
};
1501 :
1502 : struct CsaOptimizationPhase {
1503 : static const char* phase_name() { return "csa optimization"; }
1504 :
1505 398856 : void Run(PipelineData* data, Zone* temp_zone) {
1506 : GraphReducer graph_reducer(temp_zone, data->graph(),
1507 132952 : data->jsgraph()->Dead());
1508 : BranchElimination branch_condition_elimination(&graph_reducer,
1509 132952 : data->jsgraph(), temp_zone);
1510 : DeadCodeElimination dead_code_elimination(&graph_reducer, data->graph(),
1511 66476 : data->common(), temp_zone);
1512 132952 : MachineOperatorReducer machine_reducer(&graph_reducer, data->jsgraph());
1513 : CommonOperatorReducer common_reducer(&graph_reducer, data->graph(),
1514 : data->broker(), data->common(),
1515 66476 : data->machine(), temp_zone);
1516 66476 : AddReducer(data, &graph_reducer, &branch_condition_elimination);
1517 66476 : AddReducer(data, &graph_reducer, &dead_code_elimination);
1518 66476 : AddReducer(data, &graph_reducer, &machine_reducer);
1519 66476 : AddReducer(data, &graph_reducer, &common_reducer);
1520 132952 : graph_reducer.ReduceGraph();
1521 66476 : }
1522 : };
1523 :
1524 : struct EarlyGraphTrimmingPhase {
1525 : static const char* phase_name() { return "early trimming"; }
1526 913458 : void Run(PipelineData* data, Zone* temp_zone) {
1527 456727 : GraphTrimmer trimmer(temp_zone, data->graph());
1528 : NodeVector roots(temp_zone);
1529 456731 : data->jsgraph()->GetCachedNodes(&roots);
1530 456728 : trimmer.TrimGraph(roots.begin(), roots.end());
1531 456730 : }
1532 : };
1533 :
1534 :
1535 : struct LateGraphTrimmingPhase {
1536 : static const char* phase_name() { return "late graph trimming"; }
1537 3806956 : void Run(PipelineData* data, Zone* temp_zone) {
1538 1903392 : GraphTrimmer trimmer(temp_zone, data->graph());
1539 : NodeVector roots(temp_zone);
1540 1903564 : if (data->jsgraph()) {
1541 1398001 : data->jsgraph()->GetCachedNodes(&roots);
1542 : }
1543 1903561 : trimmer.TrimGraph(roots.begin(), roots.end());
1544 1903553 : }
1545 : };
1546 :
1547 :
1548 : struct ComputeSchedulePhase {
1549 : static const char* phase_name() { return "scheduling"; }
1550 :
1551 3806784 : void Run(PipelineData* data, Zone* temp_zone) {
1552 : Schedule* schedule = Scheduler::ComputeSchedule(
1553 : temp_zone, data->graph(), data->info()->is_splitting_enabled()
1554 : ? Scheduler::kSplitNodes
1555 3806784 : : Scheduler::kNoFlags);
1556 1903494 : if (FLAG_turbo_verify) ScheduleVerifier::Run(schedule);
1557 : data->set_schedule(schedule);
1558 1903494 : }
1559 : };
1560 :
// Bundles an instruction sequence with the per-node instruction-origin
// offsets so the mapping can be streamed out as JSON (see operator<< below).
struct InstructionRangesAsJSON {
  const InstructionSequence* sequence;
  const ZoneVector<std::pair<int, int>>* instr_origins;
};
1565 :
// Emits two JSON fragments: node id -> [first, last] instruction index, and
// block RPO number -> [code_start, code_end]. Appended to an already-open
// JSON object (note the leading ", ").
std::ostream& operator<<(std::ostream& out, const InstructionRangesAsJSON& s) {
  const int max = static_cast<int>(s.sequence->LastInstructionIndex());

  out << ", \"nodeIdToInstructionRange\": {";
  bool need_comma = false;
  for (size_t i = 0; i < s.instr_origins->size(); ++i) {
    std::pair<int, int> offset = (*s.instr_origins)[i];
    // A first-offset of -1 marks a node with no recorded instructions.
    if (offset.first == -1) continue;
    // Offsets are relative to the end of the sequence; convert them back to
    // absolute instruction indices.
    const int first = max - offset.first + 1;
    const int second = max - offset.second + 1;
    if (need_comma) out << ", ";
    out << "\"" << i << "\": [" << first << ", " << second << "]";
    need_comma = true;
  }
  out << "}";
  // NOTE(review): lower-case "to" in the key below looks like a typo for
  // "blockIdToInstructionRange", but consumers may rely on the exact spelling
  // — confirm against the Turbolizer frontend before changing it.
  out << ", \"blockIdtoInstructionRange\": {";
  need_comma = false;
  for (auto block : s.sequence->instruction_blocks()) {
    if (need_comma) out << ", ";
    out << "\"" << block->rpo_number() << "\": [" << block->code_start() << ", "
        << block->code_end() << "]";
    need_comma = true;
  }
  out << "}";
  return out;
}
1592 :
struct InstructionSelectionPhase {
  static const char* phase_name() { return "select instructions"; }

  // Lowers the scheduled graph into the target instruction sequence. Marks
  // the compilation as failed if selection does not succeed, and appends an
  // instruction-range trace record when JSON tracing is enabled.
  void Run(PipelineData* data, Zone* temp_zone, Linkage* linkage) {
    InstructionSelector selector(
        temp_zone, data->graph()->NodeCount(), linkage, data->sequence(),
        data->schedule(), data->source_positions(), data->frame(),
        data->info()->switch_jump_table_enabled()
            ? InstructionSelector::kEnableSwitchJumpTable
            : InstructionSelector::kDisableSwitchJumpTable,
        data->info()->is_source_positions_enabled()
            ? InstructionSelector::kAllSourcePositions
            : InstructionSelector::kCallSourcePositions,
        InstructionSelector::SupportedFeatures(),
        FLAG_turbo_instruction_scheduling
            ? InstructionSelector::kEnableScheduling
            : InstructionSelector::kDisableScheduling,
        // Roots-relative addressing is disabled when there is no isolate,
        // when the serializer is enabled, or when constants are loaded from
        // the root list.
        !data->isolate() || data->isolate()->serializer_enabled() ||
                data->isolate()->ShouldLoadConstantsFromRootList()
            ? InstructionSelector::kDisableRootsRelativeAddressing
            : InstructionSelector::kEnableRootsRelativeAddressing,
        data->info()->GetPoisoningMitigationLevel(),
        data->info()->trace_turbo_json_enabled()
            ? InstructionSelector::kEnableTraceTurboJson
            : InstructionSelector::kDisableTraceTurboJson);
    if (!selector.SelectInstructions()) {
      data->set_compilation_failed();
    }
    if (data->info()->trace_turbo_json_enabled()) {
      TurboJsonFile json_of(data->info(), std::ios_base::app);
      json_of << "{\"name\":\"" << phase_name()
              << "\",\"type\":\"instructions\""
              << InstructionRangesAsJSON{data->sequence(),
                                         &selector.instr_origins()}
              << "},\n";
    }
  }
};
1631 :
1632 :
1633 : struct MeetRegisterConstraintsPhase {
1634 : static const char* phase_name() { return "meet register constraints"; }
1635 :
1636 2141446 : void Run(PipelineData* data, Zone* temp_zone) {
1637 2141446 : ConstraintBuilder builder(data->register_allocation_data());
1638 2141439 : builder.MeetRegisterConstraints();
1639 2141573 : }
1640 : };
1641 :
1642 :
1643 : struct ResolvePhisPhase {
1644 : static const char* phase_name() { return "resolve phis"; }
1645 :
1646 2141587 : void Run(PipelineData* data, Zone* temp_zone) {
1647 2141587 : ConstraintBuilder builder(data->register_allocation_data());
1648 2141582 : builder.ResolvePhis();
1649 2141382 : }
1650 : };
1651 :
1652 :
1653 : struct BuildLiveRangesPhase {
1654 : static const char* phase_name() { return "build live ranges"; }
1655 :
1656 2141459 : void Run(PipelineData* data, Zone* temp_zone) {
1657 2141459 : LiveRangeBuilder builder(data->register_allocation_data(), temp_zone);
1658 2141456 : builder.BuildLiveRanges();
1659 2141444 : }
1660 : };
1661 :
1662 : struct BuildBundlesPhase {
1663 : static const char* phase_name() { return "build live range bundles"; }
1664 :
1665 2141574 : void Run(PipelineData* data, Zone* temp_zone) {
1666 : BundleBuilder builder(data->register_allocation_data());
1667 2141574 : builder.BuildBundles();
1668 : }
1669 : };
1670 :
1671 : struct SplinterLiveRangesPhase {
1672 : static const char* phase_name() { return "splinter live ranges"; }
1673 :
1674 2141569 : void Run(PipelineData* data, Zone* temp_zone) {
1675 : LiveRangeSeparator live_range_splinterer(data->register_allocation_data(),
1676 : temp_zone);
1677 2141569 : live_range_splinterer.Splinter();
1678 : }
1679 : };
1680 :
1681 :
1682 : template <typename RegAllocator>
1683 : struct AllocateGeneralRegistersPhase {
1684 : static const char* phase_name() { return "allocate general registers"; }
1685 :
1686 2141484 : void Run(PipelineData* data, Zone* temp_zone) {
1687 : RegAllocator allocator(data->register_allocation_data(), GENERAL_REGISTERS,
1688 2141484 : temp_zone);
1689 2141600 : allocator.AllocateRegisters();
1690 2141501 : }
1691 : };
1692 :
1693 : template <typename RegAllocator>
1694 : struct AllocateFPRegistersPhase {
1695 : static const char* phase_name() { return "allocate f.p. registers"; }
1696 :
1697 190057 : void Run(PipelineData* data, Zone* temp_zone) {
1698 : RegAllocator allocator(data->register_allocation_data(), FP_REGISTERS,
1699 190057 : temp_zone);
1700 190057 : allocator.AllocateRegisters();
1701 190055 : }
1702 : };
1703 :
1704 :
1705 : struct MergeSplintersPhase {
1706 : static const char* phase_name() { return "merge splintered ranges"; }
1707 2141564 : void Run(PipelineData* pipeline_data, Zone* temp_zone) {
1708 : RegisterAllocationData* data = pipeline_data->register_allocation_data();
1709 : LiveRangeMerger live_range_merger(data, temp_zone);
1710 2141564 : live_range_merger.Merge();
1711 : }
1712 : };
1713 :
1714 :
1715 : struct LocateSpillSlotsPhase {
1716 : static const char* phase_name() { return "locate spill slots"; }
1717 :
1718 2141595 : void Run(PipelineData* data, Zone* temp_zone) {
1719 2141595 : SpillSlotLocator locator(data->register_allocation_data());
1720 2141565 : locator.LocateSpillSlots();
1721 2141583 : }
1722 : };
1723 :
1724 :
1725 : struct AssignSpillSlotsPhase {
1726 : static const char* phase_name() { return "assign spill slots"; }
1727 :
1728 2141548 : void Run(PipelineData* data, Zone* temp_zone) {
1729 2141548 : OperandAssigner assigner(data->register_allocation_data());
1730 2141492 : assigner.AssignSpillSlots();
1731 2141370 : }
1732 : };
1733 :
1734 :
1735 : struct CommitAssignmentPhase {
1736 : static const char* phase_name() { return "commit assignment"; }
1737 :
1738 2141587 : void Run(PipelineData* data, Zone* temp_zone) {
1739 2141587 : OperandAssigner assigner(data->register_allocation_data());
1740 2141586 : assigner.CommitAssignment();
1741 2141553 : }
1742 : };
1743 :
1744 :
1745 : struct PopulateReferenceMapsPhase {
1746 : static const char* phase_name() { return "populate pointer maps"; }
1747 :
1748 2141578 : void Run(PipelineData* data, Zone* temp_zone) {
1749 2141578 : ReferenceMapPopulator populator(data->register_allocation_data());
1750 2141575 : populator.PopulateReferenceMaps();
1751 2141255 : }
1752 : };
1753 :
1754 :
1755 : struct ConnectRangesPhase {
1756 : static const char* phase_name() { return "connect ranges"; }
1757 :
1758 2141586 : void Run(PipelineData* data, Zone* temp_zone) {
1759 2141586 : LiveRangeConnector connector(data->register_allocation_data());
1760 2141587 : connector.ConnectRanges(temp_zone);
1761 2141282 : }
1762 : };
1763 :
1764 :
1765 : struct ResolveControlFlowPhase {
1766 : static const char* phase_name() { return "resolve control flow"; }
1767 :
1768 2141575 : void Run(PipelineData* data, Zone* temp_zone) {
1769 2141575 : LiveRangeConnector connector(data->register_allocation_data());
1770 2141575 : connector.ResolveControlFlow(temp_zone);
1771 2141533 : }
1772 : };
1773 :
1774 :
1775 : struct OptimizeMovesPhase {
1776 : static const char* phase_name() { return "optimize moves"; }
1777 :
1778 2141580 : void Run(PipelineData* data, Zone* temp_zone) {
1779 2141580 : MoveOptimizer move_optimizer(temp_zone, data->sequence());
1780 2141399 : move_optimizer.Run();
1781 2141313 : }
1782 : };
1783 :
1784 :
1785 : struct FrameElisionPhase {
1786 : static const char* phase_name() { return "frame elision"; }
1787 :
1788 2141551 : void Run(PipelineData* data, Zone* temp_zone) {
1789 2141551 : FrameElider(data->sequence()).Run();
1790 2141483 : }
1791 : };
1792 :
1793 :
1794 : struct JumpThreadingPhase {
1795 : static const char* phase_name() { return "jump threading"; }
1796 :
1797 4898208 : void Run(PipelineData* data, Zone* temp_zone, bool frame_at_start) {
1798 : ZoneVector<RpoNumber> result(temp_zone);
1799 2141526 : if (JumpThreading::ComputeForwarding(temp_zone, result, data->sequence(),
1800 2141526 : frame_at_start)) {
1801 615156 : JumpThreading::ApplyForwarding(temp_zone, result, data->sequence());
1802 : }
1803 2141506 : }
1804 : };
1805 :
1806 : struct AssembleCodePhase {
1807 : static const char* phase_name() { return "assemble code"; }
1808 :
1809 2141513 : void Run(PipelineData* data, Zone* temp_zone) {
1810 2141513 : data->code_generator()->AssembleCode();
1811 : }
1812 : };
1813 :
1814 : struct FinalizeCodePhase {
1815 : static const char* phase_name() { return "finalize code"; }
1816 :
1817 1579173 : void Run(PipelineData* data, Zone* temp_zone) {
1818 1579173 : data->set_code(data->code_generator()->FinalizeCode());
1819 : }
1820 : };
1821 :
1822 :
// Dumps the current graph for tracing. Depending on flags this emits a JSON
// record (for Turbolizer), a scheduled-graph text dump, or a simple RPO text
// dump. Not a real phase: phase_name() is nullptr.
struct PrintGraphPhase {
  static const char* phase_name() { return nullptr; }

  void Run(PipelineData* data, Zone* temp_zone, const char* phase) {
    OptimizedCompilationInfo* info = data->info();
    Graph* graph = data->graph();

    if (info->trace_turbo_json_enabled()) {  // Print JSON.
      AllowHandleDereference allow_deref;

      TurboJsonFile json_of(info, std::ios_base::app);
      json_of << "{\"name\":\"" << phase << "\",\"type\":\"graph\",\"data\":"
              << AsJSON(*graph, data->source_positions(), data->node_origins())
              << "},\n";
    }

    if (info->trace_turbo_scheduled_enabled()) {
      AccountingAllocator allocator;
      Schedule* schedule = data->schedule();
      // No schedule computed yet for this graph: compute a throw-away one
      // just for printing.
      if (schedule == nullptr) {
        schedule = Scheduler::ComputeSchedule(temp_zone, data->graph(),
                                              Scheduler::kNoFlags);
      }

      AllowHandleDereference allow_deref;
      CodeTracer::Scope tracing_scope(data->GetCodeTracer());
      OFStream os(tracing_scope.file());
      os << "-- Graph after " << phase << " -- " << std::endl;
      os << AsScheduledGraph(schedule);
    } else if (info->trace_turbo_graph_enabled()) {  // Simple textual RPO.
      AllowHandleDereference allow_deref;
      CodeTracer::Scope tracing_scope(data->GetCodeTracer());
      OFStream os(tracing_scope.file());
      os << "-- Graph after " << phase << " -- " << std::endl;
      os << AsRPO(*graph);
    }
  }
};
1861 :
1862 :
// Runs the graph Verifier, selecting wasm vs. default verification rules
// from the code kind. Not a real phase: phase_name() is nullptr.
struct VerifyGraphPhase {
  static const char* phase_name() { return nullptr; }

  // |untyped| disables type checking; |values_only| restricts verification
  // to value edges.
  void Run(PipelineData* data, Zone* temp_zone, const bool untyped,
           bool values_only = false) {
    Verifier::CodeType code_type;
    switch (data->info()->code_kind()) {
      case Code::WASM_FUNCTION:
      case Code::WASM_TO_JS_FUNCTION:
      case Code::JS_TO_WASM_FUNCTION:
      case Code::WASM_INTERPRETER_ENTRY:
      case Code::C_WASM_ENTRY:
        code_type = Verifier::kWasm;
        break;
      default:
        code_type = Verifier::kDefault;
    }
    Verifier::Run(data->graph(), !untyped ? Verifier::TYPED : Verifier::UNTYPED,
                  values_only ? Verifier::kValuesOnly : Verifier::kAll,
                  code_type);
  }
};
1885 :
1886 11401717 : void PipelineImpl::RunPrintAndVerify(const char* phase, bool untyped) {
1887 22803515 : if (info()->trace_turbo_json_enabled() ||
1888 : info()->trace_turbo_graph_enabled()) {
1889 19 : Run<PrintGraphPhase>(phase);
1890 : }
1891 11401858 : if (FLAG_turbo_verify) {
1892 272 : Run<VerifyGraphPhase>(untyped);
1893 : }
1894 11401858 : }
1895 :
// Builds the initial TurboFan graph from bytecode and runs the early,
// heap-access-dependent phases (inlining, early trimming, broker
// serialization). Always returns true. The ordering of broker
// StartSerializing/StopSerializing around the phases is load-bearing.
bool PipelineImpl::CreateGraph() {
  PipelineData* data = this->data_;

  data->BeginPhaseKind("graph creation");

  if (info()->trace_turbo_json_enabled() ||
      info()->trace_turbo_graph_enabled()) {
    CodeTracer::Scope tracing_scope(data->GetCodeTracer());
    OFStream os(tracing_scope.file());
    os << "---------------------------------------------------\n"
       << "Begin compiling method " << info()->GetDebugName().get()
       << " using Turbofan" << std::endl;
  }
  if (info()->trace_turbo_json_enabled()) {
    TurboCfgFile tcf(isolate());
    tcf << AsC1VCompilation(info());
  }

  // Decorators record source positions (and node origins when tracing) for
  // nodes as they are created.
  data->source_positions()->AddDecorator();
  if (data->info()->trace_turbo_json_enabled()) {
    data->node_origins()->AddDecorator();
  }

  if (FLAG_concurrent_inlining) {
    data->broker()->StartSerializing();
    Run<SerializeStandardObjectsPhase>();
    Run<SerializationPhase>();
  } else {
    data->broker()->SetNativeContextRef();
  }

  Run<GraphBuilderPhase>();
  RunPrintAndVerify(GraphBuilderPhase::phase_name(), true);

  if (FLAG_concurrent_inlining) {
    Run<CopyMetadataForConcurrentCompilePhase>();
  }

  // Perform function context specialization and inlining (if enabled).
  Run<InliningPhase>();
  RunPrintAndVerify(InliningPhase::phase_name(), true);

  // Remove dead->live edges from the graph.
  Run<EarlyGraphTrimmingPhase>();
  RunPrintAndVerify(EarlyGraphTrimmingPhase::phase_name(), true);

  // Determine the Typer operation flags.
  {
    if (is_sloppy(info()->shared_info()->language_mode()) &&
        info()->shared_info()->IsUserJavaScript()) {
      // Sloppy mode functions always have an Object for this.
      data->AddTyperFlag(Typer::kThisIsReceiver);
    }
    if (IsClassConstructor(info()->shared_info()->kind())) {
      // Class constructors cannot be [[Call]]ed.
      data->AddTyperFlag(Typer::kNewTargetIsReceiver);
    }
  }

  // Run the type-sensitive lowerings and optimizations on the graph.
  {
    if (FLAG_concurrent_inlining) {
      // TODO(neis): Remove CopyMetadataForConcurrentCompilePhase call once
      // brokerization of JSNativeContextSpecialization is complete.
      Run<CopyMetadataForConcurrentCompilePhase>();
      data->broker()->StopSerializing();
    } else {
      data->broker()->StartSerializing();
      Run<SerializeStandardObjectsPhase>();
      Run<CopyMetadataForConcurrentCompilePhase>();
      data->broker()->StopSerializing();
    }
  }

  data->EndPhaseKind();

  return true;
}
1974 :
// Runs the optimizing phases over the graph built by CreateGraph(): typing,
// typed lowering, loop peeling/exit elimination, load/escape/store
// elimination, simplified and generic lowering, effect-control
// linearization, memory and late optimization — then schedules the graph and
// selects instructions. Returns false if compilation failed (currently only
// via cyclic escape-analysis state). Phase order is load-bearing.
bool PipelineImpl::OptimizeGraph(Linkage* linkage) {
  PipelineData* data = this->data_;

  data->BeginPhaseKind("lowering");

  // Type the graph and keep the Typer running such that new nodes get
  // automatically typed when they are created.
  Run<TyperPhase>(data->CreateTyper());
  RunPrintAndVerify(TyperPhase::phase_name());
  Run<TypedLoweringPhase>();
  RunPrintAndVerify(TypedLoweringPhase::phase_name());

  if (data->info()->is_loop_peeling_enabled()) {
    Run<LoopPeelingPhase>();
    RunPrintAndVerify(LoopPeelingPhase::phase_name(), true);
  } else {
    Run<LoopExitEliminationPhase>();
    RunPrintAndVerify(LoopExitEliminationPhase::phase_name(), true);
  }

  if (FLAG_turbo_load_elimination) {
    Run<LoadEliminationPhase>();
    RunPrintAndVerify(LoadEliminationPhase::phase_name());
  }
  // No new nodes should require typing from here on.
  data->DeleteTyper();

  if (FLAG_turbo_escape) {
    Run<EscapeAnalysisPhase>();
    if (data->compilation_failed()) {
      info()->AbortOptimization(
          BailoutReason::kCyclicObjectStateDetectedInEscapeAnalysis);
      data->EndPhaseKind();
      return false;
    }
    RunPrintAndVerify(EscapeAnalysisPhase::phase_name());
  }

  // Perform simplified lowering. This has to run w/o the Typer decorator,
  // because we cannot compute meaningful types anyways, and the computed types
  // might even conflict with the representation/truncation logic.
  Run<SimplifiedLoweringPhase>();
  RunPrintAndVerify(SimplifiedLoweringPhase::phase_name(), true);

  // From now on it is invalid to look at types on the nodes, because the types
  // on the nodes might not make sense after representation selection due to the
  // way we handle truncations; if we'd want to look at types afterwards we'd
  // essentially need to re-type (large portions of) the graph.

  // In order to catch bugs related to type access after this point, we now
  // remove the types from the nodes (currently only in Debug builds).
#ifdef DEBUG
  Run<UntyperPhase>();
  RunPrintAndVerify(UntyperPhase::phase_name(), true);
#endif

  // Run generic lowering pass.
  Run<GenericLoweringPhase>();
  RunPrintAndVerify(GenericLoweringPhase::phase_name(), true);

  data->BeginPhaseKind("block building");

  // Run early optimization pass.
  Run<EarlyOptimizationPhase>();
  RunPrintAndVerify(EarlyOptimizationPhase::phase_name(), true);

  Run<EffectControlLinearizationPhase>();
  RunPrintAndVerify(EffectControlLinearizationPhase::phase_name(), true);

  if (FLAG_turbo_store_elimination) {
    Run<StoreStoreEliminationPhase>();
    RunPrintAndVerify(StoreStoreEliminationPhase::phase_name(), true);
  }

  // Optimize control flow.
  if (FLAG_turbo_cf_optimization) {
    Run<ControlFlowOptimizationPhase>();
    RunPrintAndVerify(ControlFlowOptimizationPhase::phase_name(), true);
  }

  // Optimize memory access and allocation operations.
  Run<MemoryOptimizationPhase>();
  // TODO(jarin, rossberg): Remove UNTYPED once machine typing works.
  RunPrintAndVerify(MemoryOptimizationPhase::phase_name(), true);

  // Lower changes that have been inserted before.
  Run<LateOptimizationPhase>();
  // TODO(jarin, rossberg): Remove UNTYPED once machine typing works.
  RunPrintAndVerify(LateOptimizationPhase::phase_name(), true);

  data->source_positions()->RemoveDecorator();
  if (data->info()->trace_turbo_json_enabled()) {
    data->node_origins()->RemoveDecorator();
  }

  ComputeScheduledGraph();

  return SelectInstructions(linkage);
}
2073 :
// Compiles a CSA/Turbofan stub graph to a Code object. Runs memory and CSA
// optimization, then generates code twice when far-jump rewriting is
// profitable: a first pass on a scratch pipeline collects jump-optimization
// info, and a second pass on the main pipeline uses it.
MaybeHandle<Code> Pipeline::GenerateCodeForCodeStub(
    Isolate* isolate, CallDescriptor* call_descriptor, Graph* graph,
    SourcePositionTable* source_positions, Code::Kind kind,
    const char* debug_name, int32_t builtin_index,
    PoisoningMitigationLevel poisoning_level, const AssemblerOptions& options) {
  OptimizedCompilationInfo info(CStrVector(debug_name), graph->zone(), kind);
  info.set_builtin_index(builtin_index);

  if (poisoning_level != PoisoningMitigationLevel::kDontPoison) {
    info.SetPoisoningMitigationLevel(poisoning_level);
  }

  // Construct a pipeline for scheduling and code generation.
  ZoneStats zone_stats(isolate->allocator());
  NodeOriginTable node_origins(graph);
  JumpOptimizationInfo jump_opt;
  // Jump rewriting is only attempted while the snapshot serializer is active.
  bool should_optimize_jumps =
      isolate->serializer_enabled() && FLAG_turbo_rewrite_far_jumps;
  PipelineData data(&zone_stats, &info, isolate, graph, nullptr,
                    source_positions, &node_origins,
                    should_optimize_jumps ? &jump_opt : nullptr, options);
  data.set_verify_graph(FLAG_verify_csa);
  std::unique_ptr<PipelineStatistics> pipeline_statistics;
  if (FLAG_turbo_stats || FLAG_turbo_stats_nvp) {
    pipeline_statistics.reset(new PipelineStatistics(
        &info, isolate->GetTurboStatistics(), &zone_stats));
    pipeline_statistics->BeginPhaseKind("stub codegen");
  }

  PipelineImpl pipeline(&data);

  if (info.trace_turbo_json_enabled() || info.trace_turbo_graph_enabled()) {
    CodeTracer::Scope tracing_scope(data.GetCodeTracer());
    OFStream os(tracing_scope.file());
    os << "---------------------------------------------------\n"
       << "Begin compiling " << debug_name << " using Turbofan" << std::endl;
    if (info.trace_turbo_json_enabled()) {
      TurboJsonFile json_of(&info, std::ios_base::trunc);
      json_of << "{\"function\" : ";
      JsonPrintFunctionSource(json_of, -1, info.GetDebugName(),
                              Handle<Script>(), isolate,
                              Handle<SharedFunctionInfo>());
      json_of << ",\n\"phases\":[";
    }
    pipeline.Run<PrintGraphPhase>("Machine");
  }

  // Optimize memory access and allocation operations.
  pipeline.Run<MemoryOptimizationPhase>();
  pipeline.RunPrintAndVerify(MemoryOptimizationPhase::phase_name(), true);

  pipeline.Run<CsaOptimizationPhase>();
  pipeline.RunPrintAndVerify(CsaOptimizationPhase::phase_name(), true);

  pipeline.Run<VerifyGraphPhase>(true);
  pipeline.ComputeScheduledGraph();
  DCHECK_NOT_NULL(data.schedule());

  // First run code generation on a copy of the pipeline, in order to be able to
  // repeat it for jump optimization. The first run has to happen on a temporary
  // pipeline to avoid deletion of zones on the main pipeline.
  PipelineData second_data(&zone_stats, &info, isolate, data.graph(),
                           data.schedule(), data.source_positions(),
                           data.node_origins(), data.jump_optimization_info(),
                           options);
  second_data.set_verify_graph(FLAG_verify_csa);
  PipelineImpl second_pipeline(&second_data);
  second_pipeline.SelectInstructionsAndAssemble(call_descriptor);

  Handle<Code> code;
  if (jump_opt.is_optimizable()) {
    // Re-generate on the main pipeline using the collected jump info.
    jump_opt.set_optimizing();
    code = pipeline.GenerateCode(call_descriptor).ToHandleChecked();
  } else {
    // Nothing to optimize: the first run's code is final.
    code = second_pipeline.FinalizeCode().ToHandleChecked();
  }

  return code;
}
2153 :
2154 : // static
// static
// Compiles a wasm stub graph directly into a NativeModule-owned WasmCode
// object (no Isolate heap Code is created). Returns nullptr if instruction
// selection fails.
wasm::WasmCode* Pipeline::GenerateCodeForWasmNativeStub(
    wasm::WasmEngine* wasm_engine, CallDescriptor* call_descriptor,
    MachineGraph* mcgraph, Code::Kind kind, int wasm_kind,
    const char* debug_name, const AssemblerOptions& options,
    wasm::NativeModule* native_module, SourcePositionTable* source_positions) {
  Graph* graph = mcgraph->graph();
  OptimizedCompilationInfo info(CStrVector(debug_name), graph->zone(), kind);
  // Construct a pipeline for scheduling and code generation.
  ZoneStats zone_stats(wasm_engine->allocator());
  // Zone-allocated so it lives as long as the graph zone.
  NodeOriginTable* node_positions = new (graph->zone()) NodeOriginTable(graph);
  PipelineData data(&zone_stats, wasm_engine, &info, mcgraph, nullptr,
                    source_positions, node_positions, options);
  std::unique_ptr<PipelineStatistics> pipeline_statistics;
  if (FLAG_turbo_stats || FLAG_turbo_stats_nvp) {
    pipeline_statistics.reset(new PipelineStatistics(
        &info, wasm_engine->GetOrCreateTurboStatistics(), &zone_stats));
    pipeline_statistics->BeginPhaseKind("wasm stub codegen");
  }

  PipelineImpl pipeline(&data);

  if (info.trace_turbo_json_enabled() || info.trace_turbo_graph_enabled()) {
    CodeTracer::Scope tracing_scope(data.GetCodeTracer());
    OFStream os(tracing_scope.file());
    os << "---------------------------------------------------\n"
       << "Begin compiling method " << info.GetDebugName().get()
       << " using Turbofan" << std::endl;
  }

  if (info.trace_turbo_graph_enabled()) {  // Simple textual RPO.
    StdoutStream{} << "-- wasm stub " << Code::Kind2String(kind) << " graph -- "
                   << std::endl
                   << AsRPO(*graph);
  }

  if (info.trace_turbo_json_enabled()) {
    TurboJsonFile json_of(&info, std::ios_base::trunc);
    json_of << "{\"function\":\"" << info.GetDebugName().get()
            << "\", \"source\":\"\",\n\"phases\":[";
  }

  pipeline.RunPrintAndVerify("machine", true);
  pipeline.ComputeScheduledGraph();

  Linkage linkage(call_descriptor);
  if (!pipeline.SelectInstructions(&linkage)) return nullptr;
  pipeline.AssembleCode(&linkage);

  // Extract the raw code descriptor and hand it to the native module rather
  // than finalizing a heap Code object.
  CodeGenerator* code_generator = pipeline.code_generator();
  CodeDesc code_desc;
  code_generator->tasm()->GetCode(
      nullptr, &code_desc, code_generator->safepoint_table_builder(),
      static_cast<int>(code_generator->GetHandlerTableOffset()));

  wasm::WasmCode* code = native_module->AddCode(
      wasm::WasmCode::kAnonymousFuncIndex, code_desc,
      code_generator->frame()->GetTotalFrameSlotCount(),
      call_descriptor->GetTaggedParameterSlots(),
      code_generator->GetProtectedInstructions(),
      code_generator->GetSourcePositionTable(),
      static_cast<wasm::WasmCode::Kind>(wasm_kind), wasm::WasmCode::kOther);

  if (info.trace_turbo_json_enabled()) {
    TurboJsonFile json_of(&info, std::ios_base::app);
    json_of << "{\"name\":\"disassembly\",\"type\":\"disassembly\",\"data\":\"";
#ifdef ENABLE_DISASSEMBLER
    std::stringstream disassembler_stream;
    Disassembler::Decode(
        nullptr, &disassembler_stream, code->instructions().start(),
        code->instructions().start() + code->safepoint_table_offset(),
        CodeReference(code));
    for (auto const c : disassembler_stream.str()) {
      json_of << AsEscapedUC16ForJSON(c);
    }
#endif  // ENABLE_DISASSEMBLER
    json_of << "\"}\n]";
    json_of << "\n}";
  }

  if (info.trace_turbo_json_enabled() || info.trace_turbo_graph_enabled()) {
    CodeTracer::Scope tracing_scope(data.GetCodeTracer());
    OFStream os(tracing_scope.file());
    os << "---------------------------------------------------\n"
       << "Finished compiling method " << info.GetDebugName().get()
       << " using Turbofan" << std::endl;
  }

  return code;
}
2244 :
2245 : // static
// static
// Compiles a wasm-related stub graph into a regular heap Code object
// (contrast with GenerateCodeForWasmNativeStub, which produces WasmCode).
// Returns an empty MaybeHandle on failure or when dependencies cannot be
// committed.
MaybeHandle<Code> Pipeline::GenerateCodeForWasmHeapStub(
    Isolate* isolate, CallDescriptor* call_descriptor, Graph* graph,
    Code::Kind kind, const char* debug_name, const AssemblerOptions& options,
    SourcePositionTable* source_positions) {
  OptimizedCompilationInfo info(CStrVector(debug_name), graph->zone(), kind);
  // Construct a pipeline for scheduling and code generation.
  ZoneStats zone_stats(isolate->allocator());
  // Zone-allocated so it lives as long as the graph zone.
  NodeOriginTable* node_positions = new (graph->zone()) NodeOriginTable(graph);
  PipelineData data(&zone_stats, &info, isolate, graph, nullptr,
                    source_positions, node_positions, nullptr, options);
  std::unique_ptr<PipelineStatistics> pipeline_statistics;
  if (FLAG_turbo_stats || FLAG_turbo_stats_nvp) {
    pipeline_statistics.reset(new PipelineStatistics(
        &info, isolate->GetTurboStatistics(), &zone_stats));
    pipeline_statistics->BeginPhaseKind("wasm stub codegen");
  }

  PipelineImpl pipeline(&data);

  if (info.trace_turbo_json_enabled() ||
      info.trace_turbo_graph_enabled()) {
    CodeTracer::Scope tracing_scope(data.GetCodeTracer());
    OFStream os(tracing_scope.file());
    os << "---------------------------------------------------\n"
       << "Begin compiling method " << info.GetDebugName().get()
       << " using Turbofan" << std::endl;
  }

  if (info.trace_turbo_graph_enabled()) {  // Simple textual RPO.
    StdoutStream{} << "-- wasm stub " << Code::Kind2String(kind) << " graph -- "
                   << std::endl
                   << AsRPO(*graph);
  }

  if (info.trace_turbo_json_enabled()) {
    TurboJsonFile json_of(&info, std::ios_base::trunc);
    json_of << "{\"function\":\"" << info.GetDebugName().get()
            << "\", \"source\":\"\",\n\"phases\":[";
  }

  pipeline.RunPrintAndVerify("machine", true);
  pipeline.ComputeScheduledGraph();

  Handle<Code> code;
  if (pipeline.GenerateCode(call_descriptor).ToHandle(&code) &&
      pipeline.CommitDependencies(code)) {
    return code;
  }
  return MaybeHandle<Code>();
}
2296 :
2297 : // static
// Testing-only entry point that drives the full JS pipeline (graph creation,
// optimization, code generation) for {info}. If {out_broker} is non-null the
// heap broker is handed back to the caller instead of being retired during
// finalization, so tests can inspect it.
MaybeHandle<Code> Pipeline::GenerateCodeForTesting(
    OptimizedCompilationInfo* info, Isolate* isolate,
    JSHeapBroker** out_broker) {
  ZoneStats zone_stats(isolate->allocator());
  std::unique_ptr<PipelineStatistics> pipeline_statistics(
      CreatePipelineStatistics(Handle<Script>::null(), info, isolate,
                               &zone_stats));
  PipelineData data(&zone_stats, isolate, info, pipeline_statistics.get());
  if (out_broker != nullptr) {
    *out_broker = data.broker();
  }

  PipelineImpl pipeline(&data);

  Linkage linkage(Linkage::ComputeIncoming(data.instruction_zone(), info));
  // Deopt entries must exist before generated code can reference them.
  Deoptimizer::EnsureCodeForDeoptimizationEntries(isolate);

  if (!pipeline.CreateGraph()) return MaybeHandle<Code>();
  if (!pipeline.OptimizeGraph(&linkage)) return MaybeHandle<Code>();
  pipeline.AssembleCode(&linkage);
  Handle<Code> code;
  // Retire the broker only when the caller did not ask to keep it.
  if (pipeline.FinalizeCode(out_broker == nullptr).ToHandle(&code) &&
      pipeline.CommitDependencies(code)) {
    return code;
  }
  return MaybeHandle<Code>();
}
2326 : // static
// Testing-only entry point that starts from a caller-supplied machine {graph}
// (and optionally a {schedule}) rather than building a graph from bytecode.
MaybeHandle<Code> Pipeline::GenerateCodeForTesting(
    OptimizedCompilationInfo* info, Isolate* isolate,
    CallDescriptor* call_descriptor, Graph* graph,
    const AssemblerOptions& options, Schedule* schedule) {
  // Construct a pipeline for scheduling and code generation.
  ZoneStats zone_stats(isolate->allocator());
  NodeOriginTable* node_positions = new (info->zone()) NodeOriginTable(graph);
  PipelineData data(&zone_stats, info, isolate, graph, schedule, nullptr,
                    node_positions, nullptr, options);
  std::unique_ptr<PipelineStatistics> pipeline_statistics;
  if (FLAG_turbo_stats || FLAG_turbo_stats_nvp) {
    pipeline_statistics.reset(new PipelineStatistics(
        info, isolate->GetTurboStatistics(), &zone_stats));
    pipeline_statistics->BeginPhaseKind("test codegen");
  }

  PipelineImpl pipeline(&data);

  if (info->trace_turbo_json_enabled()) {
    TurboJsonFile json_of(info, std::ios_base::trunc);
    json_of << "{\"function\":\"" << info->GetDebugName().get()
            << "\", \"source\":\"\",\n\"phases\":[";
  }
  // TODO(rossberg): Should this really be untyped?
  pipeline.RunPrintAndVerify("machine", true);

  // Ensure we have a schedule. A caller-provided schedule is used as-is.
  if (data.schedule() == nullptr) {
    pipeline.ComputeScheduledGraph();
  }

  Handle<Code> code;
  if (pipeline.GenerateCode(call_descriptor).ToHandle(&code) &&
      pipeline.CommitDependencies(code)) {
    return code;
  }
  return MaybeHandle<Code>();
}
2365 :
2366 : // static
2367 471944 : OptimizedCompilationJob* Pipeline::NewCompilationJob(
2368 : Isolate* isolate, Handle<JSFunction> function, bool has_script) {
2369 : Handle<SharedFunctionInfo> shared =
2370 943888 : handle(function->shared(), function->GetIsolate());
2371 471944 : return new PipelineCompilationJob(isolate, shared, function);
2372 : }
2373 :
2374 : // static
// Compiles a single wasm function from its machine graph in {mcgraph}.
// On success the result is stored into {info} via SetWasmCompilationResult;
// on instruction-selection failure the function returns without setting it.
void Pipeline::GenerateCodeForWasmFunction(
    OptimizedCompilationInfo* info, wasm::WasmEngine* wasm_engine,
    MachineGraph* mcgraph, CallDescriptor* call_descriptor,
    SourcePositionTable* source_positions, NodeOriginTable* node_origins,
    wasm::FunctionBody function_body, const wasm::WasmModule* module,
    int function_index) {
  ZoneStats zone_stats(wasm_engine->allocator());
  std::unique_ptr<PipelineStatistics> pipeline_statistics(
      CreatePipelineStatistics(wasm_engine, function_body, module, info,
                               &zone_stats));
  // {instruction_buffer} must live longer than {PipelineData}, since
  // {PipelineData} will reference the {instruction_buffer} via the
  // {AssemblerBuffer} of the {Assembler} contained in the {CodeGenerator}.
  std::unique_ptr<wasm::WasmInstructionBuffer> instruction_buffer =
      wasm::WasmInstructionBuffer::New();
  PipelineData data(&zone_stats, wasm_engine, info, mcgraph,
                    pipeline_statistics.get(), source_positions, node_origins,
                    WasmAssemblerOptions());

  PipelineImpl pipeline(&data);

  if (data.info()->trace_turbo_json_enabled() ||
      data.info()->trace_turbo_graph_enabled()) {
    CodeTracer::Scope tracing_scope(data.GetCodeTracer());
    OFStream os(tracing_scope.file());
    os << "---------------------------------------------------\n"
       << "Begin compiling method " << data.info()->GetDebugName().get()
       << " using Turbofan" << std::endl;
  }

  pipeline.RunPrintAndVerify("Machine", true);

  data.BeginPhaseKind("wasm optimization");
  const bool is_asm_js = module->origin == wasm::kAsmJsOrigin;
  if (FLAG_turbo_splitting && !is_asm_js) {
    data.info()->MarkAsSplittingEnabled();
  }
  if (FLAG_wasm_opt || is_asm_js) {
    // Full optimization: dead-code elimination, machine/common operator
    // reduction and value numbering. asm.js keeps signalling NaNs.
    PipelineRunScope scope(&data, "wasm full optimization");
    GraphReducer graph_reducer(scope.zone(), data.graph(),
                               data.mcgraph()->Dead());
    DeadCodeElimination dead_code_elimination(&graph_reducer, data.graph(),
                                              data.common(), scope.zone());
    ValueNumberingReducer value_numbering(scope.zone(), data.graph()->zone());
    const bool allow_signalling_nan = is_asm_js;
    MachineOperatorReducer machine_reducer(&graph_reducer, data.mcgraph(),
                                           allow_signalling_nan);
    CommonOperatorReducer common_reducer(&graph_reducer, data.graph(),
                                         data.broker(), data.common(),
                                         data.machine(), scope.zone());
    AddReducer(&data, &graph_reducer, &dead_code_elimination);
    AddReducer(&data, &graph_reducer, &machine_reducer);
    AddReducer(&data, &graph_reducer, &common_reducer);
    AddReducer(&data, &graph_reducer, &value_numbering);
    graph_reducer.ReduceGraph();
  } else {
    // Base optimization: value numbering only.
    PipelineRunScope scope(&data, "wasm base optimization");
    GraphReducer graph_reducer(scope.zone(), data.graph(),
                               data.mcgraph()->Dead());
    ValueNumberingReducer value_numbering(scope.zone(), data.graph()->zone());
    AddReducer(&data, &graph_reducer, &value_numbering);
    graph_reducer.ReduceGraph();
  }
  pipeline.RunPrintAndVerify("wasm optimization", true);

  if (data.node_origins()) {
    data.node_origins()->RemoveDecorator();
  }

  pipeline.ComputeScheduledGraph();

  Linkage linkage(call_descriptor);
  // Bail out without a result if instruction selection fails.
  if (!pipeline.SelectInstructions(&linkage)) return;
  pipeline.AssembleCode(&linkage, instruction_buffer->CreateView());

  // Package the generated code and its metadata into a WasmCompilationResult.
  auto result = base::make_unique<wasm::WasmCompilationResult>();
  CodeGenerator* code_generator = pipeline.code_generator();
  code_generator->tasm()->GetCode(
      nullptr, &result->code_desc, code_generator->safepoint_table_builder(),
      static_cast<int>(code_generator->GetHandlerTableOffset()));

  result->instr_buffer = instruction_buffer->ReleaseBuffer();
  result->frame_slot_count = code_generator->frame()->GetTotalFrameSlotCount();
  result->tagged_parameter_slots = call_descriptor->GetTaggedParameterSlots();
  result->source_positions = code_generator->GetSourcePositionTable();
  result->protected_instructions = code_generator->GetProtectedInstructions();

  if (data.info()->trace_turbo_json_enabled()) {
    TurboJsonFile json_of(data.info(), std::ios_base::app);
    json_of << "{\"name\":\"disassembly\",\"type\":\"disassembly\",\"data\":\"";
#ifdef ENABLE_DISASSEMBLER
    std::stringstream disassembler_stream;
    Disassembler::Decode(
        nullptr, &disassembler_stream, result->code_desc.buffer,
        result->code_desc.buffer + result->code_desc.instr_size,
        CodeReference(&result->code_desc));
    for (auto const c : disassembler_stream.str()) {
      json_of << AsEscapedUC16ForJSON(c);
    }
#endif  // ENABLE_DISASSEMBLER
    json_of << "\"}\n]";
    json_of << "\n}";
  }

  if (data.info()->trace_turbo_json_enabled() ||
      data.info()->trace_turbo_graph_enabled()) {
    CodeTracer::Scope tracing_scope(data.GetCodeTracer());
    OFStream os(tracing_scope.file());
    os << "---------------------------------------------------\n"
       << "Finished compiling method " << data.info()->GetDebugName().get()
       << " using Turbofan" << std::endl;
  }

  DCHECK(result->succeeded());
  info->SetWasmCompilationResult(std::move(result));
}
2491 :
2492 42 : bool Pipeline::AllocateRegistersForTesting(const RegisterConfiguration* config,
2493 126 : InstructionSequence* sequence,
2494 : bool run_verifier) {
2495 : OptimizedCompilationInfo info(ArrayVector("testing"), sequence->zone(),
2496 42 : Code::STUB);
2497 84 : ZoneStats zone_stats(sequence->isolate()->allocator());
2498 84 : PipelineData data(&zone_stats, &info, sequence->isolate(), sequence);
2499 42 : data.InitializeFrameData(nullptr);
2500 : PipelineImpl pipeline(&data);
2501 42 : pipeline.AllocateRegisters(config, nullptr, run_verifier);
2502 84 : return !data.compilation_failed();
2503 : }
2504 :
// Trims the graph of dead nodes, computes a schedule for it, and traces the
// resulting schedule. Must only be called on an unscheduled graph.
void PipelineImpl::ComputeScheduledGraph() {
  PipelineData* data = this->data_;

  // We should only schedule the graph if it is not scheduled yet.
  DCHECK_NULL(data->schedule());

  Run<LateGraphTrimmingPhase>();
  RunPrintAndVerify(LateGraphTrimmingPhase::phase_name(), true);

  Run<ComputeSchedulePhase>();
  TraceSchedule(data->info(), data, data->schedule(), "schedule");
}
2517 :
// Runs the backend up to (and including) register allocation and jump
// threading: optional machine-graph verification, instruction selection,
// register allocation, frame elision and jump optimization. Returns false if
// compilation was aborted (selection failure or regalloc running out of
// virtual registers).
bool PipelineImpl::SelectInstructions(Linkage* linkage) {
  auto call_descriptor = linkage->GetIncomingDescriptor();
  PipelineData* data = this->data_;

  // We should have a scheduled graph.
  DCHECK_NOT_NULL(data->graph());
  DCHECK_NOT_NULL(data->schedule());

  if (FLAG_turbo_profiling) {
    data->set_profiler_data(BasicBlockInstrumentor::Instrument(
        info(), data->graph(), data->schedule(), data->isolate()));
  }

  bool verify_stub_graph = data->verify_graph();
  // Jump optimization runs instruction selection twice, but the instruction
  // selector mutates nodes like swapping the inputs of a load, which can
  // violate the machine graph verification rules. So we skip the second
  // verification on a graph that already verified before.
  auto jump_opt = data->jump_optimization_info();
  if (jump_opt && jump_opt->is_optimizing()) {
    verify_stub_graph = false;
  }
  // Verification also triggers when --turbo-verify-machine-graph matches this
  // function's debug name (or is "*").
  if (verify_stub_graph ||
      (FLAG_turbo_verify_machine_graph != nullptr &&
       (!strcmp(FLAG_turbo_verify_machine_graph, "*") ||
        !strcmp(FLAG_turbo_verify_machine_graph, data->debug_name())))) {
    if (FLAG_trace_verify_csa) {
      AllowHandleDereference allow_deref;
      CodeTracer::Scope tracing_scope(data->GetCodeTracer());
      OFStream os(tracing_scope.file());
      os << "--------------------------------------------------\n"
         << "--- Verifying " << data->debug_name() << " generated by TurboFan\n"
         << "--------------------------------------------------\n"
         << *data->schedule()
         << "--------------------------------------------------\n"
         << "--- End of " << data->debug_name() << " generated by TurboFan\n"
         << "--------------------------------------------------\n";
    }
    Zone temp_zone(data->allocator(), ZONE_NAME);
    MachineGraphVerifier::Run(
        data->graph(), data->schedule(), linkage,
        data->info()->IsNotOptimizedFunctionOrWasmFunction(),
        data->debug_name(), &temp_zone);
  }

  data->InitializeInstructionSequence(call_descriptor);

  data->InitializeFrameData(call_descriptor);
  // Select and schedule instructions covering the scheduled graph.
  Run<InstructionSelectionPhase>(linkage);
  if (data->compilation_failed()) {
    info()->AbortOptimization(BailoutReason::kCodeGenerationFailed);
    data->EndPhaseKind();
    return false;
  }

  if (info()->trace_turbo_json_enabled() && !data->MayHaveUnverifiableGraph()) {
    AllowHandleDereference allow_deref;
    TurboCfgFile tcf(isolate());
    tcf << AsC1V("CodeGen", data->schedule(), data->source_positions(),
                 data->sequence());
  }

  if (info()->trace_turbo_json_enabled()) {
    std::ostringstream source_position_output;
    // Output source position information before the graph is deleted.
    if (data_->source_positions() != nullptr) {
      data_->source_positions()->PrintJson(source_position_output);
    } else {
      source_position_output << "{}";
    }
    source_position_output << ",\n\"NodeOrigins\" : ";
    data_->node_origins()->PrintJson(source_position_output);
    data_->set_source_position_output(source_position_output.str());
  }

  // The graph is no longer needed past this point; free its zone.
  data->DeleteGraphZone();

  data->BeginPhaseKind("register allocation");

  bool run_verifier = FLAG_turbo_verify_allocation;

  // Allocate registers. The register configuration depends on whether the
  // call descriptor restricts allocatable registers and on the poisoning
  // mitigation level.
  if (call_descriptor->HasRestrictedAllocatableRegisters()) {
    RegList registers = call_descriptor->AllocatableRegisters();
    DCHECK_LT(0, NumRegs(registers));
    std::unique_ptr<const RegisterConfiguration> config;
    config.reset(RegisterConfiguration::RestrictGeneralRegisters(registers));
    AllocateRegisters(config.get(), call_descriptor, run_verifier);
  } else if (data->info()->GetPoisoningMitigationLevel() !=
             PoisoningMitigationLevel::kDontPoison) {
#ifdef V8_TARGET_ARCH_IA32
    FATAL("Poisoning is not supported on ia32.");
#else
    AllocateRegisters(RegisterConfiguration::Poisoning(), call_descriptor,
                      run_verifier);
#endif  // V8_TARGET_ARCH_IA32
  } else {
    AllocateRegisters(RegisterConfiguration::Default(), call_descriptor,
                      run_verifier);
  }

  // Verify the instruction sequence has the same hash in two stages.
  VerifyGeneratedCodeIsIdempotent();

  Run<FrameElisionPhase>();
  if (data->compilation_failed()) {
    info()->AbortOptimization(
        BailoutReason::kNotEnoughVirtualRegistersRegalloc);
    data->EndPhaseKind();
    return false;
  }

  // TODO(mtrofin): move this off to the register allocator.
  bool generate_frame_at_start =
      data_->sequence()->instruction_blocks().front()->must_construct_frame();
  // Optimize jumps.
  if (FLAG_turbo_jt) {
    Run<JumpThreadingPhase>(generate_frame_at_start);
  }

  data->EndPhaseKind();

  return true;
}
2643 :
2644 2141378 : void PipelineImpl::VerifyGeneratedCodeIsIdempotent() {
2645 2263010 : PipelineData* data = this->data_;
2646 178360 : JumpOptimizationInfo* jump_opt = data->jump_optimization_info();
2647 4282756 : if (jump_opt == nullptr) return;
2648 :
2649 243264 : InstructionSequence* code = data->sequence();
2650 : int instruction_blocks = code->InstructionBlockCount();
2651 : int virtual_registers = code->VirtualRegisterCount();
2652 : size_t hash_code = base::hash_combine(instruction_blocks, virtual_registers);
2653 58096614 : for (auto instr : *code) {
2654 : hash_code = base::hash_combine(hash_code, instr->opcode(),
2655 : instr->InputCount(), instr->OutputCount());
2656 : }
2657 13002690 : for (int i = 0; i < virtual_registers; i++) {
2658 13002690 : hash_code = base::hash_combine(hash_code, code->GetRepresentation(i));
2659 : }
2660 121632 : if (jump_opt->is_collecting()) {
2661 : jump_opt->set_hash_code(hash_code);
2662 : } else {
2663 56728 : CHECK_EQ(hash_code, jump_opt->hash_code());
2664 : }
2665 : }
2666 :
// Wrapper for streaming the code generator's per-instruction start offsets as
// a JSON fragment (see the operator<< overload below). The pointer refers to
// data owned by the code generator; this struct does not own it.
struct InstructionStartsAsJSON {
  const ZoneVector<int>* instr_starts;
};
2670 :
2671 2 : std::ostream& operator<<(std::ostream& out, const InstructionStartsAsJSON& s) {
2672 2 : out << ", \"instructionOffsetToPCOffset\": {";
2673 : bool need_comma = false;
2674 130 : for (size_t i = 0; i < s.instr_starts->size(); ++i) {
2675 63 : if (need_comma) out << ", ";
2676 191 : int offset = (*s.instr_starts)[i];
2677 126 : out << "\"" << i << "\":" << offset;
2678 : need_comma = true;
2679 : }
2680 2 : out << "}";
2681 2 : return out;
2682 : }
2683 :
// Emits machine code for the selected, register-allocated instruction
// sequence. An optional {buffer} supplies external assembler storage (used by
// the wasm pipeline). Frees the instruction zone afterwards.
void PipelineImpl::AssembleCode(Linkage* linkage,
                                std::unique_ptr<AssemblerBuffer> buffer) {
  PipelineData* data = this->data_;
  data->BeginPhaseKind("code generation");
  data->InitializeCodeGenerator(linkage, std::move(buffer));

  Run<AssembleCodePhase>();
  // Record the instruction-offset-to-PC-offset mapping for Turbolizer.
  if (data->info()->trace_turbo_json_enabled()) {
    TurboJsonFile json_of(data->info(), std::ios_base::app);
    json_of << "{\"name\":\"code generation\""
            << ", \"type\":\"instructions\""
            << InstructionStartsAsJSON{&data->code_generator()->instr_starts()};
    json_of << "},\n";
  }
  data->DeleteInstructionZone();
}
2700 :
// Wrapper for streaming the code generator's per-block start offsets as a
// JSON fragment (see the operator<< overload below). The pointer refers to
// data owned by the code generator; this struct does not own it.
struct BlockStartsAsJSON {
  const ZoneVector<int>* block_starts;
};
2704 :
2705 2 : std::ostream& operator<<(std::ostream& out, const BlockStartsAsJSON& s) {
2706 2 : out << ", \"blockIdToOffset\": {";
2707 : bool need_comma = false;
2708 42 : for (size_t i = 0; i < s.block_starts->size(); ++i) {
2709 19 : if (need_comma) out << ", ";
2710 59 : int offset = (*s.block_starts)[i];
2711 38 : out << "\"" << i << "\":" << offset;
2712 : need_comma = true;
2713 : }
2714 2 : out << "},";
2715 2 : return out;
2716 : }
2717 :
// Produces the final Code object from the assembled code: optionally retires
// the heap broker, runs finalization, attaches disassembly to profiler data,
// and emits the closing Turbolizer JSON / tracing output.
MaybeHandle<Code> PipelineImpl::FinalizeCode(bool retire_broker) {
  PipelineData* data = this->data_;
  if (data->broker() && retire_broker) {
    data->broker()->Retire();
  }
  Run<FinalizeCodePhase>();

  MaybeHandle<Code> maybe_code = data->code();
  Handle<Code> code;
  if (!maybe_code.ToHandle(&code)) {
    // Finalization produced no code; propagate the empty handle.
    return maybe_code;
  }

  if (data->profiler_data()) {
#ifdef ENABLE_DISASSEMBLER
    std::ostringstream os;
    code->Disassemble(nullptr, os);
    data->profiler_data()->SetCode(&os);
#endif  // ENABLE_DISASSEMBLER
  }

  info()->SetCode(code);
  PrintCode(isolate(), code, info());

  if (info()->trace_turbo_json_enabled()) {
    TurboJsonFile json_of(info(), std::ios_base::app);

    // BlockStartsAsJSON emits its own leading and trailing separators, so the
    // concatenation below forms valid JSON.
    json_of << "{\"name\":\"disassembly\",\"type\":\"disassembly\""
            << BlockStartsAsJSON{&data->code_generator()->block_starts()}
            << "\"data\":\"";
#ifdef ENABLE_DISASSEMBLER
    std::stringstream disassembly_stream;
    code->Disassemble(nullptr, disassembly_stream);
    std::string disassembly_string(disassembly_stream.str());
    for (const auto& c : disassembly_string) {
      json_of << AsEscapedUC16ForJSON(c);
    }
#endif  // ENABLE_DISASSEMBLER
    json_of << "\"}\n],\n";
    json_of << "\"nodePositions\":";
    json_of << data->source_position_output() << ",\n";
    JsonPrintAllSourceWithPositions(json_of, data->info(), isolate());
    json_of << "\n}";
  }
  if (info()->trace_turbo_json_enabled() ||
      info()->trace_turbo_graph_enabled()) {
    CodeTracer::Scope tracing_scope(data->GetCodeTracer());
    OFStream os(tracing_scope.file());
    os << "---------------------------------------------------\n"
       << "Finished compiling method " << info()->GetDebugName().get()
       << " using Turbofan" << std::endl;
  }
  return code;
}
2772 :
2773 1179316 : bool PipelineImpl::SelectInstructionsAndAssemble(
2774 : CallDescriptor* call_descriptor) {
2775 : Linkage linkage(call_descriptor);
2776 :
2777 : // Perform instruction selection and register allocation.
2778 1179316 : if (!SelectInstructions(&linkage)) return false;
2779 :
2780 : // Generate the final machine code.
2781 2358636 : AssembleCode(&linkage);
2782 1179318 : return true;
2783 : }
2784 :
2785 1112840 : MaybeHandle<Code> PipelineImpl::GenerateCode(CallDescriptor* call_descriptor) {
2786 1112840 : if (!SelectInstructionsAndAssemble(call_descriptor))
2787 0 : return MaybeHandle<Code>();
2788 1112842 : return FinalizeCode();
2789 : }
2790 :
2791 1512685 : bool PipelineImpl::CommitDependencies(Handle<Code> code) {
2792 1969259 : return data_->dependencies() == nullptr ||
2793 1969259 : data_->dependencies()->Commit(code);
2794 : }
2795 :
2796 : namespace {
2797 :
// Dumps the current instruction sequence, tagged with {phase_name}, to the
// Turbolizer JSON file and/or the code tracer, depending on which tracing
// flags are enabled.
void TraceSequence(OptimizedCompilationInfo* info, PipelineData* data,
                   const char* phase_name) {
  if (info->trace_turbo_json_enabled()) {
    AllowHandleDereference allow_deref;
    TurboJsonFile json_of(info, std::ios_base::app);
    json_of << "{\"name\":\"" << phase_name << "\",\"type\":\"sequence\",";
    json_of << InstructionSequenceAsJSON{data->sequence()};
    json_of << "},\n";
  }
  if (info->trace_turbo_graph_enabled()) {
    AllowHandleDereference allow_deref;
    CodeTracer::Scope tracing_scope(data->GetCodeTracer());
    OFStream os(tracing_scope.file());
    os << "----- Instruction sequence " << phase_name << " -----\n"
       << *data->sequence();
  }
}
2815 :
2816 : } // namespace
2817 :
// Runs the full register-allocation phase sequence (constraints, live
// ranges, linear-scan allocation for general and FP registers, spill slots,
// reference maps, control-flow resolution, move optimization) over the
// current instruction sequence, optionally verifying the assignment.
void PipelineImpl::AllocateRegisters(const RegisterConfiguration* config,
                                     CallDescriptor* call_descriptor,
                                     bool run_verifier) {
  PipelineData* data = this->data_;
  // Don't track usage for this zone in compiler stats.
  std::unique_ptr<Zone> verifier_zone;
  RegisterAllocatorVerifier* verifier = nullptr;
  if (run_verifier) {
    verifier_zone.reset(new Zone(data->allocator(), ZONE_NAME));
    verifier = new (verifier_zone.get()) RegisterAllocatorVerifier(
        verifier_zone.get(), config, data->sequence());
  }

#ifdef DEBUG
  data_->sequence()->ValidateEdgeSplitForm();
  data_->sequence()->ValidateDeferredBlockEntryPaths();
  data_->sequence()->ValidateDeferredBlockExitPaths();
#endif

  data->InitializeRegisterAllocationData(config, call_descriptor);
  // OSR compilations need their frame set up before allocation.
  if (info()->is_osr()) data->osr_helper()->SetupFrame(data->frame());

  Run<MeetRegisterConstraintsPhase>();
  Run<ResolvePhisPhase>();
  Run<BuildLiveRangesPhase>();
  Run<BuildBundlesPhase>();

  TraceSequence(info(), data, "before register allocation");
  if (verifier != nullptr) {
    CHECK(!data->register_allocation_data()->ExistsUseWithoutDefinition());
    CHECK(data->register_allocation_data()
              ->RangesDefinedInDeferredStayInDeferred());
  }

  if (info()->trace_turbo_json_enabled() && !data->MayHaveUnverifiableGraph()) {
    TurboCfgFile tcf(isolate());
    tcf << AsC1VRegisterAllocationData("PreAllocation",
                                       data->register_allocation_data());
  }

  // Optionally splinter live ranges around deferred code before allocation,
  // and merge them back afterwards (see MergeSplintersPhase below).
  if (FLAG_turbo_preprocess_ranges) {
    Run<SplinterLiveRangesPhase>();
    if (info()->trace_turbo_json_enabled() &&
        !data->MayHaveUnverifiableGraph()) {
      TurboCfgFile tcf(isolate());
      tcf << AsC1VRegisterAllocationData("PostSplinter",
                                         data->register_allocation_data());
    }
  }

  Run<AllocateGeneralRegistersPhase<LinearScanAllocator>>();

  // Floating-point registers are only allocated when actually used.
  if (data->sequence()->HasFPVirtualRegisters()) {
    Run<AllocateFPRegistersPhase<LinearScanAllocator>>();
  }

  if (FLAG_turbo_preprocess_ranges) {
    Run<MergeSplintersPhase>();
  }

  Run<AssignSpillSlotsPhase>();

  Run<CommitAssignmentPhase>();

  // TODO(chromium:725559): remove this check once
  // we understand the cause of the bug. We keep just the
  // check at the end of the allocation.
  if (verifier != nullptr) {
    verifier->VerifyAssignment("Immediately after CommitAssignmentPhase.");
  }

  Run<PopulateReferenceMapsPhase>();

  Run<ConnectRangesPhase>();

  Run<ResolveControlFlowPhase>();
  if (FLAG_turbo_move_optimization) {
    Run<OptimizeMovesPhase>();
  }

  Run<LocateSpillSlotsPhase>();

  TraceSequence(info(), data, "after register allocation");

  if (verifier != nullptr) {
    verifier->VerifyAssignment("End of regalloc pipeline.");
    verifier->VerifyGapMoves();
  }

  if (info()->trace_turbo_json_enabled() && !data->MayHaveUnverifiableGraph()) {
    TurboCfgFile tcf(isolate());
    tcf << AsC1VRegisterAllocationData("CodeGen",
                                       data->register_allocation_data());
  }

  data->DeleteRegisterAllocationZone();
}
2915 :
2916 37044777 : OptimizedCompilationInfo* PipelineImpl::info() const { return data_->info(); }
2917 :
2918 1579170 : Isolate* PipelineImpl::isolate() const { return data_->isolate(); }
2919 :
// Convenience accessor for the code generator held by the pipeline data.
CodeGenerator* PipelineImpl::code_generator() const {
  return data_->code_generator();
}
2923 :
2924 : } // namespace compiler
2925 : } // namespace internal
2926 178779 : } // namespace v8
|