Line data Source code
1 : // Copyright 2014 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #include "src/compiler/pipeline.h"
6 :
7 : #include <fstream> // NOLINT(readability/streams)
8 : #include <iostream>
9 : #include <memory>
10 : #include <sstream>
11 :
12 : #include "src/assembler-inl.h"
13 : #include "src/base/adapters.h"
14 : #include "src/base/optional.h"
15 : #include "src/base/platform/elapsed-timer.h"
16 : #include "src/bootstrapper.h"
17 : #include "src/code-tracer.h"
18 : #include "src/compiler.h"
19 : #include "src/compiler/backend/code-generator.h"
20 : #include "src/compiler/backend/frame-elider.h"
21 : #include "src/compiler/backend/instruction-selector.h"
22 : #include "src/compiler/backend/instruction.h"
23 : #include "src/compiler/backend/jump-threading.h"
24 : #include "src/compiler/backend/live-range-separator.h"
25 : #include "src/compiler/backend/move-optimizer.h"
26 : #include "src/compiler/backend/register-allocator-verifier.h"
27 : #include "src/compiler/backend/register-allocator.h"
28 : #include "src/compiler/basic-block-instrumentor.h"
29 : #include "src/compiler/branch-elimination.h"
30 : #include "src/compiler/bytecode-graph-builder.h"
31 : #include "src/compiler/checkpoint-elimination.h"
32 : #include "src/compiler/common-operator-reducer.h"
33 : #include "src/compiler/compilation-dependencies.h"
34 : #include "src/compiler/compiler-source-position-table.h"
35 : #include "src/compiler/constant-folding-reducer.h"
36 : #include "src/compiler/control-flow-optimizer.h"
37 : #include "src/compiler/dead-code-elimination.h"
38 : #include "src/compiler/effect-control-linearizer.h"
39 : #include "src/compiler/escape-analysis-reducer.h"
40 : #include "src/compiler/escape-analysis.h"
41 : #include "src/compiler/graph-trimmer.h"
42 : #include "src/compiler/graph-visualizer.h"
43 : #include "src/compiler/js-call-reducer.h"
44 : #include "src/compiler/js-context-specialization.h"
45 : #include "src/compiler/js-create-lowering.h"
46 : #include "src/compiler/js-generic-lowering.h"
47 : #include "src/compiler/js-heap-broker.h"
48 : #include "src/compiler/js-heap-copy-reducer.h"
49 : #include "src/compiler/js-inlining-heuristic.h"
50 : #include "src/compiler/js-intrinsic-lowering.h"
51 : #include "src/compiler/js-native-context-specialization.h"
52 : #include "src/compiler/js-typed-lowering.h"
53 : #include "src/compiler/load-elimination.h"
54 : #include "src/compiler/loop-analysis.h"
55 : #include "src/compiler/loop-peeling.h"
56 : #include "src/compiler/loop-variable-optimizer.h"
57 : #include "src/compiler/machine-graph-verifier.h"
58 : #include "src/compiler/machine-operator-reducer.h"
59 : #include "src/compiler/memory-optimizer.h"
60 : #include "src/compiler/node-origin-table.h"
61 : #include "src/compiler/osr.h"
62 : #include "src/compiler/pipeline-statistics.h"
63 : #include "src/compiler/redundancy-elimination.h"
64 : #include "src/compiler/schedule.h"
65 : #include "src/compiler/scheduler.h"
66 : #include "src/compiler/select-lowering.h"
67 : #include "src/compiler/serializer-for-background-compilation.h"
68 : #include "src/compiler/simplified-lowering.h"
69 : #include "src/compiler/simplified-operator-reducer.h"
70 : #include "src/compiler/simplified-operator.h"
71 : #include "src/compiler/store-store-elimination.h"
72 : #include "src/compiler/type-narrowing-reducer.h"
73 : #include "src/compiler/typed-optimization.h"
74 : #include "src/compiler/typer.h"
75 : #include "src/compiler/value-numbering-reducer.h"
76 : #include "src/compiler/verifier.h"
77 : #include "src/compiler/wasm-compiler.h"
78 : #include "src/compiler/zone-stats.h"
79 : #include "src/disassembler.h"
80 : #include "src/isolate-inl.h"
81 : #include "src/objects/shared-function-info.h"
82 : #include "src/optimized-compilation-info.h"
83 : #include "src/ostreams.h"
84 : #include "src/parsing/parse-info.h"
85 : #include "src/register-configuration.h"
86 : #include "src/utils.h"
87 : #include "src/wasm/function-body-decoder.h"
88 : #include "src/wasm/function-compiler.h"
89 : #include "src/wasm/wasm-engine.h"
90 :
91 : namespace v8 {
92 : namespace internal {
93 : namespace compiler {
94 :
// Turbofan can only handle 2^16 control inputs. Since each control flow split
// requires at least two bytes (jump and offset), we limit the bytecode size
// to 128K bytes.
// constexpr: this is a compile-time constant; keeps it usable in constant
// expressions and guarantees no runtime initialization.
constexpr int kMaxBytecodeSizeForTurbofan = 128 * 1024;
99 :
// Shared mutable state for one run of the TurboFan pipeline. Owns four
// zones (graph, instruction, codegen, register allocation) via
// ZoneStats::Scope members, plus the objects placement-allocated inside
// them. Zones are destroyed individually (DeleteGraphZone() etc.) so memory
// is released as soon as a pipeline stage no longer needs it.
class PipelineData {
 public:
  // For main entry point.
  // NOTE: several members below are allocated into graph_zone_ with
  // placement new; they are freed wholesale when the zone is destroyed.
  PipelineData(ZoneStats* zone_stats, Isolate* isolate,
               OptimizedCompilationInfo* info,
               PipelineStatistics* pipeline_statistics)
      : isolate_(isolate),
        allocator_(isolate->allocator()),
        info_(info),
        debug_name_(info_->GetDebugName()),
        may_have_unverifiable_graph_(false),
        zone_stats_(zone_stats),
        pipeline_statistics_(pipeline_statistics),
        graph_zone_scope_(zone_stats_, ZONE_NAME),
        graph_zone_(graph_zone_scope_.zone()),
        instruction_zone_scope_(zone_stats_, ZONE_NAME),
        instruction_zone_(instruction_zone_scope_.zone()),
        codegen_zone_scope_(zone_stats_, ZONE_NAME),
        codegen_zone_(codegen_zone_scope_.zone()),
        register_allocation_zone_scope_(zone_stats_, ZONE_NAME),
        register_allocation_zone_(register_allocation_zone_scope_.zone()),
        assembler_options_(AssemblerOptions::Default(isolate)) {
    PhaseScope scope(pipeline_statistics, "init pipeline data");
    graph_ = new (graph_zone_) Graph(graph_zone_);
    source_positions_ = new (graph_zone_) SourcePositionTable(graph_);
    // The node origin table is only needed when emitting the JSON trace.
    node_origins_ = info->trace_turbo_json_enabled()
                        ? new (graph_zone_) NodeOriginTable(graph_)
                        : nullptr;
    simplified_ = new (graph_zone_) SimplifiedOperatorBuilder(graph_zone_);
    machine_ = new (graph_zone_) MachineOperatorBuilder(
        graph_zone_, MachineType::PointerRepresentation(),
        InstructionSelector::SupportedMachineOperatorFlags(),
        InstructionSelector::AlignmentRequirements());
    common_ = new (graph_zone_) CommonOperatorBuilder(graph_zone_);
    javascript_ = new (graph_zone_) JSOperatorBuilder(graph_zone_);
    jsgraph_ = new (graph_zone_)
        JSGraph(isolate_, graph_, common_, javascript_, simplified_, machine_);
    // Broker and dependencies live in the info's zone, not graph_zone_:
    // they must outlive the graph zone (see DeleteCodegenZone, which only
    // nulls the pointers).
    broker_ = new (info_->zone()) JSHeapBroker(isolate_, info_->zone());
    dependencies_ =
        new (info_->zone()) CompilationDependencies(broker_, info_->zone());
  }

  // For WebAssembly compile entry point.
  // Graph, operator builders and tables are supplied by the caller (via the
  // MachineGraph); no isolate is available on this path.
  PipelineData(ZoneStats* zone_stats, wasm::WasmEngine* wasm_engine,
               OptimizedCompilationInfo* info, MachineGraph* mcgraph,
               PipelineStatistics* pipeline_statistics,
               SourcePositionTable* source_positions,
               NodeOriginTable* node_origins,
               const AssemblerOptions& assembler_options)
      : isolate_(nullptr),
        wasm_engine_(wasm_engine),
        allocator_(wasm_engine->allocator()),
        info_(info),
        debug_name_(info_->GetDebugName()),
        may_have_unverifiable_graph_(false),
        zone_stats_(zone_stats),
        pipeline_statistics_(pipeline_statistics),
        graph_zone_scope_(zone_stats_, ZONE_NAME),
        graph_zone_(graph_zone_scope_.zone()),
        graph_(mcgraph->graph()),
        source_positions_(source_positions),
        node_origins_(node_origins),
        machine_(mcgraph->machine()),
        common_(mcgraph->common()),
        mcgraph_(mcgraph),
        instruction_zone_scope_(zone_stats_, ZONE_NAME),
        instruction_zone_(instruction_zone_scope_.zone()),
        codegen_zone_scope_(zone_stats_, ZONE_NAME),
        codegen_zone_(codegen_zone_scope_.zone()),
        register_allocation_zone_scope_(zone_stats_, ZONE_NAME),
        register_allocation_zone_(register_allocation_zone_scope_.zone()),
        assembler_options_(assembler_options) {}

  // For CodeStubAssembler and machine graph testing entry point.
  // The graph and (optionally) schedule already exist; operator builders are
  // still allocated here because CSA graphs are lowered further.
  PipelineData(ZoneStats* zone_stats, OptimizedCompilationInfo* info,
               Isolate* isolate, Graph* graph, Schedule* schedule,
               SourcePositionTable* source_positions,
               NodeOriginTable* node_origins, JumpOptimizationInfo* jump_opt,
               const AssemblerOptions& assembler_options)
      : isolate_(isolate),
        allocator_(isolate->allocator()),
        info_(info),
        debug_name_(info_->GetDebugName()),
        zone_stats_(zone_stats),
        graph_zone_scope_(zone_stats_, ZONE_NAME),
        graph_zone_(graph_zone_scope_.zone()),
        graph_(graph),
        source_positions_(source_positions),
        node_origins_(node_origins),
        schedule_(schedule),
        instruction_zone_scope_(zone_stats_, ZONE_NAME),
        instruction_zone_(instruction_zone_scope_.zone()),
        codegen_zone_scope_(zone_stats_, ZONE_NAME),
        codegen_zone_(codegen_zone_scope_.zone()),
        register_allocation_zone_scope_(zone_stats_, ZONE_NAME),
        register_allocation_zone_(register_allocation_zone_scope_.zone()),
        jump_optimization_info_(jump_opt),
        assembler_options_(assembler_options) {
    simplified_ = new (graph_zone_) SimplifiedOperatorBuilder(graph_zone_);
    machine_ = new (graph_zone_) MachineOperatorBuilder(
        graph_zone_, MachineType::PointerRepresentation(),
        InstructionSelector::SupportedMachineOperatorFlags(),
        InstructionSelector::AlignmentRequirements());
    common_ = new (graph_zone_) CommonOperatorBuilder(graph_zone_);
    javascript_ = new (graph_zone_) JSOperatorBuilder(graph_zone_);
    jsgraph_ = new (graph_zone_)
        JSGraph(isolate_, graph_, common_, javascript_, simplified_, machine_);
  }

  // For register allocation testing entry point.
  // Note: instruction_zone_ aliases the sequence's own zone here instead of
  // the fresh instruction_zone_scope_ zone.
  PipelineData(ZoneStats* zone_stats, OptimizedCompilationInfo* info,
               Isolate* isolate, InstructionSequence* sequence)
      : isolate_(isolate),
        allocator_(isolate->allocator()),
        info_(info),
        debug_name_(info_->GetDebugName()),
        zone_stats_(zone_stats),
        graph_zone_scope_(zone_stats_, ZONE_NAME),
        instruction_zone_scope_(zone_stats_, ZONE_NAME),
        instruction_zone_(sequence->zone()),
        sequence_(sequence),
        codegen_zone_scope_(zone_stats_, ZONE_NAME),
        codegen_zone_(codegen_zone_scope_.zone()),
        register_allocation_zone_scope_(zone_stats_, ZONE_NAME),
        register_allocation_zone_(register_allocation_zone_scope_.zone()),
        assembler_options_(AssemblerOptions::Default(isolate)) {}

  ~PipelineData() {
    // Must happen before zones are destroyed.
    delete code_generator_;
    code_generator_ = nullptr;
    DeleteTyper();

    DeleteRegisterAllocationZone();
    DeleteInstructionZone();
    DeleteCodegenZone();
    DeleteGraphZone();
  }

  Isolate* isolate() const { return isolate_; }
  AccountingAllocator* allocator() const { return allocator_; }
  OptimizedCompilationInfo* info() const { return info_; }
  ZoneStats* zone_stats() const { return zone_stats_; }
  CompilationDependencies* dependencies() const { return dependencies_; }
  PipelineStatistics* pipeline_statistics() { return pipeline_statistics_; }
  OsrHelper* osr_helper() { return &(*osr_helper_); }
  bool compilation_failed() const { return compilation_failed_; }
  void set_compilation_failed() { compilation_failed_ = true; }

  bool verify_graph() const { return verify_graph_; }
  void set_verify_graph(bool value) { verify_graph_ = value; }

  MaybeHandle<Code> code() { return code_; }
  // May only be set once per compilation (DCHECK enforced).
  void set_code(MaybeHandle<Code> code) {
    DCHECK(code_.is_null());
    code_ = code;
  }

  CodeGenerator* code_generator() const { return code_generator_; }

  // RawMachineAssembler generally produces graphs which cannot be verified.
  bool MayHaveUnverifiableGraph() const { return may_have_unverifiable_graph_; }

  Zone* graph_zone() const { return graph_zone_; }
  Graph* graph() const { return graph_; }
  SourcePositionTable* source_positions() const { return source_positions_; }
  NodeOriginTable* node_origins() const { return node_origins_; }
  MachineOperatorBuilder* machine() const { return machine_; }
  CommonOperatorBuilder* common() const { return common_; }
  JSOperatorBuilder* javascript() const { return javascript_; }
  JSGraph* jsgraph() const { return jsgraph_; }
  MachineGraph* mcgraph() const { return mcgraph_; }
  Handle<Context> native_context() const {
    return handle(info()->native_context(), isolate());
  }
  Handle<JSGlobalObject> global_object() const {
    return handle(info()->global_object(), isolate());
  }

  JSHeapBroker* broker() const { return broker_; }

  Schedule* schedule() const { return schedule_; }
  // May only be set once (DCHECK enforced); use reset_schedule() to clear.
  void set_schedule(Schedule* schedule) {
    DCHECK(!schedule_);
    schedule_ = schedule;
  }
  void reset_schedule() { schedule_ = nullptr; }

  Zone* instruction_zone() const { return instruction_zone_; }
  Zone* codegen_zone() const { return codegen_zone_; }
  InstructionSequence* sequence() const { return sequence_; }
  Frame* frame() const { return frame_; }
  std::vector<Handle<Map>>* embedded_maps() { return &embedded_maps_; }

  Zone* register_allocation_zone() const { return register_allocation_zone_; }
  RegisterAllocationData* register_allocation_data() const {
    return register_allocation_data_;
  }

  BasicBlockProfiler::Data* profiler_data() const { return profiler_data_; }
  void set_profiler_data(BasicBlockProfiler::Data* profiler_data) {
    profiler_data_ = profiler_data;
  }

  std::string const& source_position_output() const {
    return source_position_output_;
  }
  void set_source_position_output(std::string const& source_position_output) {
    source_position_output_ = source_position_output;
  }

  JumpOptimizationInfo* jump_optimization_info() const {
    return jump_optimization_info_;
  }

  const AssemblerOptions& assembler_options() const {
    return assembler_options_;
  }

  // On the wasm path there is no isolate, so the tracer comes from the
  // wasm engine instead.
  CodeTracer* GetCodeTracer() const {
    return wasm_engine_ == nullptr ? isolate_->GetCodeTracer()
                                   : wasm_engine_->GetCodeTracer();
  }

  // Creates the Typer with the flags accumulated via AddTyperFlag().
  // Must be balanced by DeleteTyper(); may only be called once in between.
  Typer* CreateTyper() {
    DCHECK_NULL(typer_);
    typer_ = new Typer(broker(), typer_flags_, graph());
    return typer_;
  }

  // Only valid before CreateTyper() has been called.
  void AddTyperFlag(Typer::Flag flag) {
    DCHECK_NULL(typer_);
    typer_flags_ |= flag;
  }

  void DeleteTyper() {
    delete typer_;
    typer_ = nullptr;
  }

  // The Delete*Zone methods are idempotent: they null the zone pointer and
  // every pointer into that zone so stale accesses fail fast.
  void DeleteGraphZone() {
    if (graph_zone_ == nullptr) return;
    graph_zone_scope_.Destroy();
    graph_zone_ = nullptr;
    graph_ = nullptr;
    source_positions_ = nullptr;
    node_origins_ = nullptr;
    simplified_ = nullptr;
    machine_ = nullptr;
    common_ = nullptr;
    javascript_ = nullptr;
    jsgraph_ = nullptr;
    mcgraph_ = nullptr;
    schedule_ = nullptr;
  }

  void DeleteInstructionZone() {
    if (instruction_zone_ == nullptr) return;
    instruction_zone_scope_.Destroy();
    instruction_zone_ = nullptr;
    sequence_ = nullptr;
  }

  void DeleteCodegenZone() {
    if (codegen_zone_ == nullptr) return;
    codegen_zone_scope_.Destroy();
    codegen_zone_ = nullptr;
    dependencies_ = nullptr;
    broker_ = nullptr;
    frame_ = nullptr;
  }

  void DeleteRegisterAllocationZone() {
    if (register_allocation_zone_ == nullptr) return;
    register_allocation_zone_scope_.Destroy();
    register_allocation_zone_ = nullptr;
    register_allocation_data_ = nullptr;
  }

  // Builds the InstructionSequence for the current schedule. Requires a
  // schedule and no pre-existing sequence.
  void InitializeInstructionSequence(const CallDescriptor* call_descriptor) {
    DCHECK_NULL(sequence_);
    InstructionBlocks* instruction_blocks =
        InstructionSequence::InstructionBlocksFor(instruction_zone(),
                                                  schedule());
    sequence_ = new (instruction_zone())
        InstructionSequence(isolate(), instruction_zone(), instruction_blocks);
    if (call_descriptor && call_descriptor->RequiresFrameAsIncoming()) {
      sequence_->instruction_blocks()[0]->mark_needs_frame();
    } else {
      // Frameless code must not expect callee-saved registers.
      DCHECK_EQ(0u, call_descriptor->CalleeSavedFPRegisters());
      DCHECK_EQ(0u, call_descriptor->CalleeSavedRegisters());
    }
  }

  void InitializeFrameData(CallDescriptor* call_descriptor) {
    DCHECK_NULL(frame_);
    int fixed_frame_size = 0;
    if (call_descriptor != nullptr) {
      fixed_frame_size = call_descriptor->CalculateFixedFrameSize();
    }
    frame_ = new (codegen_zone()) Frame(fixed_frame_size);
  }

  void InitializeRegisterAllocationData(const RegisterConfiguration* config,
                                        CallDescriptor* call_descriptor) {
    DCHECK_NULL(register_allocation_data_);
    register_allocation_data_ = new (register_allocation_zone())
        RegisterAllocationData(config, register_allocation_zone(), frame(),
                               sequence(), debug_name());
  }

  void InitializeOsrHelper() {
    DCHECK(!osr_helper_.has_value());
    osr_helper_.emplace(info());
  }

  void set_start_source_position(int position) {
    DCHECK_EQ(start_source_position_, kNoSourcePosition);
    start_source_position_ = position;
  }

  // Allocates the CodeGenerator (heap, not zone allocated — deleted in the
  // destructor before the zones go away).
  void InitializeCodeGenerator(Linkage* linkage,
                               std::unique_ptr<AssemblerBuffer> buffer) {
    DCHECK_NULL(code_generator_);

    code_generator_ = new CodeGenerator(
        codegen_zone(), frame(), linkage, sequence(), info(), isolate(),
        osr_helper_, start_source_position_, jump_optimization_info_,
        info()->GetPoisoningMitigationLevel(), assembler_options_,
        info_->builtin_index(), std::move(buffer));
  }

  void BeginPhaseKind(const char* phase_kind_name) {
    if (pipeline_statistics() != nullptr) {
      pipeline_statistics()->BeginPhaseKind(phase_kind_name);
    }
  }

  void EndPhaseKind() {
    if (pipeline_statistics() != nullptr) {
      pipeline_statistics()->EndPhaseKind();
    }
  }

  const char* debug_name() const { return debug_name_.get(); }

 private:
  Isolate* const isolate_;
  wasm::WasmEngine* const wasm_engine_ = nullptr;
  AccountingAllocator* const allocator_;
  OptimizedCompilationInfo* const info_;
  std::unique_ptr<char[]> debug_name_;
  bool may_have_unverifiable_graph_ = true;
  ZoneStats* const zone_stats_;
  PipelineStatistics* pipeline_statistics_ = nullptr;
  bool compilation_failed_ = false;
  bool verify_graph_ = false;
  int start_source_position_ = kNoSourcePosition;
  base::Optional<OsrHelper> osr_helper_;
  MaybeHandle<Code> code_;
  CodeGenerator* code_generator_ = nullptr;
  Typer* typer_ = nullptr;
  Typer::Flags typer_flags_ = Typer::kNoFlags;

  // All objects in the following group of fields are allocated in graph_zone_.
  // They are all set to nullptr when the graph_zone_ is destroyed.
  ZoneStats::Scope graph_zone_scope_;
  Zone* graph_zone_ = nullptr;
  Graph* graph_ = nullptr;
  SourcePositionTable* source_positions_ = nullptr;
  NodeOriginTable* node_origins_ = nullptr;
  SimplifiedOperatorBuilder* simplified_ = nullptr;
  MachineOperatorBuilder* machine_ = nullptr;
  CommonOperatorBuilder* common_ = nullptr;
  JSOperatorBuilder* javascript_ = nullptr;
  JSGraph* jsgraph_ = nullptr;
  MachineGraph* mcgraph_ = nullptr;
  Schedule* schedule_ = nullptr;

  // All objects in the following group of fields are allocated in
  // instruction_zone_. They are all set to nullptr when the instruction_zone_
  // is destroyed.
  ZoneStats::Scope instruction_zone_scope_;
  Zone* instruction_zone_;
  InstructionSequence* sequence_ = nullptr;

  // All objects in the following group of fields are allocated in
  // codegen_zone_. They are all set to nullptr when the codegen_zone_
  // is destroyed.
  ZoneStats::Scope codegen_zone_scope_;
  Zone* codegen_zone_;
  CompilationDependencies* dependencies_ = nullptr;
  JSHeapBroker* broker_ = nullptr;
  Frame* frame_ = nullptr;

  // embedded_maps_ keeps track of maps we've embedded as Uint32 constants.
  // We do this in order to notify the garbage collector at code-gen time.
  std::vector<Handle<Map>> embedded_maps_;

  // All objects in the following group of fields are allocated in
  // register_allocation_zone_. They are all set to nullptr when the zone is
  // destroyed.
  ZoneStats::Scope register_allocation_zone_scope_;
  Zone* register_allocation_zone_;
  RegisterAllocationData* register_allocation_data_ = nullptr;

  // Basic block profiling support.
  BasicBlockProfiler::Data* profiler_data_ = nullptr;

  // Source position output for --trace-turbo.
  std::string source_position_output_;

  JumpOptimizationInfo* jump_optimization_info_ = nullptr;
  AssemblerOptions assembler_options_;

  DISALLOW_COPY_AND_ASSIGN(PipelineData);
};
517 :
// Runs the pipeline phases (steps A through E below) over a PipelineData.
// Does not own the data; the caller controls its lifetime.
class PipelineImpl final {
 public:
  explicit PipelineImpl(PipelineData* data) : data_(data) {}

  // Helpers for executing pipeline phases.
  template <typename Phase, typename... Args>
  void Run(Args&&... args);

  // Step A. Run the graph creation and initial optimization passes.
  bool CreateGraph();

  // B. Run the concurrent optimization passes.
  bool OptimizeGraph(Linkage* linkage);

  // Substep B.1. Produce a scheduled graph.
  void ComputeScheduledGraph();

  // Substep B.2. Select instructions from a scheduled graph.
  bool SelectInstructions(Linkage* linkage);

  // Step C. Run the code assembly pass.
  void AssembleCode(Linkage* linkage,
                    std::unique_ptr<AssemblerBuffer> buffer = {});

  // Step D. Run the code finalization pass.
  MaybeHandle<Code> FinalizeCode(bool retire_broker = true);

  // Step E. Install any code dependencies.
  bool CommitDependencies(Handle<Code> code);

  void VerifyGeneratedCodeIsIdempotent();
  void RunPrintAndVerify(const char* phase, bool untyped = false);
  bool SelectInstructionsAndAssemble(CallDescriptor* call_descriptor);
  MaybeHandle<Code> GenerateCode(CallDescriptor* call_descriptor);
  void AllocateRegisters(const RegisterConfiguration* config,
                         CallDescriptor* call_descriptor, bool run_verifier);

  OptimizedCompilationInfo* info() const;
  Isolate* isolate() const;
  CodeGenerator* code_generator() const;

 private:
  PipelineData* const data_;
};
562 :
563 : namespace {
564 :
565 0 : void PrintFunctionSource(OptimizedCompilationInfo* info, Isolate* isolate,
566 : int source_id, Handle<SharedFunctionInfo> shared) {
567 0 : if (!shared->script()->IsUndefined(isolate)) {
568 0 : Handle<Script> script(Script::cast(shared->script()), isolate);
569 :
570 0 : if (!script->source()->IsUndefined(isolate)) {
571 0 : CodeTracer::Scope tracing_scope(isolate->GetCodeTracer());
572 : Object source_name = script->name();
573 0 : OFStream os(tracing_scope.file());
574 0 : os << "--- FUNCTION SOURCE (";
575 0 : if (source_name->IsString()) {
576 0 : os << String::cast(source_name)->ToCString().get() << ":";
577 : }
578 0 : os << shared->DebugName()->ToCString().get() << ") id{";
579 0 : os << info->optimization_id() << "," << source_id << "} start{";
580 0 : os << shared->StartPosition() << "} ---\n";
581 : {
582 : DisallowHeapAllocation no_allocation;
583 0 : int start = shared->StartPosition();
584 0 : int len = shared->EndPosition() - start;
585 : SubStringRange source(String::cast(script->source()), no_allocation,
586 : start, len);
587 0 : for (const auto& c : source) {
588 0 : os << AsReversiblyEscapedUC16(c);
589 : }
590 : }
591 :
592 0 : os << "\n--- END ---\n";
593 : }
594 : }
595 0 : }
596 :
597 : // Print information for the given inlining: which function was inlined and
598 : // where the inlining occurred.
599 0 : void PrintInlinedFunctionInfo(
600 : OptimizedCompilationInfo* info, Isolate* isolate, int source_id,
601 : int inlining_id, const OptimizedCompilationInfo::InlinedFunctionHolder& h) {
602 0 : CodeTracer::Scope tracing_scope(isolate->GetCodeTracer());
603 0 : OFStream os(tracing_scope.file());
604 0 : os << "INLINE (" << h.shared_info->DebugName()->ToCString().get() << ") id{"
605 0 : << info->optimization_id() << "," << source_id << "} AS " << inlining_id
606 0 : << " AT ";
607 0 : const SourcePosition position = h.position.position;
608 0 : if (position.IsKnown()) {
609 0 : os << "<" << position.InliningId() << ":" << position.ScriptOffset() << ">";
610 : } else {
611 0 : os << "<?>";
612 : }
613 : os << std::endl;
614 0 : }
615 :
616 : // Print the source of all functions that participated in this optimizing
617 : // compilation. For inlined functions print source position of their inlining.
618 0 : void PrintParticipatingSource(OptimizedCompilationInfo* info,
619 : Isolate* isolate) {
620 : AllowDeferredHandleDereference allow_deference_for_print_code;
621 :
622 0 : SourceIdAssigner id_assigner(info->inlined_functions().size());
623 0 : PrintFunctionSource(info, isolate, -1, info->shared_info());
624 : const auto& inlined = info->inlined_functions();
625 0 : for (unsigned id = 0; id < inlined.size(); id++) {
626 0 : const int source_id = id_assigner.GetIdFor(inlined[id].shared_info);
627 0 : PrintFunctionSource(info, isolate, source_id, inlined[id].shared_info);
628 0 : PrintInlinedFunctionInfo(info, isolate, source_id, id, inlined[id]);
629 : }
630 0 : }
631 :
// Print the code after compiling it.
// Handles --print-opt-source, --print-code and --print-opt-code(-filter);
// disassembly is only available in ENABLE_DISASSEMBLER builds.
void PrintCode(Isolate* isolate, Handle<Code> code,
               OptimizedCompilationInfo* info) {
  if (FLAG_print_opt_source && info->IsOptimizing()) {
    PrintParticipatingSource(info, isolate);
  }

#ifdef ENABLE_DISASSEMBLER
  AllowDeferredHandleDereference allow_deference_for_print_code;
  bool print_code =
      FLAG_print_code ||
      (info->IsOptimizing() && FLAG_print_opt_code &&
       info->shared_info()->PassesFilter(FLAG_print_opt_code_filter));
  if (print_code) {
    std::unique_ptr<char[]> debug_name = info->GetDebugName();
    CodeTracer::Scope tracing_scope(isolate->GetCodeTracer());
    OFStream os(tracing_scope.file());

    // Print the source code if available.
    bool print_source = code->kind() == Code::OPTIMIZED_FUNCTION;
    if (print_source) {
      Handle<SharedFunctionInfo> shared = info->shared_info();
      if (shared->script()->IsScript() &&
          !Script::cast(shared->script())->source()->IsUndefined(isolate)) {
        os << "--- Raw source ---\n";
        StringCharacterStream stream(
            String::cast(Script::cast(shared->script())->source()),
            shared->StartPosition());
        // fun->end_position() points to the last character in the stream. We
        // need to compensate by adding one to calculate the length.
        int source_len = shared->EndPosition() - shared->StartPosition() + 1;
        for (int i = 0; i < source_len; i++) {
          if (stream.HasMore()) {
            os << AsReversiblyEscapedUC16(stream.GetNext());
          }
        }
        os << "\n\n";
      }
    }
    if (info->IsOptimizing()) {
      os << "--- Optimized code ---\n"
         << "optimization_id = " << info->optimization_id() << "\n";
    } else {
      os << "--- Code ---\n";
    }
    if (print_source) {
      Handle<SharedFunctionInfo> shared = info->shared_info();
      os << "source_position = " << shared->StartPosition() << "\n";
    }
    code->Disassemble(debug_name.get(), os);
    os << "--- End code ---\n";
  }
#endif  // ENABLE_DISASSEMBLER
}
686 :
687 2741001 : void TraceSchedule(OptimizedCompilationInfo* info, PipelineData* data,
688 : Schedule* schedule, const char* phase_name) {
689 2741001 : if (info->trace_turbo_json_enabled()) {
690 : AllowHandleDereference allow_deref;
691 4 : TurboJsonFile json_of(info, std::ios_base::app);
692 2 : json_of << "{\"name\":\"" << phase_name << "\",\"type\":\"schedule\""
693 2 : << ",\"data\":\"";
694 4 : std::stringstream schedule_stream;
695 2 : schedule_stream << *schedule;
696 : std::string schedule_string(schedule_stream.str());
697 5224 : for (const auto& c : schedule_string) {
698 10444 : json_of << AsEscapedUC16ForJSON(c);
699 : }
700 2 : json_of << "\"},\n";
701 : }
702 2741001 : if (info->trace_turbo_graph_enabled() || FLAG_trace_turbo_scheduler) {
703 : AllowHandleDereference allow_deref;
704 2 : CodeTracer::Scope tracing_scope(data->GetCodeTracer());
705 4 : OFStream os(tracing_scope.file());
706 2 : os << "-- Schedule --------------------------------------\n" << *schedule;
707 : }
708 2741001 : }
709 :
710 :
711 : class SourcePositionWrapper final : public Reducer {
712 : public:
713 : SourcePositionWrapper(Reducer* reducer, SourcePositionTable* table)
714 487497 : : reducer_(reducer), table_(table) {}
715 0 : ~SourcePositionWrapper() final = default;
716 :
717 1391 : const char* reducer_name() const override { return reducer_->reducer_name(); }
718 :
719 41841229 : Reduction Reduce(Node* node) final {
720 41841229 : SourcePosition const pos = table_->GetSourcePosition(node);
721 41841196 : SourcePositionTable::Scope position(table_, pos);
722 83682430 : return reducer_->Reduce(node);
723 : }
724 :
725 622769 : void Finalize() final { reducer_->Finalize(); }
726 :
727 : private:
728 : Reducer* const reducer_;
729 : SourcePositionTable* const table_;
730 :
731 : DISALLOW_COPY_AND_ASSIGN(SourcePositionWrapper);
732 : };
733 :
734 : class NodeOriginsWrapper final : public Reducer {
735 : public:
736 : NodeOriginsWrapper(Reducer* reducer, NodeOriginTable* table)
737 43 : : reducer_(reducer), table_(table) {}
738 0 : ~NodeOriginsWrapper() final = default;
739 :
740 1391 : const char* reducer_name() const override { return reducer_->reducer_name(); }
741 :
742 1391 : Reduction Reduce(Node* node) final {
743 1391 : NodeOriginTable::Scope position(table_, reducer_name(), node);
744 2782 : return reducer_->Reduce(node);
745 : }
746 :
747 51 : void Finalize() final { reducer_->Finalize(); }
748 :
749 : private:
750 : Reducer* const reducer_;
751 : NodeOriginTable* const table_;
752 :
753 : DISALLOW_COPY_AND_ASSIGN(NodeOriginsWrapper);
754 : };
755 :
756 20732459 : void AddReducer(PipelineData* data, GraphReducer* graph_reducer,
757 : Reducer* reducer) {
758 20732459 : if (data->info()->is_source_positions_enabled()) {
759 : void* const buffer = data->graph_zone()->New(sizeof(SourcePositionWrapper));
760 : SourcePositionWrapper* const wrapper =
761 487497 : new (buffer) SourcePositionWrapper(reducer, data->source_positions());
762 : reducer = wrapper;
763 : }
764 20732459 : if (data->info()->trace_turbo_json_enabled()) {
765 : void* const buffer = data->graph_zone()->New(sizeof(NodeOriginsWrapper));
766 : NodeOriginsWrapper* const wrapper =
767 43 : new (buffer) NodeOriginsWrapper(reducer, data->node_origins());
768 : reducer = wrapper;
769 : }
770 :
771 20732459 : graph_reducer->AddReducer(reducer);
772 20732382 : }
773 :
// RAII scope entered around each pipeline phase: it opens a statistics
// PhaseScope (skipped when |phase_name| is nullptr), a fresh temporary
// zone, and a node-origin phase scope. Members are declared in that order
// so they are also torn down in reverse order.
class PipelineRunScope {
 public:
  PipelineRunScope(PipelineData* data, const char* phase_name)
      : phase_scope_(
            phase_name == nullptr ? nullptr : data->pipeline_statistics(),
            phase_name),
        zone_scope_(data->zone_stats(), ZONE_NAME),
        origin_scope_(data->node_origins(), phase_name) {}

  // Temporary zone for the phase; released when the scope ends.
  Zone* zone() { return zone_scope_.zone(); }

 private:
  PhaseScope phase_scope_;
  ZoneStats::Scope zone_scope_;
  NodeOriginTable::PhaseScope origin_scope_;
};
790 :
// Creates a PipelineStatistics object for a JavaScript compilation when the
// turbo-stats flags are set (otherwise returns nullptr), and, when JSON
// tracing is enabled, truncates the trace file and writes the JSON header
// including the function source.
PipelineStatistics* CreatePipelineStatistics(Handle<Script> script,
                                             OptimizedCompilationInfo* info,
                                             Isolate* isolate,
                                             ZoneStats* zone_stats) {
  PipelineStatistics* pipeline_statistics = nullptr;

  if (FLAG_turbo_stats || FLAG_turbo_stats_nvp) {
    // Caller takes ownership of the raw pointer (stored in a unique_ptr by
    // PipelineCompilationJob).
    pipeline_statistics =
        new PipelineStatistics(info, isolate->GetTurboStatistics(), zone_stats);
    pipeline_statistics->BeginPhaseKind("initializing");
  }

  if (info->trace_turbo_json_enabled()) {
    TurboJsonFile json_of(info, std::ios_base::trunc);
    json_of << "{\"function\" : ";
    // -1: no inlining id for the top-level function.
    JsonPrintFunctionSource(json_of, -1, info->GetDebugName(), script, isolate,
                            info->shared_info());
    // Leaves the "phases" array open; later phases append entries.
    json_of << ",\n\"phases\":[";
  }

  return pipeline_statistics;
}
813 :
// WebAssembly variant of CreatePipelineStatistics: gated on
// FLAG_turbo_stats_wasm and using the wasm engine's statistics. When JSON
// tracing is enabled, it additionally embeds the escaped raw wasm
// disassembly and a source-line-to-bytecode-position array in the header.
PipelineStatistics* CreatePipelineStatistics(
    wasm::WasmEngine* wasm_engine, wasm::FunctionBody function_body,
    const wasm::WasmModule* wasm_module, OptimizedCompilationInfo* info,
    ZoneStats* zone_stats) {
  PipelineStatistics* pipeline_statistics = nullptr;

  if (FLAG_turbo_stats_wasm) {
    pipeline_statistics = new PipelineStatistics(
        info, wasm_engine->GetOrCreateTurboStatistics(), zone_stats);
    pipeline_statistics->BeginPhaseKind("initializing");
  }

  if (info->trace_turbo_json_enabled()) {
    TurboJsonFile json_of(info, std::ios_base::trunc);
    std::unique_ptr<char[]> function_name = info->GetDebugName();
    json_of << "{\"function\":\"" << function_name.get() << "\", \"source\":\"";
    AccountingAllocator allocator;
    std::ostringstream disassembly;
    std::vector<int> source_positions;
    wasm::PrintRawWasmCode(&allocator, function_body, wasm_module,
                           wasm::kPrintLocals, disassembly, &source_positions);
    // Escape each character so the disassembly is valid inside a JSON string.
    for (const auto& c : disassembly.str()) {
      json_of << AsEscapedUC16ForJSON(c);
    }
    json_of << "\",\n\"sourceLineToBytecodePosition\" : [";
    bool insert_comma = false;
    for (auto val : source_positions) {
      if (insert_comma) {
        json_of << ", ";
      }
      json_of << val;
      insert_comma = true;
    }
    // Leaves the "phases" array open; later phases append entries.
    json_of << "],\n\"phases\":[";
  }

  return pipeline_statistics;
}
852 :
853 : } // namespace
854 :
// Compilation job driving the full TurboFan pipeline for a JS function:
// Prepare (graph building) on the main thread, Execute (optimization and
// assembly, potentially off-thread), and Finalize (code installation).
class PipelineCompilationJob final : public OptimizedCompilationJob {
 public:
  PipelineCompilationJob(Isolate* isolate,
                         Handle<SharedFunctionInfo> shared_info,
                         Handle<JSFunction> function)
      // Note that the OptimizedCompilationInfo is not initialized at the time
      // we pass it to the CompilationJob constructor, but it is not
      // dereferenced there.
      // NOTE: the member initialization order below is load-bearing:
      // zone_/zone_stats_ must exist before compilation_info_, which must
      // exist before pipeline_statistics_ and data_.
      : OptimizedCompilationJob(
            function->GetIsolate()->stack_guard()->real_climit(),
            &compilation_info_, "TurboFan"),
        zone_(function->GetIsolate()->allocator(), ZONE_NAME),
        zone_stats_(function->GetIsolate()->allocator()),
        compilation_info_(&zone_, function->GetIsolate(), shared_info,
                          function),
        pipeline_statistics_(CreatePipelineStatistics(
            handle(Script::cast(shared_info->script()), isolate),
            compilation_info(), function->GetIsolate(), &zone_stats_)),
        data_(&zone_stats_, function->GetIsolate(), compilation_info(),
              pipeline_statistics_.get()),
        pipeline_(&data_),
        linkage_(nullptr) {}

 protected:
  Status PrepareJobImpl(Isolate* isolate) final;
  Status ExecuteJobImpl() final;
  Status FinalizeJobImpl(Isolate* isolate) final;

  // Registers weak object to optimized code dependencies.
  void RegisterWeakObjectsInOptimizedCode(Handle<Code> code, Isolate* isolate);

 private:
  Zone zone_;
  ZoneStats zone_stats_;
  OptimizedCompilationInfo compilation_info_;
  std::unique_ptr<PipelineStatistics> pipeline_statistics_;
  PipelineData data_;
  PipelineImpl pipeline_;
  Linkage* linkage_;  // Allocated in compilation_info_'s zone in PrepareJobImpl.

  DISALLOW_COPY_AND_ASSIGN(PipelineCompilationJob);
};
897 :
// Main-thread preparation: bails out on oversized bytecode, derives
// compilation flags from the global FLAG_* settings, computes the linkage,
// and builds the TurboFan graph.
PipelineCompilationJob::Status PipelineCompilationJob::PrepareJobImpl(
    Isolate* isolate) {
  if (compilation_info()->bytecode_array()->length() >
      kMaxBytecodeSizeForTurbofan) {
    return AbortOptimization(BailoutReason::kFunctionTooBig);
  }

  // Translate command-line flags into per-compilation settings.
  if (!FLAG_always_opt) {
    compilation_info()->MarkAsBailoutOnUninitialized();
  }
  if (FLAG_turbo_loop_peeling) {
    compilation_info()->MarkAsLoopPeelingEnabled();
  }
  if (FLAG_turbo_inlining) {
    compilation_info()->MarkAsInliningEnabled();
  }
  if (FLAG_inline_accessors) {
    compilation_info()->MarkAsAccessorInliningEnabled();
  }

  // This is the bottleneck for computing and setting poisoning level in the
  // optimizing compiler.
  PoisoningMitigationLevel load_poisoning =
      PoisoningMitigationLevel::kDontPoison;
  if (FLAG_untrusted_code_mitigations) {
    // For full mitigations, this can be changed to
    // PoisoningMitigationLevel::kPoisonAll.
    load_poisoning = PoisoningMitigationLevel::kPoisonCriticalOnly;
  }
  compilation_info()->SetPoisoningMitigationLevel(load_poisoning);

  if (FLAG_turbo_allocation_folding) {
    compilation_info()->MarkAsAllocationFoldingEnabled();
  }

  // A one-closure cell means this closure is the only one using its
  // feedback, so specializing to its function context is safe.
  if (compilation_info()->closure()->raw_feedback_cell()->map() ==
      ReadOnlyRoots(isolate).one_closure_cell_map()) {
    compilation_info()->MarkAsFunctionContextSpecializing();
  }

  if (compilation_info()->is_source_positions_enabled()) {
    SharedFunctionInfo::EnsureSourcePositionsAvailable(
        isolate, compilation_info()->shared_info());
  }

  data_.set_start_source_position(
      compilation_info()->shared_info()->StartPosition());

  // The Linkage lives in the compilation info's zone (placement new).
  linkage_ = new (compilation_info()->zone()) Linkage(
      Linkage::ComputeIncoming(compilation_info()->zone(), compilation_info()));

  if (!pipeline_.CreateGraph()) {
    if (isolate->has_pending_exception()) return FAILED;  // Stack overflowed.
    return AbortOptimization(BailoutReason::kGraphBuildingFailed);
  }

  if (compilation_info()->is_osr()) data_.InitializeOsrHelper();

  // Make sure that we have generated the deopt entries code.  This is in order
  // to avoid triggering the generation of deopt entries later during code
  // assembly.
  Deoptimizer::EnsureCodeForDeoptimizationEntries(isolate);

  return SUCCEEDED;
}
963 :
964 460585 : PipelineCompilationJob::Status PipelineCompilationJob::ExecuteJobImpl() {
965 460585 : if (!pipeline_.OptimizeGraph(linkage_)) return FAILED;
966 921159 : pipeline_.AssembleCode(linkage_);
967 460580 : return SUCCEEDED;
968 : }
969 :
// Main-thread finalization: materializes the Code object, commits the
// recorded compilation dependencies, and installs the code on the
// compilation info and native context.
PipelineCompilationJob::Status PipelineCompilationJob::FinalizeJobImpl(
    Isolate* isolate) {
  MaybeHandle<Code> maybe_code = pipeline_.FinalizeCode();
  Handle<Code> code;
  if (!maybe_code.ToHandle(&code)) {
    // No code and no recorded bailout reason means plain code generation
    // failure; otherwise propagate the existing failure.
    if (compilation_info()->bailout_reason() == BailoutReason::kNoReason) {
      return AbortOptimization(BailoutReason::kCodeGenerationFailed);
    }
    return FAILED;
  }
  if (!pipeline_.CommitDependencies(code)) {
    // A dependency was invalidated concurrently; the job can be retried.
    return RetryOptimization(BailoutReason::kBailedOutDueToDependencyChange);
  }

  compilation_info()->SetCode(code);
  compilation_info()->native_context()->AddOptimizedCode(*code);
  RegisterWeakObjectsInOptimizedCode(code, isolate);
  return SUCCEEDED;
}
989 :
// Scans the finished code object for embedded objects that are weakly held,
// registers any embedded Maps (plus maps recorded during compilation) as
// retained with the heap, and marks the code as able to hold weak objects.
void PipelineCompilationJob::RegisterWeakObjectsInOptimizedCode(
    Handle<Code> code, Isolate* isolate) {
  DCHECK(code->is_optimized_code());
  std::vector<Handle<Map>> retained_maps;
  {
    // No GC may run while iterating relocation info of the raw code object.
    DisallowHeapAllocation no_gc;
    int const mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
    for (RelocIterator it(*code, mode_mask); !it.done(); it.next()) {
      RelocInfo::Mode mode = it.rinfo()->rmode();
      if (mode == RelocInfo::EMBEDDED_OBJECT &&
          code->IsWeakObjectInOptimizedCode(it.rinfo()->target_object())) {
        Handle<HeapObject> object(HeapObject::cast(it.rinfo()->target_object()),
                                  isolate);
        if (object->IsMap()) {
          retained_maps.push_back(Handle<Map>::cast(object));
        }
      }
    }
  }

  // Handle creation above had to happen before this (allocation-free) scan
  // finished; registering the maps may allocate, so do it outside the scope.
  for (Handle<Map> map : retained_maps) {
    isolate->heap()->AddRetainedMap(map);
  }

  // Additionally, gather embedded maps if we have any.
  for (Handle<Map> map : *data_.embedded_maps()) {
    if (code->IsWeakObjectInOptimizedCode(*map)) {
      isolate->heap()->AddRetainedMap(map);
    }
  }

  code->set_can_have_weak_objects(true);
}
1023 :
// Runs a single pipeline phase: opens a PipelineRunScope (statistics,
// temporary zone, node origins) named after the phase and invokes
// Phase::Run with the scope's temporary zone and any extra arguments.
template <typename Phase, typename... Args>
void PipelineImpl::Run(Args&&... args) {
  PipelineRunScope scope(this->data_, Phase::phase_name());
  Phase phase;
  phase.Run(this->data_, scope.zone(), std::forward<Args>(args)...);
}
1030 :
// Phase that builds the initial TurboFan graph from the function's
// bytecode via BytecodeGraphBuilder.
struct GraphBuilderPhase {
  static const char* phase_name() { return "bytecode graph builder"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    JSTypeHintLowering::Flags flags = JSTypeHintLowering::kNoFlags;
    if (data->info()->is_bailout_on_uninitialized()) {
      flags |= JSTypeHintLowering::kBailoutOnUninitialized;
    }
    // The top-level function gets a baseline invocation frequency of 1.
    CallFrequency frequency = CallFrequency(1.0f);
    BytecodeGraphBuilder graph_builder(
        temp_zone, data->info()->bytecode_array(), data->info()->shared_info(),
        handle(data->info()->closure()->feedback_vector(), data->isolate()),
        data->info()->osr_offset(), data->jsgraph(), frequency,
        data->source_positions(), data->native_context(),
        SourcePosition::kNotInlined, flags, true,
        data->info()->is_analyze_environment_liveness());
    graph_builder.CreateGraph();
  }
};
1050 :
1051 : namespace {
1052 :
1053 27974 : Maybe<OuterContext> GetModuleContext(Handle<JSFunction> closure) {
1054 27974 : Context current = closure->context();
1055 : size_t distance = 0;
1056 79274 : while (!current->IsNativeContext()) {
1057 25655 : if (current->IsModuleContext()) {
1058 : return Just(
1059 : OuterContext(handle(current, current->GetIsolate()), distance));
1060 : }
1061 25650 : current = current->previous();
1062 25650 : distance++;
1063 : }
1064 : return Nothing<OuterContext>();
1065 : }
1066 :
// Picks the context to specialize against: the function's own context when
// function-context specialization is enabled, otherwise the enclosing
// module context (if any) found by GetModuleContext.
Maybe<OuterContext> ChooseSpecializationContext(
    Isolate* isolate, OptimizedCompilationInfo* info) {
  if (info->is_function_context_specializing()) {
    DCHECK(info->has_context());
    return Just(OuterContext(handle(info->context(), isolate), 0));
  }
  return GetModuleContext(info->closure());
}
1075 :
1076 : } // anonymous namespace
1077 :
// Phase that runs context specialization, native-context specialization,
// call reduction, and inlining (plus supporting cleanup reducers) to a
// fixpoint over the graph.
struct InliningPhase {
  static const char* phase_name() { return "inlining"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    Isolate* isolate = data->isolate();
    OptimizedCompilationInfo* info = data->info();
    GraphReducer graph_reducer(temp_zone, data->graph(),
                               data->jsgraph()->Dead());
    DeadCodeElimination dead_code_elimination(&graph_reducer, data->graph(),
                                              data->common(), temp_zone);
    CheckpointElimination checkpoint_elimination(&graph_reducer);
    CommonOperatorReducer common_reducer(&graph_reducer, data->graph(),
                                         data->broker(), data->common(),
                                         data->machine(), temp_zone);
    JSCallReducer call_reducer(&graph_reducer, data->jsgraph(), data->broker(),
                               data->info()->is_bailout_on_uninitialized()
                                   ? JSCallReducer::kBailoutOnUninitialized
                                   : JSCallReducer::kNoFlags,
                               data->dependencies());
    JSContextSpecialization context_specialization(
        &graph_reducer, data->jsgraph(), data->broker(),
        ChooseSpecializationContext(isolate, data->info()),
        data->info()->is_function_context_specializing()
            ? data->info()->closure()
            : MaybeHandle<JSFunction>());
    JSNativeContextSpecialization::Flags flags =
        JSNativeContextSpecialization::kNoFlags;
    if (data->info()->is_accessor_inlining_enabled()) {
      flags |= JSNativeContextSpecialization::kAccessorInliningEnabled;
    }
    if (data->info()->is_bailout_on_uninitialized()) {
      flags |= JSNativeContextSpecialization::kBailoutOnUninitialized;
    }
    // Passing the OptimizedCompilationInfo's shared zone here as
    // JSNativeContextSpecialization allocates out-of-heap objects
    // that need to live until code generation.
    JSNativeContextSpecialization native_context_specialization(
        &graph_reducer, data->jsgraph(), data->broker(), flags,
        data->native_context(), data->dependencies(), temp_zone, info->zone());
    JSInliningHeuristic inlining(&graph_reducer,
                                 data->info()->is_inlining_enabled()
                                     ? JSInliningHeuristic::kGeneralInlining
                                     : JSInliningHeuristic::kRestrictedInlining,
                                 temp_zone, data->info(), data->jsgraph(),
                                 data->broker(), data->source_positions());
    JSIntrinsicLowering intrinsic_lowering(&graph_reducer, data->jsgraph());
    // Registration order matters: reducers are tried in this order on each
    // node until a fixpoint is reached.
    AddReducer(data, &graph_reducer, &dead_code_elimination);
    AddReducer(data, &graph_reducer, &checkpoint_elimination);
    AddReducer(data, &graph_reducer, &common_reducer);
    AddReducer(data, &graph_reducer, &native_context_specialization);
    AddReducer(data, &graph_reducer, &context_specialization);
    AddReducer(data, &graph_reducer, &intrinsic_lowering);
    AddReducer(data, &graph_reducer, &call_reducer);
    AddReducer(data, &graph_reducer, &inlining);
    graph_reducer.ReduceGraph();
  }
};
1135 :
1136 :
// Phase that runs the Typer over the graph, starting from the cached
// JSGraph nodes plus the True/False constants, with loop variable analysis.
struct TyperPhase {
  static const char* phase_name() { return "typer"; }

  void Run(PipelineData* data, Zone* temp_zone, Typer* typer) {
    NodeVector roots(temp_zone);
    data->jsgraph()->GetCachedNodes(&roots);

    // Make sure we always type True and False. Needed for escape analysis.
    roots.push_back(data->jsgraph()->TrueConstant());
    roots.push_back(data->jsgraph()->FalseConstant());

    LoopVariableOptimizer induction_vars(data->jsgraph()->graph(),
                                         data->common(), temp_zone);
    if (FLAG_turbo_loop_variable) induction_vars.Run();
    typer->Run(roots, &induction_vars);
  }
};
1154 :
// Phase that strips all type information from the graph: first from the
// cached root nodes directly, then from every reachable node via a local
// RemoveTypeReducer.
struct UntyperPhase {
  static const char* phase_name() { return "untyper"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    // Local reducer that removes the type of any typed node it visits.
    class RemoveTypeReducer final : public Reducer {
     public:
      const char* reducer_name() const override { return "RemoveTypeReducer"; }
      Reduction Reduce(Node* node) final {
        if (NodeProperties::IsTyped(node)) {
          NodeProperties::RemoveType(node);
          return Changed(node);
        }
        return NoChange();
      }
    };

    // Cached nodes may not be reachable from the end node, so untype them
    // explicitly before reducing the graph.
    NodeVector roots(temp_zone);
    data->jsgraph()->GetCachedNodes(&roots);
    for (Node* node : roots) {
      NodeProperties::RemoveType(node);
    }

    GraphReducer graph_reducer(temp_zone, data->graph(),
                               data->jsgraph()->Dead());
    RemoveTypeReducer remove_type_reducer;
    AddReducer(data, &graph_reducer, &remove_type_reducer);
    graph_reducer.ReduceGraph();
  }
};
1184 :
// Phase that has the broker serialize the standard (well-known) objects.
struct SerializeStandardObjectsPhase {
  static const char* phase_name() { return "serialize standard objects"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    data->broker()->SerializeStandardObjects();
  }
};
1192 :
// Phase that copies heap data referenced by the graph into the broker
// (via JSHeapCopyReducer) so later phases can run off the main thread.
struct CopyMetadataForConcurrentCompilePhase {
  static const char* phase_name() { return "serialize metadata"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    GraphReducer graph_reducer(temp_zone, data->graph(),
                               data->jsgraph()->Dead());
    JSHeapCopyReducer heap_copy_reducer(data->broker());
    AddReducer(data, &graph_reducer, &heap_copy_reducer);
    graph_reducer.ReduceGraph();

    // Some nodes that are no longer in the graph might still be in the cache.
    NodeVector cached_nodes(temp_zone);
    data->jsgraph()->GetCachedNodes(&cached_nodes);
    for (Node* const node : cached_nodes) graph_reducer.ReduceNode(node);
  }
};
1209 :
1210 : // TODO(turbofan): Move all calls from CopyMetaDataForConcurrentCompilePhase
1211 : // here. Also all the calls to Serialize* methods that are currently sprinkled
1212 : // over inlining will move here as well.
// Phase that runs the background-compilation serializer over the closure's
// bytecode.
struct SerializationPhase {
  static const char* phase_name() { return "serialize bytecode"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    SerializerForBackgroundCompilation serializer(data->broker(), temp_zone,
                                                  data->info()->closure());
    serializer.Run();
  }
};
1222 :
// Phase that lowers JS-level operators using type information: create
// lowering, constant folding, typed lowering/optimization, plus cleanup
// reducers, run to a fixpoint.
struct TypedLoweringPhase {
  static const char* phase_name() { return "typed lowering"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    GraphReducer graph_reducer(temp_zone, data->graph(),
                               data->jsgraph()->Dead());
    DeadCodeElimination dead_code_elimination(&graph_reducer, data->graph(),
                                              data->common(), temp_zone);
    JSCreateLowering create_lowering(&graph_reducer, data->dependencies(),
                                     data->jsgraph(), data->broker(),
                                     temp_zone);
    JSTypedLowering typed_lowering(&graph_reducer, data->jsgraph(),
                                   data->broker(), temp_zone);
    ConstantFoldingReducer constant_folding_reducer(
        &graph_reducer, data->jsgraph(), data->broker());
    TypedOptimization typed_optimization(&graph_reducer, data->dependencies(),
                                         data->jsgraph(), data->broker());
    SimplifiedOperatorReducer simple_reducer(&graph_reducer, data->jsgraph(),
                                             data->broker());
    CheckpointElimination checkpoint_elimination(&graph_reducer);
    CommonOperatorReducer common_reducer(&graph_reducer, data->graph(),
                                         data->broker(), data->common(),
                                         data->machine(), temp_zone);
    // Registration order matters: reducers are tried in this order on each
    // node until a fixpoint is reached.
    AddReducer(data, &graph_reducer, &dead_code_elimination);
    AddReducer(data, &graph_reducer, &create_lowering);
    AddReducer(data, &graph_reducer, &constant_folding_reducer);
    AddReducer(data, &graph_reducer, &typed_lowering);
    AddReducer(data, &graph_reducer, &typed_optimization);
    AddReducer(data, &graph_reducer, &simple_reducer);
    AddReducer(data, &graph_reducer, &checkpoint_elimination);
    AddReducer(data, &graph_reducer, &common_reducer);
    graph_reducer.ReduceGraph();
  }
};
1257 :
1258 :
// Phase that computes escape analysis results and then applies them to the
// graph via EscapeAnalysisReducer, verifying the replacements afterwards.
struct EscapeAnalysisPhase {
  static const char* phase_name() { return "escape analysis"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    // First the analysis pass over the whole graph ...
    EscapeAnalysis escape_analysis(data->jsgraph(), temp_zone);
    escape_analysis.ReduceGraph();
    // ... then a reduction pass that applies the analysis result.
    GraphReducer reducer(temp_zone, data->graph(), data->jsgraph()->Dead());
    EscapeAnalysisReducer escape_reducer(&reducer, data->jsgraph(),
                                         escape_analysis.analysis_result(),
                                         temp_zone);
    AddReducer(data, &reducer, &escape_reducer);
    reducer.ReduceGraph();
    // TODO(tebbi): Turn this into a debug mode check once we have confidence.
    escape_reducer.VerifyReplacement();
  }
};
1275 :
// Phase that runs SimplifiedLowering over all nodes, using the
// compilation's configured poisoning mitigation level.
struct SimplifiedLoweringPhase {
  static const char* phase_name() { return "simplified lowering"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    SimplifiedLowering lowering(data->jsgraph(), data->broker(), temp_zone,
                                data->source_positions(), data->node_origins(),
                                data->info()->GetPoisoningMitigationLevel());
    lowering.LowerAllNodes();
  }
};
1286 :
// Phase that trims the graph, builds the loop tree, and peels inner loops.
struct LoopPeelingPhase {
  static const char* phase_name() { return "loop peeling"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    // Trim first so the loop finder only sees reachable nodes; cached
    // JSGraph nodes are kept alive as extra roots.
    GraphTrimmer trimmer(temp_zone, data->graph());
    NodeVector roots(temp_zone);
    data->jsgraph()->GetCachedNodes(&roots);
    trimmer.TrimGraph(roots.begin(), roots.end());

    LoopTree* loop_tree =
        LoopFinder::BuildLoopTree(data->jsgraph()->graph(), temp_zone);
    LoopPeeler(data->graph(), data->common(), loop_tree, temp_zone,
               data->source_positions(), data->node_origins())
        .PeelInnerLoopsOfTree();
  }
};
1303 :
// Phase that removes loop exit markers from the graph (used when loop
// peeling is not performed).
struct LoopExitEliminationPhase {
  static const char* phase_name() { return "loop exit elimination"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    LoopPeeler::EliminateLoopExits(data->graph(), temp_zone);
  }
};
1311 :
// Phase that lowers remaining JS-level operators generically (without type
// information) via JSGenericLowering.
struct GenericLoweringPhase {
  static const char* phase_name() { return "generic lowering"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    GraphReducer graph_reducer(temp_zone, data->graph(),
                               data->jsgraph()->Dead());
    JSGenericLowering generic_lowering(data->jsgraph());
    AddReducer(data, &graph_reducer, &generic_lowering);
    graph_reducer.ReduceGraph();
  }
};
1323 :
// Phase that runs an early machine-level cleanup pass: dead code and
// redundancy elimination, simplified/machine/common operator reduction,
// and value numbering, to a fixpoint.
struct EarlyOptimizationPhase {
  static const char* phase_name() { return "early optimization"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    GraphReducer graph_reducer(temp_zone, data->graph(),
                               data->jsgraph()->Dead());
    DeadCodeElimination dead_code_elimination(&graph_reducer, data->graph(),
                                              data->common(), temp_zone);
    SimplifiedOperatorReducer simple_reducer(&graph_reducer, data->jsgraph(),
                                             data->broker());
    RedundancyElimination redundancy_elimination(&graph_reducer, temp_zone);
    // Value numbering allocates in the graph zone so numbered nodes
    // outlive this phase's temporary zone.
    ValueNumberingReducer value_numbering(temp_zone, data->graph()->zone());
    MachineOperatorReducer machine_reducer(&graph_reducer, data->jsgraph());
    CommonOperatorReducer common_reducer(&graph_reducer, data->graph(),
                                         data->broker(), data->common(),
                                         data->machine(), temp_zone);
    // Registration order matters for the fixpoint iteration.
    AddReducer(data, &graph_reducer, &dead_code_elimination);
    AddReducer(data, &graph_reducer, &simple_reducer);
    AddReducer(data, &graph_reducer, &redundancy_elimination);
    AddReducer(data, &graph_reducer, &machine_reducer);
    AddReducer(data, &graph_reducer, &common_reducer);
    AddReducer(data, &graph_reducer, &value_numbering);
    graph_reducer.ReduceGraph();
  }
};
1349 :
// Phase that runs the control flow optimizer over the graph.
struct ControlFlowOptimizationPhase {
  static const char* phase_name() { return "control flow optimization"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    ControlFlowOptimizer optimizer(data->graph(), data->common(),
                                   data->machine(), temp_zone);
    optimizer.Optimize();
  }
};
1359 :
// Phase that schedules the graph, wires low-level effect/control chains via
// EffectControlLinearizer, and then cleans up leftover dead code.
struct EffectControlLinearizationPhase {
  static const char* phase_name() { return "effect linearization"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    {
      // The scheduler requires the graphs to be trimmed, so trim now.
      // TODO(jarin) Remove the trimming once the scheduler can handle untrimmed
      // graphs.
      GraphTrimmer trimmer(temp_zone, data->graph());
      NodeVector roots(temp_zone);
      data->jsgraph()->GetCachedNodes(&roots);
      trimmer.TrimGraph(roots.begin(), roots.end());

      // Schedule the graph without node splitting so that we can
      // fix the effect and control flow for nodes with low-level side
      // effects (such as changing representation to tagged or
      // 'floating' allocation regions.)
      Schedule* schedule = Scheduler::ComputeSchedule(temp_zone, data->graph(),
                                                      Scheduler::kTempSchedule);
      if (FLAG_turbo_verify) ScheduleVerifier::Run(schedule);
      TraceSchedule(data->info(), data, schedule,
                    "effect linearization schedule");

      // Array index masking is only applied when some poisoning level is
      // configured for this compilation.
      EffectControlLinearizer::MaskArrayIndexEnable mask_array_index =
          (data->info()->GetPoisoningMitigationLevel() !=
           PoisoningMitigationLevel::kDontPoison)
              ? EffectControlLinearizer::kMaskArrayIndex
              : EffectControlLinearizer::kDoNotMaskArrayIndex;
      // Post-pass for wiring the control/effects
      // - connect allocating representation changes into the control&effect
      //   chains and lower them,
      // - get rid of the region markers,
      // - introduce effect phis and rewire effects to get SSA again.
      EffectControlLinearizer linearizer(
          data->jsgraph(), schedule, temp_zone, data->source_positions(),
          data->node_origins(), mask_array_index, data->embedded_maps());
      linearizer.Run();
    }
    {
      // The {EffectControlLinearizer} might leave {Dead} nodes behind, so we
      // run {DeadCodeElimination} to prune these parts of the graph.
      // Also, the following store-store elimination phase greatly benefits from
      // doing a common operator reducer and dead code elimination just before
      // it, to eliminate conditional deopts with a constant condition.
      GraphReducer graph_reducer(temp_zone, data->graph(),
                                 data->jsgraph()->Dead());
      DeadCodeElimination dead_code_elimination(&graph_reducer, data->graph(),
                                                data->common(), temp_zone);
      CommonOperatorReducer common_reducer(&graph_reducer, data->graph(),
                                           data->broker(), data->common(),
                                           data->machine(), temp_zone);
      AddReducer(data, &graph_reducer, &dead_code_elimination);
      AddReducer(data, &graph_reducer, &common_reducer);
      graph_reducer.ReduceGraph();
    }
  }
};
1417 :
// Phase that trims the graph and then runs store-store elimination.
struct StoreStoreEliminationPhase {
  static const char* phase_name() { return "store-store elimination"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    // Trim first; cached JSGraph nodes are kept alive as extra roots.
    GraphTrimmer trimmer(temp_zone, data->graph());
    NodeVector roots(temp_zone);
    data->jsgraph()->GetCachedNodes(&roots);
    trimmer.TrimGraph(roots.begin(), roots.end());

    StoreStoreElimination::Run(data->jsgraph(), temp_zone);
  }
};
1430 :
// Phase that runs load elimination together with branch/redundancy/
// checkpoint elimination, type narrowing, constant folding, typed
// optimization, and value numbering, to a fixpoint.
struct LoadEliminationPhase {
  static const char* phase_name() { return "load elimination"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    GraphReducer graph_reducer(temp_zone, data->graph(),
                               data->jsgraph()->Dead());
    BranchElimination branch_condition_elimination(&graph_reducer,
                                                   data->jsgraph(), temp_zone);
    DeadCodeElimination dead_code_elimination(&graph_reducer, data->graph(),
                                              data->common(), temp_zone);
    RedundancyElimination redundancy_elimination(&graph_reducer, temp_zone);
    LoadElimination load_elimination(&graph_reducer, data->jsgraph(),
                                     temp_zone);
    CheckpointElimination checkpoint_elimination(&graph_reducer);
    // Value numbering allocates in the graph zone so numbered nodes
    // outlive this phase's temporary zone.
    ValueNumberingReducer value_numbering(temp_zone, data->graph()->zone());
    CommonOperatorReducer common_reducer(&graph_reducer, data->graph(),
                                         data->broker(), data->common(),
                                         data->machine(), temp_zone);
    TypedOptimization typed_optimization(&graph_reducer, data->dependencies(),
                                         data->jsgraph(), data->broker());
    ConstantFoldingReducer constant_folding_reducer(
        &graph_reducer, data->jsgraph(), data->broker());
    TypeNarrowingReducer type_narrowing_reducer(&graph_reducer, data->jsgraph(),
                                                data->broker());
    // Registration order matters for the fixpoint iteration.
    AddReducer(data, &graph_reducer, &branch_condition_elimination);
    AddReducer(data, &graph_reducer, &dead_code_elimination);
    AddReducer(data, &graph_reducer, &redundancy_elimination);
    AddReducer(data, &graph_reducer, &load_elimination);
    AddReducer(data, &graph_reducer, &type_narrowing_reducer);
    AddReducer(data, &graph_reducer, &constant_folding_reducer);
    AddReducer(data, &graph_reducer, &typed_optimization);
    AddReducer(data, &graph_reducer, &checkpoint_elimination);
    AddReducer(data, &graph_reducer, &common_reducer);
    AddReducer(data, &graph_reducer, &value_numbering);
    graph_reducer.ReduceGraph();
  }
};
1468 :
// Phase that trims the graph and runs the memory optimizer over allocations
// and load/store operations, with allocation folding if enabled.
struct MemoryOptimizationPhase {
  static const char* phase_name() { return "memory optimization"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    // The memory optimizer requires the graphs to be trimmed, so trim now.
    GraphTrimmer trimmer(temp_zone, data->graph());
    NodeVector roots(temp_zone);
    data->jsgraph()->GetCachedNodes(&roots);
    trimmer.TrimGraph(roots.begin(), roots.end());

    // Optimize allocations and load/store operations.
    MemoryOptimizer optimizer(
        data->jsgraph(), temp_zone, data->info()->GetPoisoningMitigationLevel(),
        data->info()->is_allocation_folding_enabled()
            ? MemoryOptimizer::AllocationFolding::kDoAllocationFolding
            : MemoryOptimizer::AllocationFolding::kDontAllocationFolding);
    optimizer.Optimize();
  }
};
1488 :
// Phase that runs a late machine-level cleanup pass: branch and dead code
// elimination, machine/common operator reduction, select lowering, and
// value numbering, to a fixpoint.
struct LateOptimizationPhase {
  static const char* phase_name() { return "late optimization"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    GraphReducer graph_reducer(temp_zone, data->graph(),
                               data->jsgraph()->Dead());
    BranchElimination branch_condition_elimination(&graph_reducer,
                                                   data->jsgraph(), temp_zone);
    DeadCodeElimination dead_code_elimination(&graph_reducer, data->graph(),
                                              data->common(), temp_zone);
    // Value numbering allocates in the graph zone so numbered nodes
    // outlive this phase's temporary zone.
    ValueNumberingReducer value_numbering(temp_zone, data->graph()->zone());
    MachineOperatorReducer machine_reducer(&graph_reducer, data->jsgraph());
    CommonOperatorReducer common_reducer(&graph_reducer, data->graph(),
                                         data->broker(), data->common(),
                                         data->machine(), temp_zone);
    SelectLowering select_lowering(data->jsgraph()->graph(),
                                   data->jsgraph()->common());
    // Registration order matters for the fixpoint iteration.
    AddReducer(data, &graph_reducer, &branch_condition_elimination);
    AddReducer(data, &graph_reducer, &dead_code_elimination);
    AddReducer(data, &graph_reducer, &machine_reducer);
    AddReducer(data, &graph_reducer, &common_reducer);
    AddReducer(data, &graph_reducer, &select_lowering);
    AddReducer(data, &graph_reducer, &value_numbering);
    graph_reducer.ReduceGraph();
  }
};
1515 :
1516 : struct CsaOptimizationPhase {
1517 : static const char* phase_name() { return "csa optimization"; }
1518 :
1519 66916 : void Run(PipelineData* data, Zone* temp_zone) {
1520 : GraphReducer graph_reducer(temp_zone, data->graph(),
1521 133832 : data->jsgraph()->Dead());
1522 : BranchElimination branch_condition_elimination(&graph_reducer,
1523 133832 : data->jsgraph(), temp_zone);
1524 : DeadCodeElimination dead_code_elimination(&graph_reducer, data->graph(),
1525 66916 : data->common(), temp_zone);
1526 133832 : MachineOperatorReducer machine_reducer(&graph_reducer, data->jsgraph());
1527 : CommonOperatorReducer common_reducer(&graph_reducer, data->graph(),
1528 : data->broker(), data->common(),
1529 66916 : data->machine(), temp_zone);
1530 66916 : AddReducer(data, &graph_reducer, &branch_condition_elimination);
1531 66916 : AddReducer(data, &graph_reducer, &dead_code_elimination);
1532 66916 : AddReducer(data, &graph_reducer, &machine_reducer);
1533 66916 : AddReducer(data, &graph_reducer, &common_reducer);
1534 66916 : graph_reducer.ReduceGraph();
1535 66916 : }
1536 : };
1537 :
1538 : struct EarlyGraphTrimmingPhase {
1539 : static const char* phase_name() { return "early trimming"; }
1540 463860 : void Run(PipelineData* data, Zone* temp_zone) {
1541 927737 : GraphTrimmer trimmer(temp_zone, data->graph());
1542 : NodeVector roots(temp_zone);
1543 463874 : data->jsgraph()->GetCachedNodes(&roots);
1544 463869 : trimmer.TrimGraph(roots.begin(), roots.end());
1545 463877 : }
1546 : };
1547 :
1548 :
1549 : struct LateGraphTrimmingPhase {
1550 : static const char* phase_name() { return "late graph trimming"; }
1551 2277486 : void Run(PipelineData* data, Zone* temp_zone) {
1552 4555554 : GraphTrimmer trimmer(temp_zone, data->graph());
1553 : NodeVector roots(temp_zone);
1554 2279114 : if (data->jsgraph()) {
1555 1406969 : data->jsgraph()->GetCachedNodes(&roots);
1556 : }
1557 2279111 : trimmer.TrimGraph(roots.begin(), roots.end());
1558 2278179 : }
1559 : };
1560 :
1561 :
1562 : struct ComputeSchedulePhase {
1563 : static const char* phase_name() { return "scheduling"; }
1564 :
1565 2277465 : void Run(PipelineData* data, Zone* temp_zone) {
1566 4556211 : Schedule* schedule = Scheduler::ComputeSchedule(
1567 : temp_zone, data->graph(), data->info()->is_splitting_enabled()
1568 : ? Scheduler::kSplitNodes
1569 2277465 : : Scheduler::kNoFlags);
1570 2278746 : if (FLAG_turbo_verify) ScheduleVerifier::Run(schedule);
1571 : data->set_schedule(schedule);
1572 2278746 : }
1573 : };
1574 :
1575 : struct InstructionRangesAsJSON {
1576 : const InstructionSequence* sequence;
1577 : const ZoneVector<std::pair<int, int>>* instr_origins;
1578 : };
1579 :
1580 1 : std::ostream& operator<<(std::ostream& out, const InstructionRangesAsJSON& s) {
1581 1 : const int max = static_cast<int>(s.sequence->LastInstructionIndex());
1582 :
1583 1 : out << ", \"nodeIdToInstructionRange\": {";
1584 : bool need_comma = false;
1585 335 : for (size_t i = 0; i < s.instr_origins->size(); ++i) {
1586 111 : std::pair<int, int> offset = (*s.instr_origins)[i];
1587 111 : if (offset.first == -1) continue;
1588 78 : const int first = max - offset.first + 1;
1589 78 : const int second = max - offset.second + 1;
1590 78 : if (need_comma) out << ", ";
1591 156 : out << "\"" << i << "\": [" << first << ", " << second << "]";
1592 : need_comma = true;
1593 : }
1594 1 : out << "}";
1595 1 : out << ", \"blockIdtoInstructionRange\": {";
1596 : need_comma = false;
1597 1 : for (auto block : s.sequence->instruction_blocks()) {
1598 10 : if (need_comma) out << ", ";
1599 40 : out << "\"" << block->rpo_number() << "\": [" << block->code_start() << ", "
1600 10 : << block->code_end() << "]";
1601 : need_comma = true;
1602 : }
1603 1 : out << "}";
1604 1 : return out;
1605 : }
1606 :
1607 : struct InstructionSelectionPhase {
1608 : static const char* phase_name() { return "select instructions"; }
1609 :
1610 2515703 : void Run(PipelineData* data, Zone* temp_zone, Linkage* linkage) {
1611 : InstructionSelector selector(
1612 : temp_zone, data->graph()->NodeCount(), linkage, data->sequence(),
1613 : data->schedule(), data->source_positions(), data->frame(),
1614 : data->info()->switch_jump_table_enabled()
1615 : ? InstructionSelector::kEnableSwitchJumpTable
1616 : : InstructionSelector::kDisableSwitchJumpTable,
1617 : data->info()->is_source_positions_enabled()
1618 : ? InstructionSelector::kAllSourcePositions
1619 : : InstructionSelector::kCallSourcePositions,
1620 : InstructionSelector::SupportedFeatures(),
1621 : FLAG_turbo_instruction_scheduling
1622 : ? InstructionSelector::kEnableScheduling
1623 : : InstructionSelector::kDisableScheduling,
1624 1645347 : !data->isolate() || data->isolate()->serializer_enabled() ||
1625 : data->isolate()->IsGeneratingEmbeddedBuiltins()
1626 : ? InstructionSelector::kDisableRootsRelativeAddressing
1627 : : InstructionSelector::kEnableRootsRelativeAddressing,
1628 : data->info()->GetPoisoningMitigationLevel(),
1629 : data->info()->trace_turbo_json_enabled()
1630 : ? InstructionSelector::kEnableTraceTurboJson
1631 11587262 : : InstructionSelector::kDisableTraceTurboJson);
1632 2517260 : if (!selector.SelectInstructions()) {
1633 : data->set_compilation_failed();
1634 : }
1635 2517360 : if (data->info()->trace_turbo_json_enabled()) {
1636 2 : TurboJsonFile json_of(data->info(), std::ios_base::app);
1637 1 : json_of << "{\"name\":\"" << phase_name()
1638 : << "\",\"type\":\"instructions\""
1639 : << InstructionRangesAsJSON{data->sequence(),
1640 3 : &selector.instr_origins()}
1641 1 : << "},\n";
1642 : }
1643 2517360 : }
1644 : };
1645 :
1646 :
1647 : struct MeetRegisterConstraintsPhase {
1648 : static const char* phase_name() { return "meet register constraints"; }
1649 :
1650 : void Run(PipelineData* data, Zone* temp_zone) {
1651 2516389 : ConstraintBuilder builder(data->register_allocation_data());
1652 2515670 : builder.MeetRegisterConstraints();
1653 : }
1654 : };
1655 :
1656 :
1657 : struct ResolvePhisPhase {
1658 : static const char* phase_name() { return "resolve phis"; }
1659 :
1660 : void Run(PipelineData* data, Zone* temp_zone) {
1661 2517122 : ConstraintBuilder builder(data->register_allocation_data());
1662 2517105 : builder.ResolvePhis();
1663 : }
1664 : };
1665 :
1666 :
1667 : struct BuildLiveRangesPhase {
1668 : static const char* phase_name() { return "build live ranges"; }
1669 :
1670 2515535 : void Run(PipelineData* data, Zone* temp_zone) {
1671 2515535 : LiveRangeBuilder builder(data->register_allocation_data(), temp_zone);
1672 2515529 : builder.BuildLiveRanges();
1673 2517212 : }
1674 : };
1675 :
1676 : struct BuildBundlesPhase {
1677 : static const char* phase_name() { return "build live range bundles"; }
1678 :
1679 : void Run(PipelineData* data, Zone* temp_zone) {
1680 : BundleBuilder builder(data->register_allocation_data());
1681 2517224 : builder.BuildBundles();
1682 : }
1683 : };
1684 :
1685 : struct SplinterLiveRangesPhase {
1686 : static const char* phase_name() { return "splinter live ranges"; }
1687 :
1688 : void Run(PipelineData* data, Zone* temp_zone) {
1689 : LiveRangeSeparator live_range_splinterer(data->register_allocation_data(),
1690 : temp_zone);
1691 2517073 : live_range_splinterer.Splinter();
1692 : }
1693 : };
1694 :
1695 :
1696 : template <typename RegAllocator>
1697 : struct AllocateGeneralRegistersPhase {
1698 : static const char* phase_name() { return "allocate general registers"; }
1699 :
1700 2516801 : void Run(PipelineData* data, Zone* temp_zone) {
1701 : RegAllocator allocator(data->register_allocation_data(), GENERAL_REGISTERS,
1702 2516801 : temp_zone);
1703 2517269 : allocator.AllocateRegisters();
1704 2516919 : }
1705 : };
1706 :
1707 : template <typename RegAllocator>
1708 : struct AllocateFPRegistersPhase {
1709 : static const char* phase_name() { return "allocate f.p. registers"; }
1710 :
1711 206875 : void Run(PipelineData* data, Zone* temp_zone) {
1712 : RegAllocator allocator(data->register_allocation_data(), FP_REGISTERS,
1713 206875 : temp_zone);
1714 206929 : allocator.AllocateRegisters();
1715 206933 : }
1716 : };
1717 :
1718 :
1719 : struct MergeSplintersPhase {
1720 : static const char* phase_name() { return "merge splintered ranges"; }
1721 : void Run(PipelineData* pipeline_data, Zone* temp_zone) {
1722 : RegisterAllocationData* data = pipeline_data->register_allocation_data();
1723 : LiveRangeMerger live_range_merger(data, temp_zone);
1724 2517177 : live_range_merger.Merge();
1725 : }
1726 : };
1727 :
1728 :
1729 : struct LocateSpillSlotsPhase {
1730 : static const char* phase_name() { return "locate spill slots"; }
1731 :
1732 : void Run(PipelineData* data, Zone* temp_zone) {
1733 2517228 : SpillSlotLocator locator(data->register_allocation_data());
1734 2516487 : locator.LocateSpillSlots();
1735 : }
1736 : };
1737 :
1738 : struct DecideSpillingModePhase {
1739 : static const char* phase_name() { return "decide spilling mode"; }
1740 :
1741 : void Run(PipelineData* data, Zone* temp_zone) {
1742 2517317 : OperandAssigner assigner(data->register_allocation_data());
1743 2516246 : assigner.DecideSpillingMode();
1744 : }
1745 : };
1746 :
1747 : struct AssignSpillSlotsPhase {
1748 : static const char* phase_name() { return "assign spill slots"; }
1749 :
1750 : void Run(PipelineData* data, Zone* temp_zone) {
1751 2517172 : OperandAssigner assigner(data->register_allocation_data());
1752 2517325 : assigner.AssignSpillSlots();
1753 : }
1754 : };
1755 :
1756 :
1757 : struct CommitAssignmentPhase {
1758 : static const char* phase_name() { return "commit assignment"; }
1759 :
1760 : void Run(PipelineData* data, Zone* temp_zone) {
1761 2517329 : OperandAssigner assigner(data->register_allocation_data());
1762 2517333 : assigner.CommitAssignment();
1763 : }
1764 : };
1765 :
1766 :
1767 : struct PopulateReferenceMapsPhase {
1768 : static const char* phase_name() { return "populate pointer maps"; }
1769 :
1770 : void Run(PipelineData* data, Zone* temp_zone) {
1771 2517351 : ReferenceMapPopulator populator(data->register_allocation_data());
1772 2517353 : populator.PopulateReferenceMaps();
1773 : }
1774 : };
1775 :
1776 :
1777 : struct ConnectRangesPhase {
1778 : static const char* phase_name() { return "connect ranges"; }
1779 :
1780 : void Run(PipelineData* data, Zone* temp_zone) {
1781 2517388 : LiveRangeConnector connector(data->register_allocation_data());
1782 2517357 : connector.ConnectRanges(temp_zone);
1783 : }
1784 : };
1785 :
1786 :
1787 : struct ResolveControlFlowPhase {
1788 : static const char* phase_name() { return "resolve control flow"; }
1789 :
1790 : void Run(PipelineData* data, Zone* temp_zone) {
1791 2517325 : LiveRangeConnector connector(data->register_allocation_data());
1792 2517348 : connector.ResolveControlFlow(temp_zone);
1793 : }
1794 : };
1795 :
1796 :
1797 : struct OptimizeMovesPhase {
1798 : static const char* phase_name() { return "optimize moves"; }
1799 :
1800 : void Run(PipelineData* data, Zone* temp_zone) {
1801 2517362 : MoveOptimizer move_optimizer(temp_zone, data->sequence());
1802 2515658 : move_optimizer.Run();
1803 : }
1804 : };
1805 :
1806 :
1807 : struct FrameElisionPhase {
1808 : static const char* phase_name() { return "frame elision"; }
1809 :
1810 : void Run(PipelineData* data, Zone* temp_zone) {
1811 2517180 : FrameElider(data->sequence()).Run();
1812 : }
1813 : };
1814 :
1815 :
1816 : struct JumpThreadingPhase {
1817 : static const char* phase_name() { return "jump threading"; }
1818 :
1819 2515660 : void Run(PipelineData* data, Zone* temp_zone, bool frame_at_start) {
1820 : ZoneVector<RpoNumber> result(temp_zone);
1821 2515660 : if (JumpThreading::ComputeForwarding(temp_zone, result, data->sequence(),
1822 : frame_at_start)) {
1823 622107 : JumpThreading::ApplyForwarding(temp_zone, result, data->sequence());
1824 : }
1825 2517106 : }
1826 : };
1827 :
1828 : struct AssembleCodePhase {
1829 : static const char* phase_name() { return "assemble code"; }
1830 :
1831 : void Run(PipelineData* data, Zone* temp_zone) {
1832 2516346 : data->code_generator()->AssembleCode();
1833 : }
1834 : };
1835 :
1836 : struct FinalizeCodePhase {
1837 : static const char* phase_name() { return "finalize code"; }
1838 :
1839 : void Run(PipelineData* data, Zone* temp_zone) {
1840 1588146 : data->set_code(data->code_generator()->FinalizeCode());
1841 : }
1842 : };
1843 :
1844 :
1845 : struct PrintGraphPhase {
1846 : static const char* phase_name() { return nullptr; }
1847 :
1848 17 : void Run(PipelineData* data, Zone* temp_zone, const char* phase) {
1849 : OptimizedCompilationInfo* info = data->info();
1850 : Graph* graph = data->graph();
1851 :
1852 17 : if (info->trace_turbo_json_enabled()) { // Print JSON.
1853 : AllowHandleDereference allow_deref;
1854 :
1855 34 : TurboJsonFile json_of(info, std::ios_base::app);
1856 17 : json_of << "{\"name\":\"" << phase << "\",\"type\":\"graph\",\"data\":"
1857 34 : << AsJSON(*graph, data->source_positions(), data->node_origins())
1858 17 : << "},\n";
1859 : }
1860 :
1861 17 : if (info->trace_turbo_scheduled_enabled()) {
1862 0 : AccountingAllocator allocator;
1863 : Schedule* schedule = data->schedule();
1864 0 : if (schedule == nullptr) {
1865 0 : schedule = Scheduler::ComputeSchedule(temp_zone, data->graph(),
1866 0 : Scheduler::kNoFlags);
1867 : }
1868 :
1869 : AllowHandleDereference allow_deref;
1870 0 : CodeTracer::Scope tracing_scope(data->GetCodeTracer());
1871 0 : OFStream os(tracing_scope.file());
1872 0 : os << "-- Graph after " << phase << " -- " << std::endl;
1873 0 : os << AsScheduledGraph(schedule);
1874 17 : } else if (info->trace_turbo_graph_enabled()) { // Simple textual RPO.
1875 : AllowHandleDereference allow_deref;
1876 17 : CodeTracer::Scope tracing_scope(data->GetCodeTracer());
1877 34 : OFStream os(tracing_scope.file());
1878 17 : os << "-- Graph after " << phase << " -- " << std::endl;
1879 17 : os << AsRPO(*graph);
1880 : }
1881 17 : }
1882 : };
1883 :
1884 :
1885 : struct VerifyGraphPhase {
1886 : static const char* phase_name() { return nullptr; }
1887 :
1888 : void Run(PipelineData* data, Zone* temp_zone, const bool untyped,
1889 : bool values_only = false) {
1890 : Verifier::CodeType code_type;
1891 67188 : switch (data->info()->code_kind()) {
1892 : case Code::WASM_FUNCTION:
1893 : case Code::WASM_TO_JS_FUNCTION:
1894 : case Code::JS_TO_WASM_FUNCTION:
1895 : case Code::WASM_INTERPRETER_ENTRY:
1896 : case Code::C_WASM_ENTRY:
1897 : code_type = Verifier::kWasm;
1898 : break;
1899 : default:
1900 : code_type = Verifier::kDefault;
1901 : }
1902 67188 : Verifier::Run(data->graph(), !untyped ? Verifier::TYPED : Verifier::UNTYPED,
1903 : values_only ? Verifier::kValuesOnly : Verifier::kAll,
1904 67188 : code_type);
1905 : }
1906 : };
1907 :
1908 12259503 : void PipelineImpl::RunPrintAndVerify(const char* phase, bool untyped) {
1909 24519518 : if (info()->trace_turbo_json_enabled() ||
1910 : info()->trace_turbo_graph_enabled()) {
1911 17 : Run<PrintGraphPhase>(phase);
1912 : }
1913 12260063 : if (FLAG_turbo_verify) {
1914 272 : Run<VerifyGraphPhase>(untyped);
1915 : }
1916 12260063 : }
1917 :
1918 463865 : bool PipelineImpl::CreateGraph() {
1919 463865 : PipelineData* data = this->data_;
1920 :
1921 : data->BeginPhaseKind("graph creation");
1922 :
1923 927739 : if (info()->trace_turbo_json_enabled() ||
1924 : info()->trace_turbo_graph_enabled()) {
1925 1 : CodeTracer::Scope tracing_scope(data->GetCodeTracer());
1926 2 : OFStream os(tracing_scope.file());
1927 : os << "---------------------------------------------------\n"
1928 3 : << "Begin compiling method " << info()->GetDebugName().get()
1929 : << " using Turbofan" << std::endl;
1930 : }
1931 463869 : if (info()->trace_turbo_json_enabled()) {
1932 2 : TurboCfgFile tcf(isolate());
1933 1 : tcf << AsC1VCompilation(info());
1934 : }
1935 :
1936 463869 : data->source_positions()->AddDecorator();
1937 463869 : if (data->info()->trace_turbo_json_enabled()) {
1938 1 : data->node_origins()->AddDecorator();
1939 : }
1940 :
1941 463865 : if (FLAG_concurrent_inlining) {
1942 79 : data->broker()->StartSerializing();
1943 79 : Run<SerializeStandardObjectsPhase>();
1944 79 : Run<SerializationPhase>();
1945 : } else {
1946 463786 : data->broker()->SetNativeContextRef();
1947 : }
1948 :
1949 463875 : Run<GraphBuilderPhase>();
1950 463877 : RunPrintAndVerify(GraphBuilderPhase::phase_name(), true);
1951 :
1952 463869 : if (FLAG_concurrent_inlining) {
1953 79 : Run<CopyMetadataForConcurrentCompilePhase>();
1954 : }
1955 :
1956 : // Perform function context specialization and inlining (if enabled).
1957 463869 : Run<InliningPhase>();
1958 463873 : RunPrintAndVerify(InliningPhase::phase_name(), true);
1959 :
1960 : // Remove dead->live edges from the graph.
1961 463873 : Run<EarlyGraphTrimmingPhase>();
1962 463878 : RunPrintAndVerify(EarlyGraphTrimmingPhase::phase_name(), true);
1963 :
1964 : // Determine the Typer operation flags.
1965 : {
1966 1300660 : if (is_sloppy(info()->shared_info()->language_mode()) &&
1967 836786 : info()->shared_info()->IsUserJavaScript()) {
1968 : // Sloppy mode functions always have an Object for this.
1969 : data->AddTyperFlag(Typer::kThisIsReceiver);
1970 : }
1971 463874 : if (IsClassConstructor(info()->shared_info()->kind())) {
1972 : // Class constructors cannot be [[Call]]ed.
1973 : data->AddTyperFlag(Typer::kNewTargetIsReceiver);
1974 : }
1975 : }
1976 :
1977 : // Run the type-sensitive lowerings and optimizations on the graph.
1978 : {
1979 463874 : if (FLAG_concurrent_inlining) {
1980 : // TODO(neis): Remove CopyMetadataForConcurrentCompilePhase call once
1981 : // brokerization of JSNativeContextSpecialization is complete.
1982 79 : Run<CopyMetadataForConcurrentCompilePhase>();
1983 79 : data->broker()->StopSerializing();
1984 : } else {
1985 463795 : data->broker()->StartSerializing();
1986 463788 : Run<SerializeStandardObjectsPhase>();
1987 463796 : Run<CopyMetadataForConcurrentCompilePhase>();
1988 463795 : data->broker()->StopSerializing();
1989 : }
1990 : }
1991 :
1992 : data->EndPhaseKind();
1993 :
1994 463875 : return true;
1995 : }
1996 :
1997 463830 : bool PipelineImpl::OptimizeGraph(Linkage* linkage) {
1998 463830 : PipelineData* data = this->data_;
1999 :
2000 : data->BeginPhaseKind("lowering");
2001 :
2002 : // Type the graph and keep the Typer running such that new nodes get
2003 : // automatically typed when they are created.
2004 463830 : Run<TyperPhase>(data->CreateTyper());
2005 463843 : RunPrintAndVerify(TyperPhase::phase_name());
2006 463839 : Run<TypedLoweringPhase>();
2007 463843 : RunPrintAndVerify(TypedLoweringPhase::phase_name());
2008 :
2009 463842 : if (data->info()->is_loop_peeling_enabled()) {
2010 460571 : Run<LoopPeelingPhase>();
2011 460568 : RunPrintAndVerify(LoopPeelingPhase::phase_name(), true);
2012 : } else {
2013 3271 : Run<LoopExitEliminationPhase>();
2014 3271 : RunPrintAndVerify(LoopExitEliminationPhase::phase_name(), true);
2015 : }
2016 :
2017 463837 : if (FLAG_turbo_load_elimination) {
2018 463826 : Run<LoadEliminationPhase>();
2019 463833 : RunPrintAndVerify(LoadEliminationPhase::phase_name());
2020 : }
2021 463841 : data->DeleteTyper();
2022 :
2023 463832 : if (FLAG_turbo_escape) {
2024 463836 : Run<EscapeAnalysisPhase>();
2025 463843 : if (data->compilation_failed()) {
2026 : info()->AbortOptimization(
2027 : BailoutReason::kCyclicObjectStateDetectedInEscapeAnalysis);
2028 : data->EndPhaseKind();
2029 : return false;
2030 : }
2031 463843 : RunPrintAndVerify(EscapeAnalysisPhase::phase_name());
2032 : }
2033 :
2034 : // Perform simplified lowering. This has to run w/o the Typer decorator,
2035 : // because we cannot compute meaningful types anyways, and the computed types
2036 : // might even conflict with the representation/truncation logic.
2037 463836 : Run<SimplifiedLoweringPhase>();
2038 463839 : RunPrintAndVerify(SimplifiedLoweringPhase::phase_name(), true);
2039 :
2040 : // From now on it is invalid to look at types on the nodes, because the types
2041 : // on the nodes might not make sense after representation selection due to the
2042 : // way we handle truncations; if we'd want to look at types afterwards we'd
2043 : // essentially need to re-type (large portions of) the graph.
2044 :
2045 : // In order to catch bugs related to type access after this point, we now
2046 : // remove the types from the nodes (currently only in Debug builds).
2047 : #ifdef DEBUG
2048 : Run<UntyperPhase>();
2049 : RunPrintAndVerify(UntyperPhase::phase_name(), true);
2050 : #endif
2051 :
2052 : // Run generic lowering pass.
2053 463839 : Run<GenericLoweringPhase>();
2054 463842 : RunPrintAndVerify(GenericLoweringPhase::phase_name(), true);
2055 :
2056 : data->BeginPhaseKind("block building");
2057 :
2058 : // Run early optimization pass.
2059 463843 : Run<EarlyOptimizationPhase>();
2060 463842 : RunPrintAndVerify(EarlyOptimizationPhase::phase_name(), true);
2061 :
2062 463844 : Run<EffectControlLinearizationPhase>();
2063 463836 : RunPrintAndVerify(EffectControlLinearizationPhase::phase_name(), true);
2064 :
2065 463836 : if (FLAG_turbo_store_elimination) {
2066 463837 : Run<StoreStoreEliminationPhase>();
2067 463841 : RunPrintAndVerify(StoreStoreEliminationPhase::phase_name(), true);
2068 : }
2069 :
2070 : // Optimize control flow.
2071 463840 : if (FLAG_turbo_cf_optimization) {
2072 463840 : Run<ControlFlowOptimizationPhase>();
2073 463842 : RunPrintAndVerify(ControlFlowOptimizationPhase::phase_name(), true);
2074 : }
2075 :
2076 : // Optimize memory access and allocation operations.
2077 463842 : Run<MemoryOptimizationPhase>();
2078 : // TODO(jarin, rossberg): Remove UNTYPED once machine typing works.
2079 463842 : RunPrintAndVerify(MemoryOptimizationPhase::phase_name(), true);
2080 :
2081 : // Lower changes that have been inserted before.
2082 463843 : Run<LateOptimizationPhase>();
2083 : // TODO(jarin, rossberg): Remove UNTYPED once machine typing works.
2084 463840 : RunPrintAndVerify(LateOptimizationPhase::phase_name(), true);
2085 :
2086 463839 : data->source_positions()->RemoveDecorator();
2087 463832 : if (data->info()->trace_turbo_json_enabled()) {
2088 1 : data->node_origins()->RemoveDecorator();
2089 : }
2090 :
2091 463832 : ComputeScheduledGraph();
2092 :
2093 463836 : return SelectInstructions(linkage);
2094 : }
2095 :
2096 66916 : MaybeHandle<Code> Pipeline::GenerateCodeForCodeStub(
2097 : Isolate* isolate, CallDescriptor* call_descriptor, Graph* graph,
2098 : SourcePositionTable* source_positions, Code::Kind kind,
2099 : const char* debug_name, int32_t builtin_index,
2100 : PoisoningMitigationLevel poisoning_level, const AssemblerOptions& options) {
2101 133832 : OptimizedCompilationInfo info(CStrVector(debug_name), graph->zone(), kind);
2102 : info.set_builtin_index(builtin_index);
2103 :
2104 66916 : if (poisoning_level != PoisoningMitigationLevel::kDontPoison) {
2105 : info.SetPoisoningMitigationLevel(poisoning_level);
2106 : }
2107 :
2108 : // Construct a pipeline for scheduling and code generation.
2109 133832 : ZoneStats zone_stats(isolate->allocator());
2110 66916 : NodeOriginTable node_origins(graph);
2111 : JumpOptimizationInfo jump_opt;
2112 : bool should_optimize_jumps =
2113 66916 : isolate->serializer_enabled() && FLAG_turbo_rewrite_far_jumps;
2114 : PipelineData data(&zone_stats, &info, isolate, graph, nullptr,
2115 : source_positions, &node_origins,
2116 133832 : should_optimize_jumps ? &jump_opt : nullptr, options);
2117 : data.set_verify_graph(FLAG_verify_csa);
2118 133832 : std::unique_ptr<PipelineStatistics> pipeline_statistics;
2119 66916 : if (FLAG_turbo_stats || FLAG_turbo_stats_nvp) {
2120 0 : pipeline_statistics.reset(new PipelineStatistics(
2121 0 : &info, isolate->GetTurboStatistics(), &zone_stats));
2122 0 : pipeline_statistics->BeginPhaseKind("stub codegen");
2123 : }
2124 :
2125 : PipelineImpl pipeline(&data);
2126 :
2127 133832 : if (info.trace_turbo_json_enabled() || info.trace_turbo_graph_enabled()) {
2128 0 : CodeTracer::Scope tracing_scope(data.GetCodeTracer());
2129 0 : OFStream os(tracing_scope.file());
2130 : os << "---------------------------------------------------\n"
2131 0 : << "Begin compiling " << debug_name << " using Turbofan" << std::endl;
2132 0 : if (info.trace_turbo_json_enabled()) {
2133 0 : TurboJsonFile json_of(&info, std::ios_base::trunc);
2134 0 : json_of << "{\"function\" : ";
2135 0 : JsonPrintFunctionSource(json_of, -1, info.GetDebugName(),
2136 : Handle<Script>(), isolate,
2137 0 : Handle<SharedFunctionInfo>());
2138 0 : json_of << ",\n\"phases\":[";
2139 : }
2140 0 : pipeline.Run<PrintGraphPhase>("Machine");
2141 : }
2142 :
2143 : // Optimize memory access and allocation operations.
2144 66916 : pipeline.Run<MemoryOptimizationPhase>();
2145 66916 : pipeline.RunPrintAndVerify(MemoryOptimizationPhase::phase_name(), true);
2146 :
2147 66916 : pipeline.Run<CsaOptimizationPhase>();
2148 66916 : pipeline.RunPrintAndVerify(CsaOptimizationPhase::phase_name(), true);
2149 :
2150 66916 : pipeline.Run<VerifyGraphPhase>(true);
2151 66916 : pipeline.ComputeScheduledGraph();
2152 : DCHECK_NOT_NULL(data.schedule());
2153 :
2154 : // First run code generation on a copy of the pipeline, in order to be able to
2155 : // repeat it for jump optimization. The first run has to happen on a temporary
2156 : // pipeline to avoid deletion of zones on the main pipeline.
2157 : PipelineData second_data(&zone_stats, &info, isolate, data.graph(),
2158 : data.schedule(), data.source_positions(),
2159 : data.node_origins(), data.jump_optimization_info(),
2160 133832 : options);
2161 : second_data.set_verify_graph(FLAG_verify_csa);
2162 : PipelineImpl second_pipeline(&second_data);
2163 66916 : second_pipeline.SelectInstructionsAndAssemble(call_descriptor);
2164 :
2165 : Handle<Code> code;
2166 66916 : if (jump_opt.is_optimizable()) {
2167 : jump_opt.set_optimizing();
2168 114016 : code = pipeline.GenerateCode(call_descriptor).ToHandleChecked();
2169 : } else {
2170 19816 : code = second_pipeline.FinalizeCode().ToHandleChecked();
2171 : }
2172 :
2173 133832 : return code;
2174 : }
2175 :
2176 : // static
2177 374747 : wasm::WasmCompilationResult Pipeline::GenerateCodeForWasmNativeStub(
2178 : wasm::WasmEngine* wasm_engine, CallDescriptor* call_descriptor,
2179 : MachineGraph* mcgraph, Code::Kind kind, int wasm_kind,
2180 : const char* debug_name, const AssemblerOptions& options,
2181 : SourcePositionTable* source_positions) {
2182 : Graph* graph = mcgraph->graph();
2183 749502 : OptimizedCompilationInfo info(CStrVector(debug_name), graph->zone(), kind);
2184 : // Construct a pipeline for scheduling and code generation.
2185 749511 : ZoneStats zone_stats(wasm_engine->allocator());
2186 374748 : NodeOriginTable* node_positions = new (graph->zone()) NodeOriginTable(graph);
2187 : // {instruction_buffer} must live longer than {PipelineData}, since
2188 : // {PipelineData} will reference the {instruction_buffer} via the
2189 : // {AssemblerBuffer} of the {Assembler} contained in the {CodeGenerator}.
2190 : std::unique_ptr<wasm::WasmInstructionBuffer> instruction_buffer =
2191 749508 : wasm::WasmInstructionBuffer::New();
2192 : PipelineData data(&zone_stats, wasm_engine, &info, mcgraph, nullptr,
2193 749501 : source_positions, node_positions, options);
2194 749505 : std::unique_ptr<PipelineStatistics> pipeline_statistics;
2195 374756 : if (FLAG_turbo_stats || FLAG_turbo_stats_nvp) {
2196 3 : pipeline_statistics.reset(new PipelineStatistics(
2197 0 : &info, wasm_engine->GetOrCreateTurboStatistics(), &zone_stats));
2198 0 : pipeline_statistics->BeginPhaseKind("wasm stub codegen");
2199 : }
2200 :
2201 : PipelineImpl pipeline(&data);
2202 :
2203 749507 : if (info.trace_turbo_json_enabled() || info.trace_turbo_graph_enabled()) {
2204 0 : CodeTracer::Scope tracing_scope(data.GetCodeTracer());
2205 0 : OFStream os(tracing_scope.file());
2206 : os << "---------------------------------------------------\n"
2207 0 : << "Begin compiling method " << info.GetDebugName().get()
2208 : << " using Turbofan" << std::endl;
2209 : }
2210 :
2211 374753 : if (info.trace_turbo_graph_enabled()) { // Simple textual RPO.
2212 0 : StdoutStream{} << "-- wasm stub " << Code::Kind2String(kind) << " graph -- "
2213 : << std::endl
2214 0 : << AsRPO(*graph);
2215 : }
2216 :
2217 374753 : if (info.trace_turbo_json_enabled()) {
2218 0 : TurboJsonFile json_of(&info, std::ios_base::trunc);
2219 0 : json_of << "{\"function\":\"" << info.GetDebugName().get()
2220 0 : << "\", \"source\":\"\",\n\"phases\":[";
2221 : }
2222 :
2223 374753 : pipeline.RunPrintAndVerify("machine", true);
2224 374753 : pipeline.ComputeScheduledGraph();
2225 :
2226 : Linkage linkage(call_descriptor);
2227 374749 : CHECK(pipeline.SelectInstructions(&linkage));
2228 749506 : pipeline.AssembleCode(&linkage, instruction_buffer->CreateView());
2229 :
2230 : CodeGenerator* code_generator = pipeline.code_generator();
2231 : wasm::WasmCompilationResult result;
2232 374754 : code_generator->tasm()->GetCode(
2233 : nullptr, &result.code_desc, code_generator->safepoint_table_builder(),
2234 374754 : static_cast<int>(code_generator->GetHandlerTableOffset()));
2235 749500 : result.instr_buffer = instruction_buffer->ReleaseBuffer();
2236 749492 : result.source_positions = code_generator->GetSourcePositionTable();
2237 749495 : result.protected_instructions = code_generator->GetProtectedInstructions();
2238 374750 : result.frame_slot_count = code_generator->frame()->GetTotalFrameSlotCount();
2239 374750 : result.tagged_parameter_slots = call_descriptor->GetTaggedParameterSlots();
2240 374749 : result.result_tier = wasm::ExecutionTier::kOptimized;
2241 :
2242 : DCHECK(result.succeeded());
2243 :
2244 374749 : if (info.trace_turbo_json_enabled()) {
2245 0 : TurboJsonFile json_of(&info, std::ios_base::app);
2246 0 : json_of << "{\"name\":\"disassembly\",\"type\":\"disassembly\",\"data\":\"";
2247 : #ifdef ENABLE_DISASSEMBLER
2248 : std::stringstream disassembler_stream;
2249 : Disassembler::Decode(
2250 : nullptr, &disassembler_stream, result.code_desc.buffer,
2251 : result.code_desc.buffer + result.code_desc.safepoint_table_offset,
2252 : CodeReference(&result.code_desc));
2253 : for (auto const c : disassembler_stream.str()) {
2254 : json_of << AsEscapedUC16ForJSON(c);
2255 : }
2256 : #endif // ENABLE_DISASSEMBLER
2257 0 : json_of << "\"}\n]";
2258 0 : json_of << "\n}";
2259 : }
2260 :
2261 749498 : if (info.trace_turbo_json_enabled() || info.trace_turbo_graph_enabled()) {
2262 0 : CodeTracer::Scope tracing_scope(data.GetCodeTracer());
2263 0 : OFStream os(tracing_scope.file());
2264 : os << "---------------------------------------------------\n"
2265 0 : << "Finished compiling method " << info.GetDebugName().get()
2266 : << " using Turbofan" << std::endl;
2267 : }
2268 :
2269 374751 : return result;
2270 : }
2271 :
2272 : // static
2273 141687 : MaybeHandle<Code> Pipeline::GenerateCodeForWasmHeapStub(
2274 : Isolate* isolate, CallDescriptor* call_descriptor, Graph* graph,
2275 : Code::Kind kind, const char* debug_name, const AssemblerOptions& options,
2276 : SourcePositionTable* source_positions) {
2277 283381 : OptimizedCompilationInfo info(CStrVector(debug_name), graph->zone(), kind);
2278 : // Construct a pipeline for scheduling and code generation.
2279 283389 : ZoneStats zone_stats(isolate->allocator());
2280 141688 : NodeOriginTable* node_positions = new (graph->zone()) NodeOriginTable(graph);
2281 : PipelineData data(&zone_stats, &info, isolate, graph, nullptr,
2282 283378 : source_positions, node_positions, nullptr, options);
2283 283390 : std::unique_ptr<PipelineStatistics> pipeline_statistics;
2284 141694 : if (FLAG_turbo_stats || FLAG_turbo_stats_nvp) {
2285 4 : pipeline_statistics.reset(new PipelineStatistics(
2286 0 : &info, isolate->GetTurboStatistics(), &zone_stats));
2287 0 : pipeline_statistics->BeginPhaseKind("wasm stub codegen");
2288 : }
2289 :
2290 : PipelineImpl pipeline(&data);
2291 :
2292 283381 : if (info.trace_turbo_json_enabled() ||
2293 : info.trace_turbo_graph_enabled()) {
2294 0 : CodeTracer::Scope tracing_scope(data.GetCodeTracer());
2295 0 : OFStream os(tracing_scope.file());
2296 : os << "---------------------------------------------------\n"
2297 0 : << "Begin compiling method " << info.GetDebugName().get()
2298 : << " using Turbofan" << std::endl;
2299 : }
2300 :
2301 141690 : if (info.trace_turbo_graph_enabled()) { // Simple textual RPO.
2302 0 : StdoutStream{} << "-- wasm stub " << Code::Kind2String(kind) << " graph -- "
2303 : << std::endl
2304 0 : << AsRPO(*graph);
2305 : }
2306 :
2307 141690 : if (info.trace_turbo_json_enabled()) {
2308 0 : TurboJsonFile json_of(&info, std::ios_base::trunc);
2309 0 : json_of << "{\"function\":\"" << info.GetDebugName().get()
2310 0 : << "\", \"source\":\"\",\n\"phases\":[";
2311 : }
2312 :
2313 141690 : pipeline.RunPrintAndVerify("machine", true);
2314 141688 : pipeline.ComputeScheduledGraph();
2315 :
2316 : Handle<Code> code;
2317 425085 : if (pipeline.GenerateCode(call_descriptor).ToHandle(&code) &&
2318 : pipeline.CommitDependencies(code)) {
2319 141696 : return code;
2320 : }
2321 0 : return MaybeHandle<Code>();
2322 : }
2323 :
2324 : // static
2325 3247 : MaybeHandle<Code> Pipeline::GenerateCodeForTesting(
2326 : OptimizedCompilationInfo* info, Isolate* isolate,
2327 : JSHeapBroker** out_broker) {
2328 6494 : ZoneStats zone_stats(isolate->allocator());
2329 : std::unique_ptr<PipelineStatistics> pipeline_statistics(
2330 : CreatePipelineStatistics(Handle<Script>::null(), info, isolate,
2331 6494 : &zone_stats));
2332 6494 : PipelineData data(&zone_stats, isolate, info, pipeline_statistics.get());
2333 3247 : if (out_broker != nullptr) {
2334 59 : *out_broker = data.broker();
2335 : }
2336 :
2337 : PipelineImpl pipeline(&data);
2338 :
2339 3247 : Linkage linkage(Linkage::ComputeIncoming(data.instruction_zone(), info));
2340 3247 : Deoptimizer::EnsureCodeForDeoptimizationEntries(isolate);
2341 :
2342 3247 : if (!pipeline.CreateGraph()) return MaybeHandle<Code>();
2343 3247 : if (!pipeline.OptimizeGraph(&linkage)) return MaybeHandle<Code>();
2344 6494 : pipeline.AssembleCode(&linkage);
2345 : Handle<Code> code;
2346 9741 : if (pipeline.FinalizeCode(out_broker == nullptr).ToHandle(&code) &&
2347 : pipeline.CommitDependencies(code)) {
2348 3247 : return code;
2349 : }
2350 0 : return MaybeHandle<Code>();
2351 : }
2352 :
2353 : // static
2354 915886 : MaybeHandle<Code> Pipeline::GenerateCodeForTesting(
2355 : OptimizedCompilationInfo* info, Isolate* isolate,
2356 : CallDescriptor* call_descriptor, Graph* graph,
2357 : const AssemblerOptions& options, Schedule* schedule) {
2358 : // Construct a pipeline for scheduling and code generation.
2359 1831772 : ZoneStats zone_stats(isolate->allocator());
2360 915886 : NodeOriginTable* node_positions = new (info->zone()) NodeOriginTable(graph);
2361 : PipelineData data(&zone_stats, info, isolate, graph, schedule, nullptr,
2362 1831772 : node_positions, nullptr, options);
2363 1831772 : std::unique_ptr<PipelineStatistics> pipeline_statistics;
2364 915886 : if (FLAG_turbo_stats || FLAG_turbo_stats_nvp) {
2365 0 : pipeline_statistics.reset(new PipelineStatistics(
2366 0 : info, isolate->GetTurboStatistics(), &zone_stats));
2367 0 : pipeline_statistics->BeginPhaseKind("test codegen");
2368 : }
2369 :
2370 : PipelineImpl pipeline(&data);
2371 :
2372 915886 : if (info->trace_turbo_json_enabled()) {
2373 0 : TurboJsonFile json_of(info, std::ios_base::trunc);
2374 0 : json_of << "{\"function\":\"" << info->GetDebugName().get()
2375 0 : << "\", \"source\":\"\",\n\"phases\":[";
2376 : }
2377 : // TODO(rossberg): Should this really be untyped?
2378 915886 : pipeline.RunPrintAndVerify("machine", true);
2379 :
2380 : // Ensure we have a schedule.
2381 915886 : if (data.schedule() == nullptr) {
2382 734516 : pipeline.ComputeScheduledGraph();
2383 : }
2384 :
2385 : Handle<Code> code;
2386 2747658 : if (pipeline.GenerateCode(call_descriptor).ToHandle(&code) &&
2387 : pipeline.CommitDependencies(code)) {
2388 915886 : return code;
2389 : }
2390 0 : return MaybeHandle<Code>();
2391 : }
2392 :
2393 : // static
2394 479299 : OptimizedCompilationJob* Pipeline::NewCompilationJob(
2395 : Isolate* isolate, Handle<JSFunction> function, bool has_script) {
2396 : Handle<SharedFunctionInfo> shared =
2397 : handle(function->shared(), function->GetIsolate());
2398 479303 : return new PipelineCompilationJob(isolate, shared, function);
2399 : }
2400 :
2401 : // static
2402 495296 : void Pipeline::GenerateCodeForWasmFunction(
2403 : OptimizedCompilationInfo* info, wasm::WasmEngine* wasm_engine,
2404 : MachineGraph* mcgraph, CallDescriptor* call_descriptor,
2405 : SourcePositionTable* source_positions, NodeOriginTable* node_origins,
2406 : wasm::FunctionBody function_body, const wasm::WasmModule* module,
2407 : int function_index) {
2408 991631 : ZoneStats zone_stats(wasm_engine->allocator());
2409 : std::unique_ptr<PipelineStatistics> pipeline_statistics(
2410 : CreatePipelineStatistics(wasm_engine, function_body, module, info,
2411 992723 : &zone_stats));
2412 : // {instruction_buffer} must live longer than {PipelineData}, since
2413 : // {PipelineData} will reference the {instruction_buffer} via the
2414 : // {AssemblerBuffer} of the {Assembler} contained in the {CodeGenerator}.
2415 : std::unique_ptr<wasm::WasmInstructionBuffer> instruction_buffer =
2416 992547 : wasm::WasmInstructionBuffer::New();
2417 : PipelineData data(&zone_stats, wasm_engine, info, mcgraph,
2418 : pipeline_statistics.get(), source_positions, node_origins,
2419 1488950 : WasmAssemblerOptions());
2420 :
2421 : PipelineImpl pipeline(&data);
2422 :
2423 994398 : if (data.info()->trace_turbo_json_enabled() ||
2424 : data.info()->trace_turbo_graph_enabled()) {
2425 0 : CodeTracer::Scope tracing_scope(data.GetCodeTracer());
2426 0 : OFStream os(tracing_scope.file());
2427 : os << "---------------------------------------------------\n"
2428 0 : << "Begin compiling method " << data.info()->GetDebugName().get()
2429 : << " using Turbofan" << std::endl;
2430 : }
2431 :
2432 497196 : pipeline.RunPrintAndVerify("Machine", true);
2433 :
2434 : data.BeginPhaseKind("wasm optimization");
2435 496704 : const bool is_asm_js = module->origin == wasm::kAsmJsOrigin;
2436 496704 : if (FLAG_turbo_splitting && !is_asm_js) {
2437 : data.info()->MarkAsSplittingEnabled();
2438 : }
2439 496704 : if (FLAG_wasm_opt || is_asm_js) {
2440 16261 : PipelineRunScope scope(&data, "wasm full optimization");
2441 : GraphReducer graph_reducer(scope.zone(), data.graph(),
2442 24546 : data.mcgraph()->Dead());
2443 : DeadCodeElimination dead_code_elimination(&graph_reducer, data.graph(),
2444 8182 : data.common(), scope.zone());
2445 16364 : ValueNumberingReducer value_numbering(scope.zone(), data.graph()->zone());
2446 : const bool allow_signalling_nan = is_asm_js;
2447 : MachineOperatorReducer machine_reducer(&graph_reducer, data.mcgraph(),
2448 16364 : allow_signalling_nan);
2449 : CommonOperatorReducer common_reducer(&graph_reducer, data.graph(),
2450 : data.broker(), data.common(),
2451 8182 : data.machine(), scope.zone());
2452 8182 : AddReducer(&data, &graph_reducer, &dead_code_elimination);
2453 8182 : AddReducer(&data, &graph_reducer, &machine_reducer);
2454 8182 : AddReducer(&data, &graph_reducer, &common_reducer);
2455 8182 : AddReducer(&data, &graph_reducer, &value_numbering);
2456 16364 : graph_reducer.ReduceGraph();
2457 : } else {
2458 976869 : PipelineRunScope scope(&data, "wasm base optimization");
2459 : GraphReducer graph_reducer(scope.zone(), data.graph(),
2460 1466101 : data.mcgraph()->Dead());
2461 976712 : ValueNumberingReducer value_numbering(scope.zone(), data.graph()->zone());
2462 486900 : AddReducer(&data, &graph_reducer, &value_numbering);
2463 486792 : graph_reducer.ReduceGraph();
2464 : }
2465 497389 : pipeline.RunPrintAndVerify("wasm optimization", true);
2466 :
2467 497160 : if (data.node_origins()) {
2468 0 : data.node_origins()->RemoveDecorator();
2469 : }
2470 :
2471 497160 : pipeline.ComputeScheduledGraph();
2472 :
2473 : Linkage linkage(call_descriptor);
2474 495237 : if (!pipeline.SelectInstructions(&linkage)) return;
2475 992640 : pipeline.AssembleCode(&linkage, instruction_buffer->CreateView());
2476 :
2477 991823 : auto result = base::make_unique<wasm::WasmCompilationResult>();
2478 : CodeGenerator* code_generator = pipeline.code_generator();
2479 497066 : code_generator->tasm()->GetCode(
2480 : nullptr, &result->code_desc, code_generator->safepoint_table_builder(),
2481 497066 : static_cast<int>(code_generator->GetHandlerTableOffset()));
2482 :
2483 991404 : result->instr_buffer = instruction_buffer->ReleaseBuffer();
2484 494927 : result->frame_slot_count = code_generator->frame()->GetTotalFrameSlotCount();
2485 494927 : result->tagged_parameter_slots = call_descriptor->GetTaggedParameterSlots();
2486 990557 : result->source_positions = code_generator->GetSourcePositionTable();
2487 990569 : result->protected_instructions = code_generator->GetProtectedInstructions();
2488 495241 : result->result_tier = wasm::ExecutionTier::kOptimized;
2489 :
2490 495241 : if (data.info()->trace_turbo_json_enabled()) {
2491 0 : TurboJsonFile json_of(data.info(), std::ios_base::app);
2492 0 : json_of << "{\"name\":\"disassembly\",\"type\":\"disassembly\",\"data\":\"";
2493 : #ifdef ENABLE_DISASSEMBLER
2494 : std::stringstream disassembler_stream;
2495 : Disassembler::Decode(
2496 : nullptr, &disassembler_stream, result->code_desc.buffer,
2497 : result->code_desc.buffer + result->code_desc.safepoint_table_offset,
2498 : CodeReference(&result->code_desc));
2499 : for (auto const c : disassembler_stream.str()) {
2500 : json_of << AsEscapedUC16ForJSON(c);
2501 : }
2502 : #endif // ENABLE_DISASSEMBLER
2503 0 : json_of << "\"}\n]";
2504 0 : json_of << "\n}";
2505 : }
2506 :
2507 990384 : if (data.info()->trace_turbo_json_enabled() ||
2508 : data.info()->trace_turbo_graph_enabled()) {
2509 0 : CodeTracer::Scope tracing_scope(data.GetCodeTracer());
2510 0 : OFStream os(tracing_scope.file());
2511 : os << "---------------------------------------------------\n"
2512 0 : << "Finished compiling method " << data.info()->GetDebugName().get()
2513 : << " using Turbofan" << std::endl;
2514 : }
2515 :
2516 : DCHECK(result->succeeded());
2517 495241 : info->SetWasmCompilationResult(std::move(result));
2518 : }
2519 :
2520 42 : bool Pipeline::AllocateRegistersForTesting(const RegisterConfiguration* config,
2521 : InstructionSequence* sequence,
2522 : bool run_verifier) {
2523 : OptimizedCompilationInfo info(ArrayVector("testing"), sequence->zone(),
2524 84 : Code::STUB);
2525 84 : ZoneStats zone_stats(sequence->isolate()->allocator());
2526 84 : PipelineData data(&zone_stats, &info, sequence->isolate(), sequence);
2527 42 : data.InitializeFrameData(nullptr);
2528 : PipelineImpl pipeline(&data);
2529 42 : pipeline.AllocateRegisters(config, nullptr, run_verifier);
2530 84 : return !data.compilation_failed();
2531 : }
2532 :
2533 2277721 : void PipelineImpl::ComputeScheduledGraph() {
2534 2277721 : PipelineData* data = this->data_;
2535 :
2536 : // We should only schedule the graph if it is not scheduled yet.
2537 : DCHECK_NULL(data->schedule());
2538 :
2539 2277721 : Run<LateGraphTrimmingPhase>();
2540 2279021 : RunPrintAndVerify(LateGraphTrimmingPhase::phase_name(), true);
2541 :
2542 2278991 : Run<ComputeSchedulePhase>();
2543 2278651 : TraceSchedule(data->info(), data, data->schedule(), "schedule");
2544 2277055 : }
2545 :
2546 2515128 : bool PipelineImpl::SelectInstructions(Linkage* linkage) {
2547 2515128 : auto call_descriptor = linkage->GetIncomingDescriptor();
2548 2515128 : PipelineData* data = this->data_;
2549 :
2550 : // We should have a scheduled graph.
2551 : DCHECK_NOT_NULL(data->graph());
2552 : DCHECK_NOT_NULL(data->schedule());
2553 :
2554 2515128 : if (FLAG_turbo_profiling) {
2555 8 : data->set_profiler_data(BasicBlockInstrumentor::Instrument(
2556 : info(), data->graph(), data->schedule(), data->isolate()));
2557 : }
2558 :
2559 : bool verify_stub_graph = data->verify_graph();
2560 : // Jump optimization runs instruction selection twice, but the instruction
2561 : // selector mutates nodes like swapping the inputs of a load, which can
2562 : // violate the machine graph verification rules. So we skip the second
2563 : // verification on a graph that already verified before.
2564 : auto jump_opt = data->jump_optimization_info();
2565 2515128 : if (jump_opt && jump_opt->is_optimizing()) {
2566 : verify_stub_graph = false;
2567 : }
2568 7545510 : if (verify_stub_graph ||
2569 2515254 : (FLAG_turbo_verify_machine_graph != nullptr &&
2570 0 : (!strcmp(FLAG_turbo_verify_machine_graph, "*") ||
2571 0 : !strcmp(FLAG_turbo_verify_machine_graph, data->debug_name())))) {
2572 0 : if (FLAG_trace_verify_csa) {
2573 : AllowHandleDereference allow_deref;
2574 0 : CodeTracer::Scope tracing_scope(data->GetCodeTracer());
2575 0 : OFStream os(tracing_scope.file());
2576 : os << "--------------------------------------------------\n"
2577 0 : << "--- Verifying " << data->debug_name() << " generated by TurboFan\n"
2578 : << "--------------------------------------------------\n"
2579 0 : << *data->schedule()
2580 : << "--------------------------------------------------\n"
2581 0 : << "--- End of " << data->debug_name() << " generated by TurboFan\n"
2582 0 : << "--------------------------------------------------\n";
2583 : }
2584 0 : Zone temp_zone(data->allocator(), ZONE_NAME);
2585 0 : MachineGraphVerifier::Run(
2586 : data->graph(), data->schedule(), linkage,
2587 : data->info()->IsNotOptimizedFunctionOrWasmFunction(),
2588 0 : data->debug_name(), &temp_zone);
2589 : }
2590 :
2591 2515128 : data->InitializeInstructionSequence(call_descriptor);
2592 :
2593 2516184 : data->InitializeFrameData(call_descriptor);
2594 : // Select and schedule instructions covering the scheduled graph.
2595 2515813 : Run<InstructionSelectionPhase>(linkage);
2596 2517307 : if (data->compilation_failed()) {
2597 : info()->AbortOptimization(BailoutReason::kCodeGenerationFailed);
2598 : data->EndPhaseKind();
2599 : return false;
2600 : }
2601 :
2602 2517298 : if (info()->trace_turbo_json_enabled() && !data->MayHaveUnverifiableGraph()) {
2603 : AllowHandleDereference allow_deref;
2604 2 : TurboCfgFile tcf(isolate());
2605 : tcf << AsC1V("CodeGen", data->schedule(), data->source_positions(),
2606 1 : data->sequence());
2607 : }
2608 :
2609 2517298 : if (info()->trace_turbo_json_enabled()) {
2610 2 : std::ostringstream source_position_output;
2611 : // Output source position information before the graph is deleted.
2612 1 : if (data_->source_positions() != nullptr) {
2613 1 : data_->source_positions()->PrintJson(source_position_output);
2614 : } else {
2615 0 : source_position_output << "{}";
2616 : }
2617 1 : source_position_output << ",\n\"NodeOrigins\" : ";
2618 1 : data_->node_origins()->PrintJson(source_position_output);
2619 2 : data_->set_source_position_output(source_position_output.str());
2620 : }
2621 :
2622 2517298 : data->DeleteGraphZone();
2623 :
2624 : data->BeginPhaseKind("register allocation");
2625 :
2626 2516601 : bool run_verifier = FLAG_turbo_verify_allocation;
2627 :
2628 : // Allocate registers.
2629 2516601 : if (call_descriptor->HasRestrictedAllocatableRegisters()) {
2630 : RegList registers = call_descriptor->AllocatableRegisters();
2631 : DCHECK_LT(0, NumRegs(registers));
2632 : std::unique_ptr<const RegisterConfiguration> config;
2633 224 : config.reset(RegisterConfiguration::RestrictGeneralRegisters(registers));
2634 224 : AllocateRegisters(config.get(), call_descriptor, run_verifier);
2635 2516377 : } else if (data->info()->GetPoisoningMitigationLevel() !=
2636 : PoisoningMitigationLevel::kDontPoison) {
2637 : #ifdef V8_TARGET_ARCH_IA32
2638 : FATAL("Poisoning is not supported on ia32.");
2639 : #else
2640 0 : AllocateRegisters(RegisterConfiguration::Poisoning(), call_descriptor,
2641 0 : run_verifier);
2642 : #endif // V8_TARGET_ARCH_IA32
2643 : } else {
2644 2516377 : AllocateRegisters(RegisterConfiguration::Default(), call_descriptor,
2645 2515583 : run_verifier);
2646 : }
2647 :
2648 : // Verify the instruction sequence has the same hash in two stages.
2649 2516975 : VerifyGeneratedCodeIsIdempotent();
2650 :
2651 2516382 : Run<FrameElisionPhase>();
2652 2517507 : if (data->compilation_failed()) {
2653 : info()->AbortOptimization(
2654 : BailoutReason::kNotEnoughVirtualRegistersRegalloc);
2655 : data->EndPhaseKind();
2656 : return false;
2657 : }
2658 :
2659 : // TODO(mtrofin): move this off to the register allocator.
2660 : bool generate_frame_at_start =
2661 2517507 : data_->sequence()->instruction_blocks().front()->must_construct_frame();
2662 : // Optimimize jumps.
2663 2517507 : if (FLAG_turbo_jt) {
2664 2517467 : Run<JumpThreadingPhase>(generate_frame_at_start);
2665 : }
2666 :
2667 : data->EndPhaseKind();
2668 :
2669 : return true;
2670 : }
2671 :
2672 2516278 : void PipelineImpl::VerifyGeneratedCodeIsIdempotent() {
2673 2516278 : PipelineData* data = this->data_;
2674 : JumpOptimizationInfo* jump_opt = data->jump_optimization_info();
2675 2516278 : if (jump_opt == nullptr) return;
2676 :
2677 : InstructionSequence* code = data->sequence();
2678 : int instruction_blocks = code->InstructionBlockCount();
2679 : int virtual_registers = code->VirtualRegisterCount();
2680 : size_t hash_code = base::hash_combine(instruction_blocks, virtual_registers);
2681 19266642 : for (auto instr : *code) {
2682 : hash_code = base::hash_combine(hash_code, instr->opcode(),
2683 : instr->InputCount(), instr->OutputCount());
2684 : }
2685 27428396 : for (int i = 0; i < virtual_registers; i++) {
2686 13652962 : hash_code = base::hash_combine(hash_code, code->GetRepresentation(i));
2687 : }
2688 122472 : if (jump_opt->is_collecting()) {
2689 : jump_opt->set_hash_code(hash_code);
2690 : } else {
2691 57008 : CHECK_EQ(hash_code, jump_opt->hash_code());
2692 : }
2693 : }
2694 :
2695 : struct InstructionStartsAsJSON {
2696 : const ZoneVector<int>* instr_starts;
2697 : };
2698 :
2699 1 : std::ostream& operator<<(std::ostream& out, const InstructionStartsAsJSON& s) {
2700 1 : out << ", \"instructionOffsetToPCOffset\": {";
2701 : bool need_comma = false;
2702 125 : for (size_t i = 0; i < s.instr_starts->size(); ++i) {
2703 41 : if (need_comma) out << ", ";
2704 82 : int offset = (*s.instr_starts)[i];
2705 41 : out << "\"" << i << "\":" << offset;
2706 : need_comma = true;
2707 : }
2708 1 : out << "}";
2709 1 : return out;
2710 : }
2711 :
2712 2515130 : void PipelineImpl::AssembleCode(Linkage* linkage,
2713 : std::unique_ptr<AssemblerBuffer> buffer) {
2714 2515130 : PipelineData* data = this->data_;
2715 : data->BeginPhaseKind("code generation");
2716 5030577 : data->InitializeCodeGenerator(linkage, std::move(buffer));
2717 :
2718 2515447 : Run<AssembleCodePhase>();
2719 2517187 : if (data->info()->trace_turbo_json_enabled()) {
2720 2 : TurboJsonFile json_of(data->info(), std::ios_base::app);
2721 : json_of << "{\"name\":\"code generation\""
2722 : << ", \"type\":\"instructions\""
2723 2 : << InstructionStartsAsJSON{&data->code_generator()->instr_starts()};
2724 1 : json_of << "},\n";
2725 : }
2726 2517187 : data->DeleteInstructionZone();
2727 2517088 : }
2728 :
2729 : struct BlockStartsAsJSON {
2730 : const ZoneVector<int>* block_starts;
2731 : };
2732 :
2733 1 : std::ostream& operator<<(std::ostream& out, const BlockStartsAsJSON& s) {
2734 1 : out << ", \"blockIdToOffset\": {";
2735 : bool need_comma = false;
2736 32 : for (size_t i = 0; i < s.block_starts->size(); ++i) {
2737 10 : if (need_comma) out << ", ";
2738 20 : int offset = (*s.block_starts)[i];
2739 10 : out << "\"" << i << "\":" << offset;
2740 : need_comma = true;
2741 : }
2742 1 : out << "},";
2743 1 : return out;
2744 : }
2745 :
2746 1588119 : MaybeHandle<Code> PipelineImpl::FinalizeCode(bool retire_broker) {
2747 1588119 : PipelineData* data = this->data_;
2748 1588119 : if (data->broker() && retire_broker) {
2749 463574 : data->broker()->Retire();
2750 : }
2751 1588116 : Run<FinalizeCodePhase>();
2752 :
2753 : MaybeHandle<Code> maybe_code = data->code();
2754 : Handle<Code> code;
2755 1588141 : if (!maybe_code.ToHandle(&code)) {
2756 8 : return maybe_code;
2757 : }
2758 :
2759 : if (data->profiler_data()) {
2760 : #ifdef ENABLE_DISASSEMBLER
2761 : std::ostringstream os;
2762 : code->Disassemble(nullptr, os);
2763 : data->profiler_data()->SetCode(&os);
2764 : #endif // ENABLE_DISASSEMBLER
2765 : }
2766 :
2767 : info()->SetCode(code);
2768 1588133 : PrintCode(isolate(), code, info());
2769 :
2770 1588131 : if (info()->trace_turbo_json_enabled()) {
2771 2 : TurboJsonFile json_of(info(), std::ios_base::app);
2772 :
2773 : json_of << "{\"name\":\"disassembly\",\"type\":\"disassembly\""
2774 3 : << BlockStartsAsJSON{&data->code_generator()->block_starts()}
2775 1 : << "\"data\":\"";
2776 : #ifdef ENABLE_DISASSEMBLER
2777 : std::stringstream disassembly_stream;
2778 : code->Disassemble(nullptr, disassembly_stream);
2779 : std::string disassembly_string(disassembly_stream.str());
2780 : for (const auto& c : disassembly_string) {
2781 : json_of << AsEscapedUC16ForJSON(c);
2782 : }
2783 : #endif // ENABLE_DISASSEMBLER
2784 1 : json_of << "\"}\n],\n";
2785 1 : json_of << "\"nodePositions\":";
2786 1 : json_of << data->source_position_output() << ",\n";
2787 1 : JsonPrintAllSourceWithPositions(json_of, data->info(), isolate());
2788 1 : json_of << "\n}";
2789 : }
2790 3176262 : if (info()->trace_turbo_json_enabled() ||
2791 : info()->trace_turbo_graph_enabled()) {
2792 1 : CodeTracer::Scope tracing_scope(data->GetCodeTracer());
2793 2 : OFStream os(tracing_scope.file());
2794 : os << "---------------------------------------------------\n"
2795 3 : << "Finished compiling method " << info()->GetDebugName().get()
2796 : << " using Turbofan" << std::endl;
2797 : }
2798 1588131 : return code;
2799 : }
2800 :
2801 1181504 : bool PipelineImpl::SelectInstructionsAndAssemble(
2802 : CallDescriptor* call_descriptor) {
2803 : Linkage linkage(call_descriptor);
2804 :
2805 : // Perform instruction selection and register allocation.
2806 1181504 : if (!SelectInstructions(&linkage)) return false;
2807 :
2808 : // Generate the final machine code.
2809 2363010 : AssembleCode(&linkage);
2810 1181506 : return true;
2811 : }
2812 :
2813 1114588 : MaybeHandle<Code> PipelineImpl::GenerateCode(CallDescriptor* call_descriptor) {
2814 1114588 : if (!SelectInstructionsAndAssemble(call_descriptor))
2815 0 : return MaybeHandle<Code>();
2816 1114589 : return FinalizeCode();
2817 : }
2818 :
2819 0 : bool PipelineImpl::CommitDependencies(Handle<Code> code) {
2820 1984849 : return data_->dependencies() == nullptr ||
2821 463634 : data_->dependencies()->Commit(code);
2822 : }
2823 :
2824 : namespace {
2825 :
2826 5032946 : void TraceSequence(OptimizedCompilationInfo* info, PipelineData* data,
2827 : const char* phase_name) {
2828 5032946 : if (info->trace_turbo_json_enabled()) {
2829 : AllowHandleDereference allow_deref;
2830 4 : TurboJsonFile json_of(info, std::ios_base::app);
2831 2 : json_of << "{\"name\":\"" << phase_name << "\",\"type\":\"sequence\",";
2832 2 : json_of << InstructionSequenceAsJSON{data->sequence()};
2833 2 : json_of << "},\n";
2834 : }
2835 5032946 : if (info->trace_turbo_graph_enabled()) {
2836 : AllowHandleDereference allow_deref;
2837 2 : CodeTracer::Scope tracing_scope(data->GetCodeTracer());
2838 4 : OFStream os(tracing_scope.file());
2839 2 : os << "----- Instruction sequence " << phase_name << " -----\n"
2840 2 : << *data->sequence();
2841 : }
2842 5032946 : }
2843 :
2844 : } // namespace
2845 :
2846 2515736 : void PipelineImpl::AllocateRegisters(const RegisterConfiguration* config,
2847 : CallDescriptor* call_descriptor,
2848 : bool run_verifier) {
2849 2515736 : PipelineData* data = this->data_;
2850 : // Don't track usage for this zone in compiler stats.
2851 5033372 : std::unique_ptr<Zone> verifier_zone;
2852 : RegisterAllocatorVerifier* verifier = nullptr;
2853 2515736 : if (run_verifier) {
2854 42 : verifier_zone.reset(new Zone(data->allocator(), ZONE_NAME));
2855 : verifier = new (verifier_zone.get()) RegisterAllocatorVerifier(
2856 42 : verifier_zone.get(), config, data->sequence());
2857 : }
2858 :
2859 : #ifdef DEBUG
2860 : data_->sequence()->ValidateEdgeSplitForm();
2861 : data_->sequence()->ValidateDeferredBlockEntryPaths();
2862 : data_->sequence()->ValidateDeferredBlockExitPaths();
2863 : #endif
2864 :
2865 2515736 : data->InitializeRegisterAllocationData(config, call_descriptor);
2866 2521745 : if (info()->is_osr()) data->osr_helper()->SetupFrame(data->frame());
2867 :
2868 2517047 : Run<MeetRegisterConstraintsPhase>();
2869 2516706 : Run<ResolvePhisPhase>();
2870 2517544 : Run<BuildLiveRangesPhase>();
2871 2517428 : Run<BuildBundlesPhase>();
2872 :
2873 2517531 : TraceSequence(info(), data, "before register allocation");
2874 2515946 : if (verifier != nullptr) {
2875 42 : CHECK(!data->register_allocation_data()->ExistsUseWithoutDefinition());
2876 42 : CHECK(data->register_allocation_data()
2877 : ->RangesDefinedInDeferredStayInDeferred());
2878 : }
2879 :
2880 2515946 : if (info()->trace_turbo_json_enabled() && !data->MayHaveUnverifiableGraph()) {
2881 2 : TurboCfgFile tcf(isolate());
2882 : tcf << AsC1VRegisterAllocationData("PreAllocation",
2883 1 : data->register_allocation_data());
2884 : }
2885 :
2886 2515946 : if (FLAG_turbo_preprocess_ranges) {
2887 2515498 : Run<SplinterLiveRangesPhase>();
2888 2517557 : if (info()->trace_turbo_json_enabled() &&
2889 : !data->MayHaveUnverifiableGraph()) {
2890 2 : TurboCfgFile tcf(isolate());
2891 : tcf << AsC1VRegisterAllocationData("PostSplinter",
2892 1 : data->register_allocation_data());
2893 : }
2894 : }
2895 :
2896 2518005 : Run<AllocateGeneralRegistersPhase<LinearScanAllocator>>();
2897 :
2898 2517182 : if (data->sequence()->HasFPVirtualRegisters()) {
2899 206896 : Run<AllocateFPRegistersPhase<LinearScanAllocator>>();
2900 : }
2901 :
2902 2517362 : if (FLAG_turbo_preprocess_ranges) {
2903 2517362 : Run<MergeSplintersPhase>();
2904 : }
2905 :
2906 2517587 : Run<DecideSpillingModePhase>();
2907 2517610 : Run<AssignSpillSlotsPhase>();
2908 2516905 : Run<CommitAssignmentPhase>();
2909 :
2910 : // TODO(chromium:725559): remove this check once
2911 : // we understand the cause of the bug. We keep just the
2912 : // check at the end of the allocation.
2913 2517590 : if (verifier != nullptr) {
2914 42 : verifier->VerifyAssignment("Immediately after CommitAssignmentPhase.");
2915 : }
2916 :
2917 2517590 : Run<PopulateReferenceMapsPhase>();
2918 :
2919 2517636 : Run<ConnectRangesPhase>();
2920 :
2921 2517586 : Run<ResolveControlFlowPhase>();
2922 2517575 : if (FLAG_turbo_move_optimization) {
2923 2517605 : Run<OptimizeMovesPhase>();
2924 : }
2925 2517209 : Run<LocateSpillSlotsPhase>();
2926 :
2927 2517612 : TraceSequence(info(), data, "after register allocation");
2928 :
2929 2517142 : if (verifier != nullptr) {
2930 42 : verifier->VerifyAssignment("End of regalloc pipeline.");
2931 42 : verifier->VerifyGapMoves();
2932 : }
2933 :
2934 2516329 : if (info()->trace_turbo_json_enabled() && !data->MayHaveUnverifiableGraph()) {
2935 2 : TurboCfgFile tcf(isolate());
2936 : tcf << AsC1VRegisterAllocationData("CodeGen",
2937 1 : data->register_allocation_data());
2938 : }
2939 :
2940 2516329 : data->DeleteRegisterAllocationZone();
2941 2517213 : }
2942 :
2943 40977060 : OptimizedCompilationInfo* PipelineImpl::info() const { return data_->info(); }
2944 :
2945 1 : Isolate* PipelineImpl::isolate() const { return data_->isolate(); }
2946 :
2947 0 : CodeGenerator* PipelineImpl::code_generator() const {
2948 871820 : return data_->code_generator();
2949 : }
2950 :
2951 : } // namespace compiler
2952 : } // namespace internal
2953 120216 : } // namespace v8
|