Line data Source code
1 : // Copyright 2014 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #include "src/compiler/pipeline.h"
6 :
7 : #include <fstream> // NOLINT(readability/streams)
8 : #include <iostream>
9 : #include <memory>
10 : #include <sstream>
11 :
12 : #include "src/assembler-inl.h"
13 : #include "src/base/adapters.h"
14 : #include "src/base/optional.h"
15 : #include "src/base/platform/elapsed-timer.h"
16 : #include "src/bootstrapper.h"
17 : #include "src/code-tracer.h"
18 : #include "src/compiler.h"
19 : #include "src/compiler/backend/code-generator.h"
20 : #include "src/compiler/backend/frame-elider.h"
21 : #include "src/compiler/backend/instruction-selector.h"
22 : #include "src/compiler/backend/instruction.h"
23 : #include "src/compiler/backend/jump-threading.h"
24 : #include "src/compiler/backend/live-range-separator.h"
25 : #include "src/compiler/backend/move-optimizer.h"
26 : #include "src/compiler/backend/register-allocator-verifier.h"
27 : #include "src/compiler/backend/register-allocator.h"
28 : #include "src/compiler/basic-block-instrumentor.h"
29 : #include "src/compiler/branch-elimination.h"
30 : #include "src/compiler/bytecode-graph-builder.h"
31 : #include "src/compiler/checkpoint-elimination.h"
32 : #include "src/compiler/common-operator-reducer.h"
33 : #include "src/compiler/compilation-dependencies.h"
34 : #include "src/compiler/compiler-source-position-table.h"
35 : #include "src/compiler/constant-folding-reducer.h"
36 : #include "src/compiler/control-flow-optimizer.h"
37 : #include "src/compiler/dead-code-elimination.h"
38 : #include "src/compiler/effect-control-linearizer.h"
39 : #include "src/compiler/escape-analysis-reducer.h"
40 : #include "src/compiler/escape-analysis.h"
41 : #include "src/compiler/graph-trimmer.h"
42 : #include "src/compiler/graph-visualizer.h"
43 : #include "src/compiler/js-call-reducer.h"
44 : #include "src/compiler/js-context-specialization.h"
45 : #include "src/compiler/js-create-lowering.h"
46 : #include "src/compiler/js-generic-lowering.h"
47 : #include "src/compiler/js-heap-broker.h"
48 : #include "src/compiler/js-heap-copy-reducer.h"
49 : #include "src/compiler/js-inlining-heuristic.h"
50 : #include "src/compiler/js-intrinsic-lowering.h"
51 : #include "src/compiler/js-native-context-specialization.h"
52 : #include "src/compiler/js-typed-lowering.h"
53 : #include "src/compiler/load-elimination.h"
54 : #include "src/compiler/loop-analysis.h"
55 : #include "src/compiler/loop-peeling.h"
56 : #include "src/compiler/loop-variable-optimizer.h"
57 : #include "src/compiler/machine-graph-verifier.h"
58 : #include "src/compiler/machine-operator-reducer.h"
59 : #include "src/compiler/memory-optimizer.h"
60 : #include "src/compiler/node-origin-table.h"
61 : #include "src/compiler/osr.h"
62 : #include "src/compiler/pipeline-statistics.h"
63 : #include "src/compiler/redundancy-elimination.h"
64 : #include "src/compiler/schedule.h"
65 : #include "src/compiler/scheduler.h"
66 : #include "src/compiler/select-lowering.h"
67 : #include "src/compiler/serializer-for-background-compilation.h"
68 : #include "src/compiler/simplified-lowering.h"
69 : #include "src/compiler/simplified-operator-reducer.h"
70 : #include "src/compiler/simplified-operator.h"
71 : #include "src/compiler/store-store-elimination.h"
72 : #include "src/compiler/type-narrowing-reducer.h"
73 : #include "src/compiler/typed-optimization.h"
74 : #include "src/compiler/typer.h"
75 : #include "src/compiler/value-numbering-reducer.h"
76 : #include "src/compiler/verifier.h"
77 : #include "src/compiler/wasm-compiler.h"
78 : #include "src/compiler/zone-stats.h"
79 : #include "src/disassembler.h"
80 : #include "src/isolate-inl.h"
81 : #include "src/objects/shared-function-info.h"
82 : #include "src/optimized-compilation-info.h"
83 : #include "src/ostreams.h"
84 : #include "src/parsing/parse-info.h"
85 : #include "src/register-configuration.h"
86 : #include "src/utils.h"
87 : #include "src/wasm/function-body-decoder.h"
88 : #include "src/wasm/wasm-engine.h"
89 :
90 : namespace v8 {
91 : namespace internal {
92 : namespace compiler {
93 :
94 : // Turbofan can only handle 2^16 control inputs. Since each control flow split
95 : // requires at least two bytes (jump and offset), we limit the bytecode size
96 : // to 128K bytes.
97 : const int kMaxBytecodeSizeForTurbofan = 128 * 1024;
98 :
99 : class PipelineData {
100 : public:
101 : // For main entry point.
102 948962 : PipelineData(ZoneStats* zone_stats, Isolate* isolate,
103 : OptimizedCompilationInfo* info,
104 : PipelineStatistics* pipeline_statistics)
105 : : isolate_(isolate),
106 : allocator_(isolate->allocator()),
107 : info_(info),
108 : debug_name_(info_->GetDebugName()),
109 : may_have_unverifiable_graph_(false),
110 : zone_stats_(zone_stats),
111 : pipeline_statistics_(pipeline_statistics),
112 : graph_zone_scope_(zone_stats_, ZONE_NAME),
113 474496 : graph_zone_(graph_zone_scope_.zone()),
114 : instruction_zone_scope_(zone_stats_, ZONE_NAME),
115 474496 : instruction_zone_(instruction_zone_scope_.zone()),
116 : codegen_zone_scope_(zone_stats_, ZONE_NAME),
117 474495 : codegen_zone_(codegen_zone_scope_.zone()),
118 : register_allocation_zone_scope_(zone_stats_, ZONE_NAME),
119 474494 : register_allocation_zone_(register_allocation_zone_scope_.zone()),
120 4270399 : assembler_options_(AssemblerOptions::Default(isolate)) {
121 : PhaseScope scope(pipeline_statistics, "init pipeline data");
122 948983 : graph_ = new (graph_zone_) Graph(graph_zone_);
123 948970 : source_positions_ = new (graph_zone_) SourcePositionTable(graph_);
124 : node_origins_ = info->trace_turbo_json_enabled()
125 2 : ? new (graph_zone_) NodeOriginTable(graph_)
126 948966 : : nullptr;
127 948966 : simplified_ = new (graph_zone_) SimplifiedOperatorBuilder(graph_zone_);
128 : machine_ = new (graph_zone_) MachineOperatorBuilder(
129 : graph_zone_, MachineType::PointerRepresentation(),
130 : InstructionSelector::SupportedMachineOperatorFlags(),
131 948980 : InstructionSelector::AlignmentRequirements());
132 948981 : common_ = new (graph_zone_) CommonOperatorBuilder(graph_zone_);
133 948978 : javascript_ = new (graph_zone_) JSOperatorBuilder(graph_zone_);
134 : jsgraph_ = new (graph_zone_)
135 948987 : JSGraph(isolate_, graph_, common_, javascript_, simplified_, machine_);
136 948985 : broker_ = new (info_->zone()) JSHeapBroker(isolate_, info_->zone());
137 : dependencies_ =
138 948962 : new (info_->zone()) CompilationDependencies(isolate_, info_->zone());
139 474485 : }
140 :
141 : // For WebAssembly compile entry point.
142 1068468 : PipelineData(ZoneStats* zone_stats, wasm::WasmEngine* wasm_engine,
143 3206436 : OptimizedCompilationInfo* info, MachineGraph* mcgraph,
144 : PipelineStatistics* pipeline_statistics,
145 : SourcePositionTable* source_positions,
146 : NodeOriginTable* node_origins,
147 : const AssemblerOptions& assembler_options)
148 : : isolate_(nullptr),
149 : wasm_engine_(wasm_engine),
150 1068468 : allocator_(wasm_engine->allocator()),
151 : info_(info),
152 : debug_name_(info_->GetDebugName()),
153 : may_have_unverifiable_graph_(false),
154 : zone_stats_(zone_stats),
155 : pipeline_statistics_(pipeline_statistics),
156 : graph_zone_scope_(zone_stats_, ZONE_NAME),
157 1068680 : graph_zone_(graph_zone_scope_.zone()),
158 : graph_(mcgraph->graph()),
159 : source_positions_(source_positions),
160 : node_origins_(node_origins),
161 : machine_(mcgraph->machine()),
162 : common_(mcgraph->common()),
163 : mcgraph_(mcgraph),
164 : instruction_zone_scope_(zone_stats_, ZONE_NAME),
165 1068812 : instruction_zone_(instruction_zone_scope_.zone()),
166 : codegen_zone_scope_(zone_stats_, ZONE_NAME),
167 1068820 : codegen_zone_(codegen_zone_scope_.zone()),
168 : register_allocation_zone_scope_(zone_stats_, ZONE_NAME),
169 1068729 : register_allocation_zone_(register_allocation_zone_scope_.zone()),
170 10687369 : assembler_options_(assembler_options) {}
171 :
172 : // For CodeStubAssembler and machine graph testing entry point.
173 1436037 : PipelineData(ZoneStats* zone_stats, OptimizedCompilationInfo* info,
174 1436037 : Isolate* isolate, Graph* graph, Schedule* schedule,
175 : SourcePositionTable* source_positions,
176 : NodeOriginTable* node_origins, JumpOptimizationInfo* jump_opt,
177 : const AssemblerOptions& assembler_options)
178 : : isolate_(isolate),
179 : allocator_(isolate->allocator()),
180 : info_(info),
181 : debug_name_(info_->GetDebugName()),
182 : zone_stats_(zone_stats),
183 : graph_zone_scope_(zone_stats_, ZONE_NAME),
184 1436049 : graph_zone_(graph_zone_scope_.zone()),
185 : graph_(graph),
186 : source_positions_(source_positions),
187 : node_origins_(node_origins),
188 : schedule_(schedule),
189 : instruction_zone_scope_(zone_stats_, ZONE_NAME),
190 1436049 : instruction_zone_(instruction_zone_scope_.zone()),
191 : codegen_zone_scope_(zone_stats_, ZONE_NAME),
192 1436049 : codegen_zone_(codegen_zone_scope_.zone()),
193 : register_allocation_zone_scope_(zone_stats_, ZONE_NAME),
194 1436045 : register_allocation_zone_(register_allocation_zone_scope_.zone()),
195 : jump_optimization_info_(jump_opt),
196 10052315 : assembler_options_(assembler_options) {
197 2872098 : simplified_ = new (graph_zone_) SimplifiedOperatorBuilder(graph_zone_);
198 : machine_ = new (graph_zone_) MachineOperatorBuilder(
199 : graph_zone_, MachineType::PointerRepresentation(),
200 : InstructionSelector::SupportedMachineOperatorFlags(),
201 2872097 : InstructionSelector::AlignmentRequirements());
202 2872096 : common_ = new (graph_zone_) CommonOperatorBuilder(graph_zone_);
203 2872097 : javascript_ = new (graph_zone_) JSOperatorBuilder(graph_zone_);
204 : jsgraph_ = new (graph_zone_)
205 2872097 : JSGraph(isolate_, graph_, common_, javascript_, simplified_, machine_);
206 1436047 : }
207 :
208 : // For register allocation testing entry point.
209 42 : PipelineData(ZoneStats* zone_stats, OptimizedCompilationInfo* info,
210 84 : Isolate* isolate, InstructionSequence* sequence)
211 : : isolate_(isolate),
212 : allocator_(isolate->allocator()),
213 : info_(info),
214 : debug_name_(info_->GetDebugName()),
215 : zone_stats_(zone_stats),
216 : graph_zone_scope_(zone_stats_, ZONE_NAME),
217 : instruction_zone_scope_(zone_stats_, ZONE_NAME),
218 : instruction_zone_(sequence->zone()),
219 : sequence_(sequence),
220 : codegen_zone_scope_(zone_stats_, ZONE_NAME),
221 42 : codegen_zone_(codegen_zone_scope_.zone()),
222 : register_allocation_zone_scope_(zone_stats_, ZONE_NAME),
223 42 : register_allocation_zone_(register_allocation_zone_scope_.zone()),
224 294 : assembler_options_(AssemblerOptions::Default(isolate)) {}
225 :
226 2979394 : ~PipelineData() {
227 : // Must happen before zones are destroyed.
228 2979394 : delete code_generator_;
229 2979424 : code_generator_ = nullptr;
230 2979424 : DeleteTyper();
231 :
232 2979396 : DeleteRegisterAllocationZone();
233 2979398 : DeleteInstructionZone();
234 2979404 : DeleteCodegenZone();
235 2979433 : DeleteGraphZone();
236 2979417 : }
237 :
238 : Isolate* isolate() const { return isolate_; }
239 : AccountingAllocator* allocator() const { return allocator_; }
240 : OptimizedCompilationInfo* info() const { return info_; }
241 : ZoneStats* zone_stats() const { return zone_stats_; }
242 : CompilationDependencies* dependencies() const { return dependencies_; }
243 : PipelineStatistics* pipeline_statistics() { return pipeline_statistics_; }
244 : OsrHelper* osr_helper() { return &(*osr_helper_); }
245 : bool compilation_failed() const { return compilation_failed_; }
246 9 : void set_compilation_failed() { compilation_failed_ = true; }
247 :
248 : bool verify_graph() const { return verify_graph_; }
249 133830 : void set_verify_graph(bool value) { verify_graph_ = value; }
250 :
251 : MaybeHandle<Code> code() { return code_; }
252 : void set_code(MaybeHandle<Code> code) {
253 : DCHECK(code_.is_null());
254 1880739 : code_ = code;
255 : }
256 :
257 : CodeGenerator* code_generator() const { return code_generator_; }
258 :
259 : // RawMachineAssembler generally produces graphs which cannot be verified.
260 : bool MayHaveUnverifiableGraph() const { return may_have_unverifiable_graph_; }
261 :
262 : Zone* graph_zone() const { return graph_zone_; }
263 : Graph* graph() const { return graph_; }
264 : SourcePositionTable* source_positions() const { return source_positions_; }
265 : NodeOriginTable* node_origins() const { return node_origins_; }
266 : MachineOperatorBuilder* machine() const { return machine_; }
267 : CommonOperatorBuilder* common() const { return common_; }
268 : JSOperatorBuilder* javascript() const { return javascript_; }
269 : JSGraph* jsgraph() const { return jsgraph_; }
270 : MachineGraph* mcgraph() const { return mcgraph_; }
271 912292 : Handle<Context> native_context() const {
272 1824600 : return handle(info()->native_context(), isolate());
273 : }
274 : Handle<JSGlobalObject> global_object() const {
275 : return handle(info()->global_object(), isolate());
276 : }
277 :
278 : JSHeapBroker* broker() const { return broker_; }
279 :
280 : Schedule* schedule() const { return schedule_; }
281 : void set_schedule(Schedule* schedule) {
282 : DCHECK(!schedule_);
283 2667311 : schedule_ = schedule;
284 : }
285 : void reset_schedule() { schedule_ = nullptr; }
286 :
287 : Zone* instruction_zone() const { return instruction_zone_; }
288 : Zone* codegen_zone() const { return codegen_zone_; }
289 : InstructionSequence* sequence() const { return sequence_; }
290 : Frame* frame() const { return frame_; }
291 :
292 : Zone* register_allocation_zone() const { return register_allocation_zone_; }
293 : RegisterAllocationData* register_allocation_data() const {
294 : return register_allocation_data_;
295 : }
296 :
297 : BasicBlockProfiler::Data* profiler_data() const { return profiler_data_; }
298 : void set_profiler_data(BasicBlockProfiler::Data* profiler_data) {
299 10 : profiler_data_ = profiler_data;
300 : }
301 :
302 : std::string const& source_position_output() const {
303 : return source_position_output_;
304 : }
305 : void set_source_position_output(std::string const& source_position_output) {
306 2 : source_position_output_ = source_position_output;
307 : }
308 :
309 : JumpOptimizationInfo* jump_optimization_info() const {
310 : return jump_optimization_info_;
311 : }
312 :
313 : const AssemblerOptions& assembler_options() const {
314 : return assembler_options_;
315 : }
316 :
317 30 : CodeTracer* GetCodeTracer() const {
318 60 : return wasm_engine_ == nullptr ? isolate_->GetCodeTracer()
319 60 : : wasm_engine_->GetCodeTracer();
320 : }
321 :
322 912214 : Typer* CreateTyper() {
323 : DCHECK_NULL(typer_);
324 912214 : typer_ = new Typer(broker(), typer_flags_, graph());
325 456108 : return typer_;
326 : }
327 :
328 : void AddTyperFlag(Typer::Flag flag) {
329 : DCHECK_NULL(typer_);
330 : typer_flags_ |= flag;
331 : }
332 :
333 3435504 : void DeleteTyper() {
334 3435504 : delete typer_;
335 3435507 : typer_ = nullptr;
336 3435507 : }
337 :
338 5929054 : void DeleteGraphZone() {
339 11858146 : if (graph_zone_ == nullptr) return;
340 2979344 : graph_zone_scope_.Destroy();
341 2979382 : graph_zone_ = nullptr;
342 2979382 : graph_ = nullptr;
343 2979382 : source_positions_ = nullptr;
344 2979382 : node_origins_ = nullptr;
345 2979382 : simplified_ = nullptr;
346 2979382 : machine_ = nullptr;
347 2979382 : common_ = nullptr;
348 2979382 : javascript_ = nullptr;
349 2979382 : jsgraph_ = nullptr;
350 2979382 : mcgraph_ = nullptr;
351 2979382 : schedule_ = nullptr;
352 : }
353 :
354 5928918 : void DeleteInstructionZone() {
355 11857965 : if (instruction_zone_ == nullptr) return;
356 2979305 : instruction_zone_scope_.Destroy();
357 2979434 : instruction_zone_ = nullptr;
358 2979434 : sequence_ = nullptr;
359 : }
360 :
361 2979404 : void DeleteCodegenZone() {
362 5958838 : if (codegen_zone_ == nullptr) return;
363 2979406 : codegen_zone_scope_.Destroy();
364 2979436 : codegen_zone_ = nullptr;
365 2979436 : dependencies_ = nullptr;
366 2979436 : broker_ = nullptr;
367 2979436 : frame_ = nullptr;
368 : }
369 :
370 5928997 : void DeleteRegisterAllocationZone() {
371 11858081 : if (register_allocation_zone_ == nullptr) return;
372 2979350 : register_allocation_zone_scope_.Destroy();
373 2979437 : register_allocation_zone_ = nullptr;
374 2979437 : register_allocation_data_ = nullptr;
375 : }
376 :
377 5899100 : void InitializeInstructionSequence(const CallDescriptor* call_descriptor) {
378 : DCHECK_NULL(sequence_);
379 : InstructionBlocks* instruction_blocks =
380 : InstructionSequence::InstructionBlocksFor(instruction_zone(),
381 2949441 : schedule());
382 : sequence_ = new (instruction_zone())
383 2949492 : InstructionSequence(isolate(), instruction_zone(), instruction_blocks);
384 5899040 : if (call_descriptor && call_descriptor->RequiresFrameAsIncoming()) {
385 2845531 : sequence_->instruction_blocks()[0]->mark_needs_frame();
386 : } else {
387 : DCHECK_EQ(0u, call_descriptor->CalleeSavedFPRegisters());
388 : DCHECK_EQ(0u, call_descriptor->CalleeSavedRegisters());
389 : }
390 2949641 : }
391 :
392 5898882 : void InitializeFrameData(CallDescriptor* call_descriptor) {
393 : DCHECK_NULL(frame_);
394 : int fixed_frame_size = 0;
395 2949516 : if (call_descriptor != nullptr) {
396 2949468 : fixed_frame_size = call_descriptor->CalculateFixedFrameSize();
397 : }
398 2949743 : frame_ = new (codegen_zone()) Frame(fixed_frame_size);
399 2949642 : }
400 :
401 2949455 : void InitializeRegisterAllocationData(const RegisterConfiguration* config,
402 2949455 : CallDescriptor* call_descriptor) {
403 : DCHECK_NULL(register_allocation_data_);
404 : register_allocation_data_ = new (register_allocation_zone())
405 : RegisterAllocationData(config, register_allocation_zone(), frame(),
406 2949741 : sequence(), debug_name());
407 2949665 : }
408 :
409 4917 : void InitializeOsrHelper() {
410 : DCHECK(!osr_helper_.has_value());
411 4917 : osr_helper_.emplace(info());
412 : }
413 :
414 : void set_start_source_position(int position) {
415 : DCHECK_EQ(start_source_position_, kNoSourcePosition);
416 452027 : start_source_position_ = position;
417 : }
418 :
419 2949400 : void InitializeCodeGenerator(Linkage* linkage) {
420 : DCHECK_NULL(code_generator_);
421 :
422 : code_generator_ = new CodeGenerator(
423 : codegen_zone(), frame(), linkage, sequence(), info(), isolate(),
424 : osr_helper_, start_source_position_, jump_optimization_info_,
425 2949400 : info()->GetPoisoningMitigationLevel(), assembler_options_,
426 2949400 : info_->builtin_index());
427 2949405 : }
428 :
429 8327420 : void BeginPhaseKind(const char* phase_kind_name) {
430 8327420 : if (pipeline_statistics() != nullptr) {
431 0 : pipeline_statistics()->BeginPhaseKind(phase_kind_name);
432 : }
433 : }
434 :
435 3405859 : void EndPhaseKind() {
436 3405859 : if (pipeline_statistics() != nullptr) {
437 0 : pipeline_statistics()->EndPhaseKind();
438 : }
439 : }
440 :
441 : const char* debug_name() const { return debug_name_.get(); }
442 :
443 : private:
444 : Isolate* const isolate_;
445 : wasm::WasmEngine* const wasm_engine_ = nullptr;
446 : AccountingAllocator* const allocator_;
447 : OptimizedCompilationInfo* const info_;
448 : std::unique_ptr<char[]> debug_name_;
449 : bool may_have_unverifiable_graph_ = true;
450 : ZoneStats* const zone_stats_;
451 : PipelineStatistics* pipeline_statistics_ = nullptr;
452 : bool compilation_failed_ = false;
453 : bool verify_graph_ = false;
454 : int start_source_position_ = kNoSourcePosition;
455 : base::Optional<OsrHelper> osr_helper_;
456 : MaybeHandle<Code> code_;
457 : CodeGenerator* code_generator_ = nullptr;
458 : Typer* typer_ = nullptr;
459 : Typer::Flags typer_flags_ = Typer::kNoFlags;
460 :
461 : // All objects in the following group of fields are allocated in graph_zone_.
462 : // They are all set to nullptr when the graph_zone_ is destroyed.
463 : ZoneStats::Scope graph_zone_scope_;
464 : Zone* graph_zone_ = nullptr;
465 : Graph* graph_ = nullptr;
466 : SourcePositionTable* source_positions_ = nullptr;
467 : NodeOriginTable* node_origins_ = nullptr;
468 : SimplifiedOperatorBuilder* simplified_ = nullptr;
469 : MachineOperatorBuilder* machine_ = nullptr;
470 : CommonOperatorBuilder* common_ = nullptr;
471 : JSOperatorBuilder* javascript_ = nullptr;
472 : JSGraph* jsgraph_ = nullptr;
473 : MachineGraph* mcgraph_ = nullptr;
474 : Schedule* schedule_ = nullptr;
475 :
476 : // All objects in the following group of fields are allocated in
477 : // instruction_zone_. They are all set to nullptr when the instruction_zone_
478 : // is destroyed.
479 : ZoneStats::Scope instruction_zone_scope_;
480 : Zone* instruction_zone_;
481 : InstructionSequence* sequence_ = nullptr;
482 :
483 : // All objects in the following group of fields are allocated in
484 : // codegen_zone_. They are all set to nullptr when the codegen_zone_
485 : // is destroyed.
486 : ZoneStats::Scope codegen_zone_scope_;
487 : Zone* codegen_zone_;
488 : CompilationDependencies* dependencies_ = nullptr;
489 : JSHeapBroker* broker_ = nullptr;
490 : Frame* frame_ = nullptr;
491 :
492 : // All objects in the following group of fields are allocated in
493 : // register_allocation_zone_. They are all set to nullptr when the zone is
494 : // destroyed.
495 : ZoneStats::Scope register_allocation_zone_scope_;
496 : Zone* register_allocation_zone_;
497 : RegisterAllocationData* register_allocation_data_ = nullptr;
498 :
499 : // Basic block profiling support.
500 : BasicBlockProfiler::Data* profiler_data_ = nullptr;
501 :
502 : // Source position output for --trace-turbo.
503 : std::string source_position_output_;
504 :
505 : JumpOptimizationInfo* jump_optimization_info_ = nullptr;
506 : AssemblerOptions assembler_options_;
507 :
508 : DISALLOW_COPY_AND_ASSIGN(PipelineData);
509 : };
510 :
511 : class PipelineImpl final {
512 : public:
513 2979388 : explicit PipelineImpl(PipelineData* data) : data_(data) {}
514 :
515 : // Helpers for executing pipeline phases.
516 : template <typename Phase, typename... Args>
517 : void Run(Args&&... args);
518 :
519 : // Step A. Run the graph creation and initial optimization passes.
520 : bool CreateGraph();
521 :
522 : // B. Run the concurrent optimization passes.
523 : bool OptimizeGraph(Linkage* linkage);
524 :
525 : // Substep B.1. Produce a scheduled graph.
526 : void ComputeScheduledGraph();
527 :
528 : // Substep B.2. Select instructions from a scheduled graph.
529 : bool SelectInstructions(Linkage* linkage);
530 :
531 : // Step C. Run the code assembly pass.
532 : void AssembleCode(Linkage* linkage);
533 :
534 : // Step D. Run the code finalization pass.
535 : MaybeHandle<Code> FinalizeCode();
536 :
537 : // Step E. Install any code dependencies.
538 : bool CommitDependencies(Handle<Code> code);
539 :
540 : void VerifyGeneratedCodeIsIdempotent();
541 : void RunPrintAndVerify(const char* phase, bool untyped = false);
542 : MaybeHandle<Code> GenerateCode(CallDescriptor* call_descriptor);
543 : void AllocateRegisters(const RegisterConfiguration* config,
544 : CallDescriptor* call_descriptor, bool run_verifier);
545 :
546 : OptimizedCompilationInfo* info() const;
547 : Isolate* isolate() const;
548 : CodeGenerator* code_generator() const;
549 :
550 : private:
551 : PipelineData* const data_;
552 : };
553 :
554 : namespace {
555 :
556 0 : void PrintFunctionSource(OptimizedCompilationInfo* info, Isolate* isolate,
557 : int source_id, Handle<SharedFunctionInfo> shared) {
558 0 : if (!shared->script()->IsUndefined(isolate)) {
559 0 : Handle<Script> script(Script::cast(shared->script()), isolate);
560 :
561 0 : if (!script->source()->IsUndefined(isolate)) {
562 0 : CodeTracer::Scope tracing_scope(isolate->GetCodeTracer());
563 0 : Object source_name = script->name();
564 0 : OFStream os(tracing_scope.file());
565 0 : os << "--- FUNCTION SOURCE (";
566 0 : if (source_name->IsString()) {
567 0 : os << String::cast(source_name)->ToCString().get() << ":";
568 : }
569 0 : os << shared->DebugName()->ToCString().get() << ") id{";
570 0 : os << info->optimization_id() << "," << source_id << "} start{";
571 0 : os << shared->StartPosition() << "} ---\n";
572 : {
573 : DisallowHeapAllocation no_allocation;
574 0 : int start = shared->StartPosition();
575 0 : int len = shared->EndPosition() - start;
576 : SubStringRange source(String::cast(script->source()), no_allocation,
577 0 : start, len);
578 0 : for (const auto& c : source) {
579 0 : os << AsReversiblyEscapedUC16(c);
580 : }
581 : }
582 :
583 0 : os << "\n--- END ---\n";
584 : }
585 : }
586 0 : }
587 :
588 : // Print information for the given inlining: which function was inlined and
589 : // where the inlining occurred.
590 0 : void PrintInlinedFunctionInfo(
591 0 : OptimizedCompilationInfo* info, Isolate* isolate, int source_id,
592 : int inlining_id, const OptimizedCompilationInfo::InlinedFunctionHolder& h) {
593 0 : CodeTracer::Scope tracing_scope(isolate->GetCodeTracer());
594 0 : OFStream os(tracing_scope.file());
595 0 : os << "INLINE (" << h.shared_info->DebugName()->ToCString().get() << ") id{"
596 0 : << info->optimization_id() << "," << source_id << "} AS " << inlining_id
597 0 : << " AT ";
598 0 : const SourcePosition position = h.position.position;
599 0 : if (position.IsKnown()) {
600 0 : os << "<" << position.InliningId() << ":" << position.ScriptOffset() << ">";
601 : } else {
602 0 : os << "<?>";
603 : }
604 : os << std::endl;
605 0 : }
606 :
607 : // Print the source of all functions that participated in this optimizing
608 : // compilation. For inlined functions print source position of their inlining.
609 0 : void PrintParticipatingSource(OptimizedCompilationInfo* info,
610 : Isolate* isolate) {
611 : AllowDeferredHandleDereference allow_deference_for_print_code;
612 :
613 0 : SourceIdAssigner id_assigner(info->inlined_functions().size());
614 0 : PrintFunctionSource(info, isolate, -1, info->shared_info());
615 0 : const auto& inlined = info->inlined_functions();
616 0 : for (unsigned id = 0; id < inlined.size(); id++) {
617 0 : const int source_id = id_assigner.GetIdFor(inlined[id].shared_info);
618 0 : PrintFunctionSource(info, isolate, source_id, inlined[id].shared_info);
619 0 : PrintInlinedFunctionInfo(info, isolate, source_id, id, inlined[id]);
620 0 : }
621 0 : }
622 :
623 : // Print the code after compiling it.
624 1880730 : void PrintCode(Isolate* isolate, Handle<Code> code,
625 : OptimizedCompilationInfo* info) {
626 1880730 : if (FLAG_print_opt_source && info->IsOptimizing()) {
627 0 : PrintParticipatingSource(info, isolate);
628 : }
629 :
630 : #ifdef ENABLE_DISASSEMBLER
631 : AllowDeferredHandleDereference allow_deference_for_print_code;
632 : bool print_code =
633 : FLAG_print_code ||
634 : (info->IsOptimizing() && FLAG_print_opt_code &&
635 : info->shared_info()->PassesFilter(FLAG_print_opt_code_filter));
636 : if (print_code) {
637 : std::unique_ptr<char[]> debug_name = info->GetDebugName();
638 : CodeTracer::Scope tracing_scope(isolate->GetCodeTracer());
639 : OFStream os(tracing_scope.file());
640 :
641 : // Print the source code if available.
642 : bool print_source = code->kind() == Code::OPTIMIZED_FUNCTION;
643 : if (print_source) {
644 : Handle<SharedFunctionInfo> shared = info->shared_info();
645 : if (shared->script()->IsScript() &&
646 : !Script::cast(shared->script())->source()->IsUndefined(isolate)) {
647 : os << "--- Raw source ---\n";
648 : StringCharacterStream stream(
649 : String::cast(Script::cast(shared->script())->source()),
650 : shared->StartPosition());
651 : // fun->end_position() points to the last character in the stream. We
652 : // need to compensate by adding one to calculate the length.
653 : int source_len = shared->EndPosition() - shared->StartPosition() + 1;
654 : for (int i = 0; i < source_len; i++) {
655 : if (stream.HasMore()) {
656 : os << AsReversiblyEscapedUC16(stream.GetNext());
657 : }
658 : }
659 : os << "\n\n";
660 : }
661 : }
662 : if (info->IsOptimizing()) {
663 : os << "--- Optimized code ---\n"
664 : << "optimization_id = " << info->optimization_id() << "\n";
665 : } else {
666 : os << "--- Code ---\n";
667 : }
668 : if (print_source) {
669 : Handle<SharedFunctionInfo> shared = info->shared_info();
670 : os << "source_position = " << shared->StartPosition() << "\n";
671 : }
672 : code->Disassemble(debug_name.get(), os);
673 : os << "--- End code ---\n";
674 : }
675 : #endif // ENABLE_DISASSEMBLER
676 1880730 : }
677 :
678 3123425 : void TraceSchedule(OptimizedCompilationInfo* info, PipelineData* data,
679 : Schedule* schedule, const char* phase_name) {
680 3123422 : if (info->trace_turbo_json_enabled()) {
681 : AllowHandleDereference allow_deref;
682 3 : TurboJsonFile json_of(info, std::ios_base::app);
683 3 : json_of << "{\"name\":\"" << phase_name << "\",\"type\":\"schedule\""
684 3 : << ",\"data\":\"";
685 6 : std::stringstream schedule_stream;
686 3 : schedule_stream << *schedule;
687 : std::string schedule_string(schedule_stream.str());
688 6953 : for (const auto& c : schedule_string) {
689 13894 : json_of << AsEscapedUC16ForJSON(c);
690 : }
691 6 : json_of << "\"},\n";
692 : }
693 3123422 : if (info->trace_turbo_graph_enabled() || FLAG_trace_turbo_scheduler) {
694 : AllowHandleDereference allow_deref;
695 3 : CodeTracer::Scope tracing_scope(data->GetCodeTracer());
696 6 : OFStream os(tracing_scope.file());
697 3 : os << "-- Schedule --------------------------------------\n" << *schedule;
698 : }
699 3123422 : }
700 :
701 :
702 : class SourcePositionWrapper final : public Reducer {
703 : public:
704 : SourcePositionWrapper(Reducer* reducer, SourcePositionTable* table)
705 482224 : : reducer_(reducer), table_(table) {}
706 0 : ~SourcePositionWrapper() final = default;
707 :
708 1137 : const char* reducer_name() const override { return reducer_->reducer_name(); }
709 :
710 39495661 : Reduction Reduce(Node* node) final {
711 39495661 : SourcePosition const pos = table_->GetSourcePosition(node);
712 39495648 : SourcePositionTable::Scope position(table_, pos);
713 78991297 : return reducer_->Reduce(node);
714 : }
715 :
716 623667 : void Finalize() final { reducer_->Finalize(); }
717 :
718 : private:
719 : Reducer* const reducer_;
720 : SourcePositionTable* const table_;
721 :
722 : DISALLOW_COPY_AND_ASSIGN(SourcePositionWrapper);
723 : };
724 :
725 : class NodeOriginsWrapper final : public Reducer {
726 : public:
727 : NodeOriginsWrapper(Reducer* reducer, NodeOriginTable* table)
728 43 : : reducer_(reducer), table_(table) {}
729 0 : ~NodeOriginsWrapper() final = default;
730 :
731 1137 : const char* reducer_name() const override { return reducer_->reducer_name(); }
732 :
733 1137 : Reduction Reduce(Node* node) final {
734 1137 : NodeOriginTable::Scope position(table_, reducer_name(), node);
735 2274 : return reducer_->Reduce(node);
736 : }
737 :
738 51 : void Finalize() final { reducer_->Finalize(); }
739 :
740 : private:
741 : Reducer* const reducer_;
742 : NodeOriginTable* const table_;
743 :
744 : DISALLOW_COPY_AND_ASSIGN(NodeOriginsWrapper);
745 : };
746 :
747 42905606 : void AddReducer(PipelineData* data, GraphReducer* graph_reducer,
748 : Reducer* reducer) {
749 20970536 : if (data->info()->is_source_positions_enabled()) {
750 482224 : void* const buffer = data->graph_zone()->New(sizeof(SourcePositionWrapper));
751 : SourcePositionWrapper* const wrapper =
752 482224 : new (buffer) SourcePositionWrapper(reducer, data->source_positions());
753 : reducer = wrapper;
754 : }
755 20970536 : if (data->info()->trace_turbo_json_enabled()) {
756 43 : void* const buffer = data->graph_zone()->New(sizeof(NodeOriginsWrapper));
757 : NodeOriginsWrapper* const wrapper =
758 43 : new (buffer) NodeOriginsWrapper(reducer, data->node_origins());
759 : reducer = wrapper;
760 : }
761 :
762 20970536 : graph_reducer->AddReducer(reducer);
763 20970579 : }
764 :
// RAII scope wrapped around the execution of a single pipeline phase:
// starts/stops phase statistics, opens a temporary zone for phase-local
// allocations, and tags newly created node origins with the phase name.
class PipelineRunScope {
 public:
  PipelineRunScope(PipelineData* data, const char* phase_name)
      : phase_scope_(
            // A null phase name means "unnamed work": skip statistics.
            phase_name == nullptr ? nullptr : data->pipeline_statistics(),
            phase_name),
        zone_scope_(data->zone_stats(), ZONE_NAME),
        origin_scope_(data->node_origins(), phase_name) {}

  // Phase-local zone; freed when the scope is destroyed.
  Zone* zone() { return zone_scope_.zone(); }

 private:
  PhaseScope phase_scope_;
  ZoneStats::Scope zone_scope_;
  NodeOriginTable::PhaseScope origin_scope_;
};
781 :
// Creates a PipelineStatistics object when --turbo-stats/--turbo-stats-nvp is
// on (returns nullptr otherwise) and, when JSON tracing is enabled, writes
// the function header of the Turbolizer trace file.
PipelineStatistics* CreatePipelineStatistics(Handle<Script> script,
                                             OptimizedCompilationInfo* info,
                                             Isolate* isolate,
                                             ZoneStats* zone_stats) {
  PipelineStatistics* pipeline_statistics = nullptr;

  if (FLAG_turbo_stats || FLAG_turbo_stats_nvp) {
    pipeline_statistics =
        new PipelineStatistics(info, isolate->GetTurboStatistics(), zone_stats);
    pipeline_statistics->BeginPhaseKind("initializing");
  }

  if (info->trace_turbo_json_enabled()) {
    // Truncate any previous trace file and emit the JSON preamble; the
    // "phases" array opened here is filled in by later phases.
    TurboJsonFile json_of(info, std::ios_base::trunc);
    json_of << "{\"function\" : ";
    JsonPrintFunctionSource(json_of, -1, info->GetDebugName(), script, isolate,
                            info->shared_info());
    json_of << ",\n\"phases\":[";
  }

  return pipeline_statistics;
}
804 :
// Wasm overload of CreatePipelineStatistics: statistics are keyed by the
// WasmEngine (--turbo-stats-wasm), and the JSON trace embeds the raw wasm
// disassembly plus a source-line-to-bytecode-offset table.
PipelineStatistics* CreatePipelineStatistics(
    wasm::WasmEngine* wasm_engine, wasm::FunctionBody function_body,
    const wasm::WasmModule* wasm_module, OptimizedCompilationInfo* info,
    ZoneStats* zone_stats) {
  PipelineStatistics* pipeline_statistics = nullptr;

  if (FLAG_turbo_stats_wasm) {
    pipeline_statistics = new PipelineStatistics(
        info, wasm_engine->GetOrCreateTurboStatistics(), zone_stats);
    pipeline_statistics->BeginPhaseKind("initializing");
  }

  if (info->trace_turbo_json_enabled()) {
    TurboJsonFile json_of(info, std::ios_base::trunc);
    std::unique_ptr<char[]> function_name = info->GetDebugName();
    json_of << "{\"function\":\"" << function_name.get() << "\", \"source\":\"";
    AccountingAllocator allocator;
    std::ostringstream disassembly;
    std::vector<int> source_positions;
    wasm::PrintRawWasmCode(&allocator, function_body, wasm_module,
                           wasm::kPrintLocals, disassembly, &source_positions);
    // Escape the disassembly character by character so it is valid JSON.
    for (const auto& c : disassembly.str()) {
      json_of << AsEscapedUC16ForJSON(c);
    }
    json_of << "\",\n\"sourceLineToBytecodePosition\" : [";
    bool insert_comma = false;
    for (auto val : source_positions) {
      if (insert_comma) {
        json_of << ", ";
      }
      json_of << val;
      insert_comma = true;
    }
    json_of << "],\n\"phases\":[";
  }

  return pipeline_statistics;
}
843 :
844 : } // namespace
845 :
// Optimized (TurboFan) compilation job for a regular JavaScript function.
// Owns the zones, statistics and PipelineData that live for the whole job;
// Prepare/Finalize run on the main thread, Execute may run concurrently.
class PipelineCompilationJob final : public OptimizedCompilationJob {
 public:
  PipelineCompilationJob(Isolate* isolate,
                         Handle<SharedFunctionInfo> shared_info,
                         Handle<JSFunction> function)
      // Note that the OptimizedCompilationInfo is not initialized at the time
      // we pass it to the CompilationJob constructor, but it is not
      // dereferenced there.
      : OptimizedCompilationJob(
            function->GetIsolate()->stack_guard()->real_climit(),
            &compilation_info_, "TurboFan"),
        zone_(function->GetIsolate()->allocator(), ZONE_NAME),
        zone_stats_(function->GetIsolate()->allocator()),
        compilation_info_(&zone_, function->GetIsolate(), shared_info,
                          function),
        pipeline_statistics_(CreatePipelineStatistics(
            handle(Script::cast(shared_info->script()), isolate),
            compilation_info(), function->GetIsolate(), &zone_stats_)),
        data_(&zone_stats_, function->GetIsolate(), compilation_info(),
              pipeline_statistics_.get()),
        pipeline_(&data_),
        linkage_(nullptr) {}

 protected:
  Status PrepareJobImpl(Isolate* isolate) final;
  Status ExecuteJobImpl() final;
  Status FinalizeJobImpl(Isolate* isolate) final;

  // Registers weak object to optimized code dependencies.
  void RegisterWeakObjectsInOptimizedCode(Handle<Code> code, Isolate* isolate);

 private:
  Zone zone_;
  ZoneStats zone_stats_;
  OptimizedCompilationInfo compilation_info_;
  std::unique_ptr<PipelineStatistics> pipeline_statistics_;
  PipelineData data_;
  PipelineImpl pipeline_;
  // Zone-allocated in PrepareJobImpl; not owned by this class.
  Linkage* linkage_;

  DISALLOW_COPY_AND_ASSIGN(PipelineCompilationJob);
};
888 :
// Main-thread preparation: checks bailout conditions, copies the current
// FLAG settings into the compilation info, computes the linkage, and builds
// the initial graph.
PipelineCompilationJob::Status PipelineCompilationJob::PrepareJobImpl(
    Isolate* isolate) {
  // Refuse to optimize functions whose bytecode exceeds the TurboFan limit.
  if (compilation_info()->bytecode_array()->length() >
      kMaxBytecodeSizeForTurbofan) {
    return AbortOptimization(BailoutReason::kFunctionTooBig);
  }

  if (!FLAG_always_opt) {
    compilation_info()->MarkAsBailoutOnUninitialized();
  }
  if (FLAG_turbo_loop_peeling) {
    compilation_info()->MarkAsLoopPeelingEnabled();
  }
  if (FLAG_turbo_inlining) {
    compilation_info()->MarkAsInliningEnabled();
  }
  if (FLAG_inline_accessors) {
    compilation_info()->MarkAsAccessorInliningEnabled();
  }

  // This is the bottleneck for computing and setting poisoning level in the
  // optimizing compiler.
  PoisoningMitigationLevel load_poisoning =
      PoisoningMitigationLevel::kDontPoison;
  if (FLAG_untrusted_code_mitigations) {
    // For full mitigations, this can be changed to
    // PoisoningMitigationLevel::kPoisonAll.
    load_poisoning = PoisoningMitigationLevel::kPoisonCriticalOnly;
  }
  compilation_info()->SetPoisoningMitigationLevel(load_poisoning);

  if (FLAG_turbo_allocation_folding) {
    compilation_info()->MarkAsAllocationFoldingEnabled();
  }

  // A feedback cell with the one-closure-cell map indicates a single closure;
  // in that case the function's context can be specialized into the code.
  if (compilation_info()->closure()->raw_feedback_cell()->map() ==
      ReadOnlyRoots(isolate).one_closure_cell_map()) {
    compilation_info()->MarkAsFunctionContextSpecializing();
  }

  data_.set_start_source_position(
      compilation_info()->shared_info()->StartPosition());

  linkage_ = new (compilation_info()->zone()) Linkage(
      Linkage::ComputeIncoming(compilation_info()->zone(), compilation_info()));

  if (!pipeline_.CreateGraph()) {
    if (isolate->has_pending_exception()) return FAILED;  // Stack overflowed.
    return AbortOptimization(BailoutReason::kGraphBuildingFailed);
  }

  if (compilation_info()->is_osr()) data_.InitializeOsrHelper();

  // Make sure that we have generated the deopt entries code. This is in order
  // to avoid triggering the generation of deopt entries later during code
  // assembly.
  Deoptimizer::EnsureCodeForDeoptimizationEntries(isolate);

  return SUCCEEDED;
}
949 :
950 451985 : PipelineCompilationJob::Status PipelineCompilationJob::ExecuteJobImpl() {
951 451985 : if (!pipeline_.OptimizeGraph(linkage_)) return FAILED;
952 451976 : pipeline_.AssembleCode(linkage_);
953 451977 : return SUCCEEDED;
954 : }
955 :
// Main-thread finalization: materializes the Code object, commits the
// collected compilation dependencies, and installs the code on the
// function's native context.
PipelineCompilationJob::Status PipelineCompilationJob::FinalizeJobImpl(
    Isolate* isolate) {
  MaybeHandle<Code> maybe_code = pipeline_.FinalizeCode();
  Handle<Code> code;
  if (!maybe_code.ToHandle(&code)) {
    // kNoReason means code generation itself failed (as opposed to an
    // earlier, already-recorded bailout).
    if (compilation_info()->bailout_reason() == BailoutReason::kNoReason) {
      return AbortOptimization(BailoutReason::kCodeGenerationFailed);
    }
    return FAILED;
  }
  if (!pipeline_.CommitDependencies(code)) {
    // A dependency was invalidated while compiling; the job may be retried.
    return RetryOptimization(BailoutReason::kBailedOutDueToDependencyChange);
  }

  compilation_info()->SetCode(code);
  compilation_info()->native_context()->AddOptimizedCode(*code);
  RegisterWeakObjectsInOptimizedCode(code, isolate);
  return SUCCEEDED;
}
975 :
// Collects all maps weakly embedded in {code} and registers them as retained
// maps with the heap, then marks the code as allowed to hold weak objects.
void PipelineCompilationJob::RegisterWeakObjectsInOptimizedCode(
    Handle<Code> code, Isolate* isolate) {
  DCHECK(code->is_optimized_code());
  std::vector<Handle<Map>> maps;
  {
    // Iterate relocation info without allocating; collected handles are
    // processed after the no-GC scope ends.
    DisallowHeapAllocation no_gc;
    int const mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
    for (RelocIterator it(*code, mode_mask); !it.done(); it.next()) {
      RelocInfo::Mode mode = it.rinfo()->rmode();
      if (mode == RelocInfo::EMBEDDED_OBJECT &&
          code->IsWeakObjectInOptimizedCode(it.rinfo()->target_object())) {
        Handle<HeapObject> object(HeapObject::cast(it.rinfo()->target_object()),
                                  isolate);
        if (object->IsMap()) {
          maps.push_back(Handle<Map>::cast(object));
        }
      }
    }
  }
  // AddRetainedMap may allocate, so it runs outside the DisallowHeapAllocation
  // scope above.
  for (Handle<Map> map : maps) {
    isolate->heap()->AddRetainedMap(map);
  }
  code->set_can_have_weak_objects(true);
}
1000 :
// Runs a single pipeline phase: opens a PipelineRunScope (statistics,
// phase-local zone, node-origin tagging) and forwards any extra arguments
// to the phase's Run method.
template <typename Phase, typename... Args>
void PipelineImpl::Run(Args&&... args) {
  PipelineRunScope scope(this->data_, Phase::phase_name());
  Phase phase;
  phase.Run(this->data_, scope.zone(), std::forward<Args>(args)...);
}
1007 :
// Builds the initial TurboFan graph from the function's bytecode.
struct GraphBuilderPhase {
  static const char* phase_name() { return "bytecode graph builder"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    JSTypeHintLowering::Flags flags = JSTypeHintLowering::kNoFlags;
    if (data->info()->is_bailout_on_uninitialized()) {
      flags |= JSTypeHintLowering::kBailoutOnUninitialized;
    }
    // The outermost function is built with a unit call frequency.
    CallFrequency frequency = CallFrequency(1.0f);
    BytecodeGraphBuilder graph_builder(
        temp_zone, data->info()->bytecode_array(), data->info()->shared_info(),
        handle(data->info()->closure()->feedback_vector(), data->isolate()),
        data->info()->osr_offset(), data->jsgraph(), frequency,
        data->source_positions(), data->native_context(),
        SourcePosition::kNotInlined, flags, true,
        data->info()->is_analyze_environment_liveness());
    graph_builder.CreateGraph();
  }
};
1027 :
1028 : namespace {
1029 :
1030 27289 : Maybe<OuterContext> GetModuleContext(Handle<JSFunction> closure) {
1031 27289 : Context current = closure->context();
1032 : size_t distance = 0;
1033 80036 : while (!current->IsNativeContext()) {
1034 25463 : if (current->IsModuleContext()) {
1035 : return Just(
1036 : OuterContext(handle(current, current->GetIsolate()), distance));
1037 : }
1038 25458 : current = current->previous();
1039 25458 : distance++;
1040 : }
1041 : return Nothing<OuterContext>();
1042 : }
1043 :
1044 456144 : Maybe<OuterContext> ChooseSpecializationContext(
1045 : Isolate* isolate, OptimizedCompilationInfo* info) {
1046 456144 : if (info->is_function_context_specializing()) {
1047 : DCHECK(info->has_context());
1048 428855 : return Just(OuterContext(handle(info->context(), isolate), 0));
1049 : }
1050 27289 : return GetModuleContext(info->closure());
1051 : }
1052 :
1053 : } // anonymous namespace
1054 :
// Early JS-level reduction round: native-context/context specialization,
// call reduction and inlining. Reducers are registered in a deliberate
// order; do not reorder the AddReducer calls casually.
struct InliningPhase {
  static const char* phase_name() { return "inlining"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    Isolate* isolate = data->isolate();
    OptimizedCompilationInfo* info = data->info();
    GraphReducer graph_reducer(temp_zone, data->graph(),
                               data->jsgraph()->Dead());
    DeadCodeElimination dead_code_elimination(&graph_reducer, data->graph(),
                                              data->common(), temp_zone);
    CheckpointElimination checkpoint_elimination(&graph_reducer);
    CommonOperatorReducer common_reducer(&graph_reducer, data->graph(),
                                         data->broker(), data->common(),
                                         data->machine(), temp_zone);
    JSCallReducer call_reducer(&graph_reducer, data->jsgraph(), data->broker(),
                               data->info()->is_bailout_on_uninitialized()
                                   ? JSCallReducer::kBailoutOnUninitialized
                                   : JSCallReducer::kNoFlags,
                               data->dependencies());
    JSContextSpecialization context_specialization(
        &graph_reducer, data->jsgraph(), data->broker(),
        ChooseSpecializationContext(isolate, data->info()),
        data->info()->is_function_context_specializing()
            ? data->info()->closure()
            : MaybeHandle<JSFunction>());
    JSNativeContextSpecialization::Flags flags =
        JSNativeContextSpecialization::kNoFlags;
    if (data->info()->is_accessor_inlining_enabled()) {
      flags |= JSNativeContextSpecialization::kAccessorInliningEnabled;
    }
    if (data->info()->is_bailout_on_uninitialized()) {
      flags |= JSNativeContextSpecialization::kBailoutOnUninitialized;
    }
    // Passing the OptimizedCompilationInfo's shared zone here as
    // JSNativeContextSpecialization allocates out-of-heap objects
    // that need to live until code generation.
    JSNativeContextSpecialization native_context_specialization(
        &graph_reducer, data->jsgraph(), data->broker(), flags,
        data->native_context(), data->dependencies(), temp_zone, info->zone());
    JSInliningHeuristic inlining(&graph_reducer,
                                 data->info()->is_inlining_enabled()
                                     ? JSInliningHeuristic::kGeneralInlining
                                     : JSInliningHeuristic::kRestrictedInlining,
                                 temp_zone, data->info(), data->jsgraph(),
                                 data->broker(), data->source_positions());
    JSIntrinsicLowering intrinsic_lowering(&graph_reducer, data->jsgraph());
    AddReducer(data, &graph_reducer, &dead_code_elimination);
    AddReducer(data, &graph_reducer, &checkpoint_elimination);
    AddReducer(data, &graph_reducer, &common_reducer);
    AddReducer(data, &graph_reducer, &native_context_specialization);
    AddReducer(data, &graph_reducer, &context_specialization);
    AddReducer(data, &graph_reducer, &intrinsic_lowering);
    AddReducer(data, &graph_reducer, &call_reducer);
    AddReducer(data, &graph_reducer, &inlining);
    graph_reducer.ReduceGraph();
  }
};
1112 :
1113 :
// Assigns types to all nodes, seeded from the JSGraph's cached constants and
// using loop variable analysis for induction variables.
struct TyperPhase {
  static const char* phase_name() { return "typer"; }

  void Run(PipelineData* data, Zone* temp_zone, Typer* typer) {
    NodeVector roots(temp_zone);
    data->jsgraph()->GetCachedNodes(&roots);

    // Make sure we always type True and False. Needed for escape analysis.
    roots.push_back(data->jsgraph()->TrueConstant());
    roots.push_back(data->jsgraph()->FalseConstant());

    LoopVariableOptimizer induction_vars(data->jsgraph()->graph(),
                                         data->common(), temp_zone);
    if (FLAG_turbo_loop_variable) induction_vars.Run();
    typer->Run(roots, &induction_vars);
  }
};
1131 :
// Strips all type information from the graph: first from the cached root
// nodes, then from every node reachable via a graph reduction.
struct UntyperPhase {
  static const char* phase_name() { return "untyper"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    // Local reducer that removes the type from each typed node it visits.
    class RemoveTypeReducer final : public Reducer {
     public:
      const char* reducer_name() const override { return "RemoveTypeReducer"; }
      Reduction Reduce(Node* node) final {
        if (NodeProperties::IsTyped(node)) {
          NodeProperties::RemoveType(node);
          return Changed(node);
        }
        return NoChange();
      }
    };

    NodeVector roots(temp_zone);
    data->jsgraph()->GetCachedNodes(&roots);
    for (Node* node : roots) {
      NodeProperties::RemoveType(node);
    }

    GraphReducer graph_reducer(temp_zone, data->graph(),
                               data->jsgraph()->Dead());
    RemoveTypeReducer remove_type_reducer;
    AddReducer(data, &graph_reducer, &remove_type_reducer);
    graph_reducer.ReduceGraph();
  }
};
1161 :
// Has the broker serialize the standard heap objects so later (possibly
// concurrent) phases can access them without touching the JS heap.
struct SerializeStandardObjectsPhase {
  static const char* phase_name() { return "serialize standard objects"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    data->broker()->SerializeStandardObjects();
  }
};
1169 :
// Copies heap data referenced by the graph into the broker (via
// JSHeapCopyReducer) so the concurrent part of compilation does not need to
// read the JS heap.
struct CopyMetadataForConcurrentCompilePhase {
  static const char* phase_name() { return "serialize metadata"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    GraphReducer graph_reducer(temp_zone, data->graph(),
                               data->jsgraph()->Dead());
    JSHeapCopyReducer heap_copy_reducer(data->broker());
    AddReducer(data, &graph_reducer, &heap_copy_reducer);
    graph_reducer.ReduceGraph();

    // Some nodes that are no longer in the graph might still be in the cache.
    NodeVector cached_nodes(temp_zone);
    data->jsgraph()->GetCachedNodes(&cached_nodes);
    for (Node* const node : cached_nodes) graph_reducer.ReduceNode(node);
  }
};
1186 :
1187 : // TODO(turbofan): Move all calls from CopyMetaDataForConcurrentCompilePhase
1188 : // here. Also all the calls to Serialize* methods that are currently sprinkled
1189 : // over inlining will move here as well.
// Runs the background-compilation serializer over the function's bytecode.
struct SerializationPhase {
  static const char* phase_name() { return "serialize bytecode"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    SerializerForBackgroundCompilation serializer(data->broker(), temp_zone,
                                                  data->info()->closure());
    serializer.Run();
  }
};
1199 :
// Type-based JS-level lowering round: object creation lowering, typed
// lowering/optimization, constant folding and general cleanup. Reducer
// registration order is deliberate.
struct TypedLoweringPhase {
  static const char* phase_name() { return "typed lowering"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    GraphReducer graph_reducer(temp_zone, data->graph(),
                               data->jsgraph()->Dead());
    DeadCodeElimination dead_code_elimination(&graph_reducer, data->graph(),
                                              data->common(), temp_zone);
    JSCreateLowering create_lowering(&graph_reducer, data->dependencies(),
                                     data->jsgraph(), data->broker(),
                                     temp_zone);
    JSTypedLowering typed_lowering(&graph_reducer, data->jsgraph(),
                                   data->broker(), temp_zone);
    ConstantFoldingReducer constant_folding_reducer(
        &graph_reducer, data->jsgraph(), data->broker());
    TypedOptimization typed_optimization(&graph_reducer, data->dependencies(),
                                         data->jsgraph(), data->broker());
    SimplifiedOperatorReducer simple_reducer(&graph_reducer, data->jsgraph(),
                                             data->broker());
    CheckpointElimination checkpoint_elimination(&graph_reducer);
    CommonOperatorReducer common_reducer(&graph_reducer, data->graph(),
                                         data->broker(), data->common(),
                                         data->machine(), temp_zone);
    AddReducer(data, &graph_reducer, &dead_code_elimination);
    AddReducer(data, &graph_reducer, &create_lowering);
    AddReducer(data, &graph_reducer, &constant_folding_reducer);
    AddReducer(data, &graph_reducer, &typed_lowering);
    AddReducer(data, &graph_reducer, &typed_optimization);
    AddReducer(data, &graph_reducer, &simple_reducer);
    AddReducer(data, &graph_reducer, &checkpoint_elimination);
    AddReducer(data, &graph_reducer, &common_reducer);
    graph_reducer.ReduceGraph();
  }
};
1234 :
1235 :
// Runs escape analysis and then rewrites the graph (via the reducer) to
// scalar-replace allocations that do not escape.
struct EscapeAnalysisPhase {
  static const char* phase_name() { return "escape analysis"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    EscapeAnalysis escape_analysis(data->jsgraph(), temp_zone);
    escape_analysis.ReduceGraph();
    GraphReducer reducer(temp_zone, data->graph(), data->jsgraph()->Dead());
    EscapeAnalysisReducer escape_reducer(&reducer, data->jsgraph(),
                                         escape_analysis.analysis_result(),
                                         temp_zone);
    AddReducer(data, &reducer, &escape_reducer);
    reducer.ReduceGraph();
    // TODO(tebbi): Turn this into a debug mode check once we have confidence.
    escape_reducer.VerifyReplacement();
  }
};
1252 :
// Lowers simplified operators to machine-level operators, choosing value
// representations and honoring the configured poisoning level.
struct SimplifiedLoweringPhase {
  static const char* phase_name() { return "simplified lowering"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    SimplifiedLowering lowering(data->jsgraph(), data->broker(), temp_zone,
                                data->source_positions(), data->node_origins(),
                                data->info()->GetPoisoningMitigationLevel());
    lowering.LowerAllNodes();
  }
};
1263 :
// Builds the loop tree and peels one iteration off the inner loops.
struct LoopPeelingPhase {
  static const char* phase_name() { return "loop peeling"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    // Loop analysis needs a trimmed graph, so trim first.
    GraphTrimmer trimmer(temp_zone, data->graph());
    NodeVector roots(temp_zone);
    data->jsgraph()->GetCachedNodes(&roots);
    trimmer.TrimGraph(roots.begin(), roots.end());

    LoopTree* loop_tree =
        LoopFinder::BuildLoopTree(data->jsgraph()->graph(), temp_zone);
    LoopPeeler(data->graph(), data->common(), loop_tree, temp_zone,
               data->source_positions(), data->node_origins())
        .PeelInnerLoopsOfTree();
  }
};
1280 :
// Removes LoopExit markers from the graph without peeling any loops.
struct LoopExitEliminationPhase {
  static const char* phase_name() { return "loop exit elimination"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    LoopPeeler::EliminateLoopExits(data->graph(), temp_zone);
  }
};
1288 :
1289 : struct GenericLoweringPhase {
1290 : static const char* phase_name() { return "generic lowering"; }
1291 :
1292 1368342 : void Run(PipelineData* data, Zone* temp_zone) {
1293 : GraphReducer graph_reducer(temp_zone, data->graph(),
1294 912225 : data->jsgraph()->Dead());
1295 912227 : JSGenericLowering generic_lowering(data->jsgraph());
1296 456112 : AddReducer(data, &graph_reducer, &generic_lowering);
1297 912194 : graph_reducer.ReduceGraph();
1298 456116 : }
1299 : };
1300 :
// General cleanup round after lowering: dead code, redundancy and value
// numbering, plus machine/common operator strength reduction. Reducer
// registration order is deliberate.
struct EarlyOptimizationPhase {
  static const char* phase_name() { return "early optimization"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    GraphReducer graph_reducer(temp_zone, data->graph(),
                               data->jsgraph()->Dead());
    DeadCodeElimination dead_code_elimination(&graph_reducer, data->graph(),
                                              data->common(), temp_zone);
    SimplifiedOperatorReducer simple_reducer(&graph_reducer, data->jsgraph(),
                                             data->broker());
    RedundancyElimination redundancy_elimination(&graph_reducer, temp_zone);
    // Value numbering allocates in the graph zone because replacements must
    // outlive this phase's temporary zone.
    ValueNumberingReducer value_numbering(temp_zone, data->graph()->zone());
    MachineOperatorReducer machine_reducer(&graph_reducer, data->jsgraph());
    CommonOperatorReducer common_reducer(&graph_reducer, data->graph(),
                                         data->broker(), data->common(),
                                         data->machine(), temp_zone);
    AddReducer(data, &graph_reducer, &dead_code_elimination);
    AddReducer(data, &graph_reducer, &simple_reducer);
    AddReducer(data, &graph_reducer, &redundancy_elimination);
    AddReducer(data, &graph_reducer, &machine_reducer);
    AddReducer(data, &graph_reducer, &common_reducer);
    AddReducer(data, &graph_reducer, &value_numbering);
    graph_reducer.ReduceGraph();
  }
};
1326 :
1327 : struct ControlFlowOptimizationPhase {
1328 : static const char* phase_name() { return "control flow optimization"; }
1329 :
1330 456110 : void Run(PipelineData* data, Zone* temp_zone) {
1331 : ControlFlowOptimizer optimizer(data->graph(), data->common(),
1332 456110 : data->machine(), temp_zone);
1333 456117 : optimizer.Optimize();
1334 456115 : }
1335 : };
1336 :
// Computes a schedule, wires low-level side effects into the effect/control
// chains (effect control linearization), then cleans up with dead-code and
// common-operator reduction.
struct EffectControlLinearizationPhase {
  static const char* phase_name() { return "effect linearization"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    {
      // The scheduler requires the graphs to be trimmed, so trim now.
      // TODO(jarin) Remove the trimming once the scheduler can handle untrimmed
      // graphs.
      GraphTrimmer trimmer(temp_zone, data->graph());
      NodeVector roots(temp_zone);
      data->jsgraph()->GetCachedNodes(&roots);
      trimmer.TrimGraph(roots.begin(), roots.end());

      // Schedule the graph without node splitting so that we can
      // fix the effect and control flow for nodes with low-level side
      // effects (such as changing representation to tagged or
      // 'floating' allocation regions.)
      Schedule* schedule = Scheduler::ComputeSchedule(temp_zone, data->graph(),
                                                      Scheduler::kTempSchedule);
      if (FLAG_turbo_verify) ScheduleVerifier::Run(schedule);
      TraceSchedule(data->info(), data, schedule,
                    "effect linearization schedule");

      // Enable array-index masking only when some poisoning level is active.
      EffectControlLinearizer::MaskArrayIndexEnable mask_array_index =
          (data->info()->GetPoisoningMitigationLevel() !=
           PoisoningMitigationLevel::kDontPoison)
              ? EffectControlLinearizer::kMaskArrayIndex
              : EffectControlLinearizer::kDoNotMaskArrayIndex;
      // Post-pass for wiring the control/effects
      // - connect allocating representation changes into the control&effect
      //   chains and lower them,
      // - get rid of the region markers,
      // - introduce effect phis and rewire effects to get SSA again.
      EffectControlLinearizer linearizer(
          data->jsgraph(), schedule, temp_zone, data->source_positions(),
          data->node_origins(), mask_array_index);
      linearizer.Run();
    }
    {
      // The {EffectControlLinearizer} might leave {Dead} nodes behind, so we
      // run {DeadCodeElimination} to prune these parts of the graph.
      // Also, the following store-store elimination phase greatly benefits from
      // doing a common operator reducer and dead code elimination just before
      // it, to eliminate conditional deopts with a constant condition.
      GraphReducer graph_reducer(temp_zone, data->graph(),
                                 data->jsgraph()->Dead());
      DeadCodeElimination dead_code_elimination(&graph_reducer, data->graph(),
                                                data->common(), temp_zone);
      CommonOperatorReducer common_reducer(&graph_reducer, data->graph(),
                                           data->broker(), data->common(),
                                           data->machine(), temp_zone);
      AddReducer(data, &graph_reducer, &dead_code_elimination);
      AddReducer(data, &graph_reducer, &common_reducer);
      graph_reducer.ReduceGraph();
    }
  }
};
1394 :
1395 : struct StoreStoreEliminationPhase {
1396 : static const char* phase_name() { return "store-store elimination"; }
1397 :
1398 1368351 : void Run(PipelineData* data, Zone* temp_zone) {
1399 456117 : GraphTrimmer trimmer(temp_zone, data->graph());
1400 : NodeVector roots(temp_zone);
1401 456117 : data->jsgraph()->GetCachedNodes(&roots);
1402 456117 : trimmer.TrimGraph(roots.begin(), roots.end());
1403 :
1404 456117 : StoreStoreElimination::Run(data->jsgraph(), temp_zone);
1405 456113 : }
1406 : };
1407 :
// Big mid-pipeline cleanup round centered on load elimination; also runs
// branch/redundancy/checkpoint elimination, type narrowing, constant folding
// and value numbering. Reducer registration order is deliberate.
struct LoadEliminationPhase {
  static const char* phase_name() { return "load elimination"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    GraphReducer graph_reducer(temp_zone, data->graph(),
                               data->jsgraph()->Dead());
    BranchElimination branch_condition_elimination(&graph_reducer,
                                                   data->jsgraph(), temp_zone);
    DeadCodeElimination dead_code_elimination(&graph_reducer, data->graph(),
                                              data->common(), temp_zone);
    RedundancyElimination redundancy_elimination(&graph_reducer, temp_zone);
    LoadElimination load_elimination(&graph_reducer, data->jsgraph(),
                                     temp_zone);
    CheckpointElimination checkpoint_elimination(&graph_reducer);
    // Value numbering allocates replacements in the graph zone so they
    // survive this phase's temporary zone.
    ValueNumberingReducer value_numbering(temp_zone, data->graph()->zone());
    CommonOperatorReducer common_reducer(&graph_reducer, data->graph(),
                                         data->broker(), data->common(),
                                         data->machine(), temp_zone);
    TypedOptimization typed_optimization(&graph_reducer, data->dependencies(),
                                         data->jsgraph(), data->broker());
    ConstantFoldingReducer constant_folding_reducer(
        &graph_reducer, data->jsgraph(), data->broker());
    TypeNarrowingReducer type_narrowing_reducer(&graph_reducer, data->jsgraph(),
                                                data->broker());
    AddReducer(data, &graph_reducer, &branch_condition_elimination);
    AddReducer(data, &graph_reducer, &dead_code_elimination);
    AddReducer(data, &graph_reducer, &redundancy_elimination);
    AddReducer(data, &graph_reducer, &load_elimination);
    AddReducer(data, &graph_reducer, &type_narrowing_reducer);
    AddReducer(data, &graph_reducer, &constant_folding_reducer);
    AddReducer(data, &graph_reducer, &typed_optimization);
    AddReducer(data, &graph_reducer, &checkpoint_elimination);
    AddReducer(data, &graph_reducer, &common_reducer);
    AddReducer(data, &graph_reducer, &value_numbering);
    graph_reducer.ReduceGraph();
  }
};
1445 :
// Lowers allocations and memory accesses, optionally folding adjacent
// allocations together when allocation folding is enabled.
struct MemoryOptimizationPhase {
  static const char* phase_name() { return "memory optimization"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    // The memory optimizer requires the graphs to be trimmed, so trim now.
    GraphTrimmer trimmer(temp_zone, data->graph());
    NodeVector roots(temp_zone);
    data->jsgraph()->GetCachedNodes(&roots);
    trimmer.TrimGraph(roots.begin(), roots.end());

    // Optimize allocations and load/store operations.
    MemoryOptimizer optimizer(
        data->jsgraph(), temp_zone, data->info()->GetPoisoningMitigationLevel(),
        data->info()->is_allocation_folding_enabled()
            ? MemoryOptimizer::AllocationFolding::kDoAllocationFolding
            : MemoryOptimizer::AllocationFolding::kDontAllocationFolding);
    optimizer.Optimize();
  }
};
1465 :
// Final machine-level cleanup round: branch/dead-code elimination, machine
// and common operator reduction, select lowering and value numbering.
// Reducer registration order is deliberate.
struct LateOptimizationPhase {
  static const char* phase_name() { return "late optimization"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    GraphReducer graph_reducer(temp_zone, data->graph(),
                               data->jsgraph()->Dead());
    BranchElimination branch_condition_elimination(&graph_reducer,
                                                   data->jsgraph(), temp_zone);
    DeadCodeElimination dead_code_elimination(&graph_reducer, data->graph(),
                                              data->common(), temp_zone);
    // Value numbering allocates replacements in the graph zone so they
    // survive this phase's temporary zone.
    ValueNumberingReducer value_numbering(temp_zone, data->graph()->zone());
    MachineOperatorReducer machine_reducer(&graph_reducer, data->jsgraph());
    CommonOperatorReducer common_reducer(&graph_reducer, data->graph(),
                                         data->broker(), data->common(),
                                         data->machine(), temp_zone);
    SelectLowering select_lowering(data->jsgraph()->graph(),
                                   data->jsgraph()->common());
    AddReducer(data, &graph_reducer, &branch_condition_elimination);
    AddReducer(data, &graph_reducer, &dead_code_elimination);
    AddReducer(data, &graph_reducer, &machine_reducer);
    AddReducer(data, &graph_reducer, &common_reducer);
    AddReducer(data, &graph_reducer, &select_lowering);
    AddReducer(data, &graph_reducer, &value_numbering);
    graph_reducer.ReduceGraph();
  }
};
1492 :
1493 : struct CsaOptimizationPhase {
1494 : static const char* phase_name() { return "csa optimization"; }
1495 :
1496 401490 : void Run(PipelineData* data, Zone* temp_zone) {
1497 : GraphReducer graph_reducer(temp_zone, data->graph(),
1498 133830 : data->jsgraph()->Dead());
1499 : BranchElimination branch_condition_elimination(&graph_reducer,
1500 133830 : data->jsgraph(), temp_zone);
1501 : DeadCodeElimination dead_code_elimination(&graph_reducer, data->graph(),
1502 66915 : data->common(), temp_zone);
1503 133830 : MachineOperatorReducer machine_reducer(&graph_reducer, data->jsgraph());
1504 : CommonOperatorReducer common_reducer(&graph_reducer, data->graph(),
1505 : data->broker(), data->common(),
1506 66915 : data->machine(), temp_zone);
1507 66915 : AddReducer(data, &graph_reducer, &branch_condition_elimination);
1508 66915 : AddReducer(data, &graph_reducer, &dead_code_elimination);
1509 66915 : AddReducer(data, &graph_reducer, &machine_reducer);
1510 66915 : AddReducer(data, &graph_reducer, &common_reducer);
1511 133830 : graph_reducer.ReduceGraph();
1512 66915 : }
1513 : };
1514 :
1515 : struct EarlyGraphTrimmingPhase {
1516 : static const char* phase_name() { return "early trimming"; }
1517 912295 : void Run(PipelineData* data, Zone* temp_zone) {
1518 456149 : GraphTrimmer trimmer(temp_zone, data->graph());
1519 : NodeVector roots(temp_zone);
1520 456146 : data->jsgraph()->GetCachedNodes(&roots);
1521 456141 : trimmer.TrimGraph(roots.begin(), roots.end());
1522 456146 : }
1523 : };
1524 :
1525 :
1526 : struct LateGraphTrimmingPhase {
1527 : static const char* phase_name() { return "late graph trimming"; }
1528 5334664 : void Run(PipelineData* data, Zone* temp_zone) {
1529 2667188 : GraphTrimmer trimmer(temp_zone, data->graph());
1530 : NodeVector roots(temp_zone);
1531 2667476 : if (data->jsgraph()) {
1532 1598643 : data->jsgraph()->GetCachedNodes(&roots);
1533 : }
1534 2667471 : trimmer.TrimGraph(roots.begin(), roots.end());
1535 2667337 : }
1536 : };
1537 :
1538 :
1539 : struct ComputeSchedulePhase {
1540 : static const char* phase_name() { return "scheduling"; }
1541 :
1542 5334518 : void Run(PipelineData* data, Zone* temp_zone) {
1543 : Schedule* schedule = Scheduler::ComputeSchedule(
1544 : temp_zone, data->graph(), data->info()->is_splitting_enabled()
1545 : ? Scheduler::kSplitNodes
1546 5334518 : : Scheduler::kNoFlags);
1547 2667311 : if (FLAG_turbo_verify) ScheduleVerifier::Run(schedule);
1548 : data->set_schedule(schedule);
1549 2667311 : }
1550 : };
1551 :
1552 : struct InstructionRangesAsJSON {
1553 : const InstructionSequence* sequence;
1554 : const ZoneVector<std::pair<int, int>>* instr_origins;
1555 : };
1556 :
1557 2 : std::ostream& operator<<(std::ostream& out, const InstructionRangesAsJSON& s) {
1558 4 : const int max = static_cast<int>(s.sequence->LastInstructionIndex());
1559 :
1560 2 : out << ", \"nodeIdToInstructionRange\": {";
1561 : bool need_comma = false;
1562 304 : for (size_t i = 0; i < s.instr_origins->size(); ++i) {
1563 302 : std::pair<int, int> offset = (*s.instr_origins)[i];
1564 150 : if (offset.first == -1) continue;
1565 125 : const int first = max - offset.first + 1;
1566 125 : const int second = max - offset.second + 1;
1567 125 : if (need_comma) out << ", ";
1568 250 : out << "\"" << i << "\": [" << first << ", " << second << "]";
1569 : need_comma = true;
1570 : }
1571 2 : out << "}";
1572 2 : out << ", \"blockIdtoInstructionRange\": {";
1573 : need_comma = false;
1574 44 : for (auto block : s.sequence->instruction_blocks()) {
1575 19 : if (need_comma) out << ", ";
1576 38 : out << "\"" << block->rpo_number() << "\": [" << block->code_start() << ", "
1577 19 : << block->code_end() << "]";
1578 : need_comma = true;
1579 : }
1580 2 : out << "}";
1581 2 : return out;
1582 : }
1583 :
1584 : struct InstructionSelectionPhase {
1585 : static const char* phase_name() { return "select instructions"; }
1586 :
1587 14747787 : void Run(PipelineData* data, Zone* temp_zone, Linkage* linkage) {
1588 : InstructionSelector selector(
1589 : temp_zone, data->graph()->NodeCount(), linkage, data->sequence(),
1590 : data->schedule(), data->source_positions(), data->frame(),
1591 : data->info()->switch_jump_table_enabled()
1592 : ? InstructionSelector::kEnableSwitchJumpTable
1593 : : InstructionSelector::kDisableSwitchJumpTable,
1594 : data->info()->is_source_positions_enabled()
1595 : ? InstructionSelector::kAllSourcePositions
1596 : : InstructionSelector::kCallSourcePositions,
1597 : InstructionSelector::SupportedFeatures(),
1598 : FLAG_turbo_instruction_scheduling
1599 : ? InstructionSelector::kEnableScheduling
1600 : : InstructionSelector::kDisableScheduling,
1601 1880854 : !data->isolate() || data->isolate()->serializer_enabled() ||
1602 : data->isolate()->ShouldLoadConstantsFromRootList()
1603 : ? InstructionSelector::kDisableRootsRelativeAddressing
1604 : : InstructionSelector::kEnableRootsRelativeAddressing,
1605 : data->info()->GetPoisoningMitigationLevel(),
1606 : data->info()->trace_turbo_json_enabled()
1607 : ? InstructionSelector::kEnableTraceTurboJson
1608 19457554 : : InstructionSelector::kDisableTraceTurboJson);
1609 2949635 : if (!selector.SelectInstructions()) {
1610 : data->set_compilation_failed();
1611 : }
1612 2949627 : if (data->info()->trace_turbo_json_enabled()) {
1613 2 : TurboJsonFile json_of(data->info(), std::ios_base::app);
1614 2 : json_of << "{\"name\":\"" << phase_name()
1615 2 : << "\",\"type\":\"instructions\""
1616 : << InstructionRangesAsJSON{data->sequence(),
1617 6 : &selector.instr_origins()}
1618 4 : << "},\n";
1619 : }
1620 2949627 : }
1621 : };
1622 :
1623 :
1624 : struct MeetRegisterConstraintsPhase {
1625 : static const char* phase_name() { return "meet register constraints"; }
1626 :
1627 2949608 : void Run(PipelineData* data, Zone* temp_zone) {
1628 2949608 : ConstraintBuilder builder(data->register_allocation_data());
1629 2949437 : builder.MeetRegisterConstraints();
1630 2949466 : }
1631 : };
1632 :
1633 :
1634 : struct ResolvePhisPhase {
1635 : static const char* phase_name() { return "resolve phis"; }
1636 :
1637 2949678 : void Run(PipelineData* data, Zone* temp_zone) {
1638 2949678 : ConstraintBuilder builder(data->register_allocation_data());
1639 2949649 : builder.ResolvePhis();
1640 2949522 : }
1641 : };
1642 :
1643 :
1644 : struct BuildLiveRangesPhase {
1645 : static const char* phase_name() { return "build live ranges"; }
1646 :
1647 2949490 : void Run(PipelineData* data, Zone* temp_zone) {
1648 2949490 : LiveRangeBuilder builder(data->register_allocation_data(), temp_zone);
1649 2949494 : builder.BuildLiveRanges();
1650 2949496 : }
1651 : };
1652 :
1653 : struct BuildBundlesPhase {
1654 : static const char* phase_name() { return "build live range bundles"; }
1655 :
1656 2949658 : void Run(PipelineData* data, Zone* temp_zone) {
1657 : BundleBuilder builder(data->register_allocation_data());
1658 2949658 : builder.BuildBundles();
1659 : }
1660 : };
1661 :
1662 : struct SplinterLiveRangesPhase {
1663 : static const char* phase_name() { return "splinter live ranges"; }
1664 :
1665 2949652 : void Run(PipelineData* data, Zone* temp_zone) {
1666 : LiveRangeSeparator live_range_splinterer(data->register_allocation_data(),
1667 : temp_zone);
1668 2949652 : live_range_splinterer.Splinter();
1669 : }
1670 : };
1671 :
1672 :
1673 : template <typename RegAllocator>
1674 : struct AllocateGeneralRegistersPhase {
1675 : static const char* phase_name() { return "allocate general registers"; }
1676 :
1677 2949547 : void Run(PipelineData* data, Zone* temp_zone) {
1678 : RegAllocator allocator(data->register_allocation_data(), GENERAL_REGISTERS,
1679 2949547 : temp_zone);
1680 2949712 : allocator.AllocateRegisters();
1681 2949506 : }
1682 : };
1683 :
1684 : template <typename RegAllocator>
1685 : struct AllocateFPRegistersPhase {
1686 : static const char* phase_name() { return "allocate f.p. registers"; }
1687 :
1688 208084 : void Run(PipelineData* data, Zone* temp_zone) {
1689 : RegAllocator allocator(data->register_allocation_data(), FP_REGISTERS,
1690 208084 : temp_zone);
1691 208107 : allocator.AllocateRegisters();
1692 208107 : }
1693 : };
1694 :
1695 :
1696 : struct MergeSplintersPhase {
1697 : static const char* phase_name() { return "merge splintered ranges"; }
1698 2949682 : void Run(PipelineData* pipeline_data, Zone* temp_zone) {
1699 : RegisterAllocationData* data = pipeline_data->register_allocation_data();
1700 : LiveRangeMerger live_range_merger(data, temp_zone);
1701 2949682 : live_range_merger.Merge();
1702 : }
1703 : };
1704 :
1705 :
1706 : struct LocateSpillSlotsPhase {
1707 : static const char* phase_name() { return "locate spill slots"; }
1708 :
1709 2949701 : void Run(PipelineData* data, Zone* temp_zone) {
1710 2949701 : SpillSlotLocator locator(data->register_allocation_data());
1711 2949675 : locator.LocateSpillSlots();
1712 2949663 : }
1713 : };
1714 :
1715 :
1716 : struct AssignSpillSlotsPhase {
1717 : static const char* phase_name() { return "assign spill slots"; }
1718 :
1719 2949520 : void Run(PipelineData* data, Zone* temp_zone) {
1720 2949520 : OperandAssigner assigner(data->register_allocation_data());
1721 2949507 : assigner.AssignSpillSlots();
1722 2949607 : }
1723 : };
1724 :
1725 :
1726 : struct CommitAssignmentPhase {
1727 : static const char* phase_name() { return "commit assignment"; }
1728 :
1729 2949688 : void Run(PipelineData* data, Zone* temp_zone) {
1730 2949688 : OperandAssigner assigner(data->register_allocation_data());
1731 2949683 : assigner.CommitAssignment();
1732 2949505 : }
1733 : };
1734 :
1735 :
1736 : struct PopulateReferenceMapsPhase {
1737 : static const char* phase_name() { return "populate pointer maps"; }
1738 :
1739 2949673 : void Run(PipelineData* data, Zone* temp_zone) {
1740 2949673 : ReferenceMapPopulator populator(data->register_allocation_data());
1741 2949666 : populator.PopulateReferenceMaps();
1742 2949491 : }
1743 : };
1744 :
1745 :
1746 : struct ConnectRangesPhase {
1747 : static const char* phase_name() { return "connect ranges"; }
1748 :
1749 2949690 : void Run(PipelineData* data, Zone* temp_zone) {
1750 2949690 : LiveRangeConnector connector(data->register_allocation_data());
1751 2949681 : connector.ConnectRanges(temp_zone);
1752 2949314 : }
1753 : };
1754 :
1755 :
1756 : struct ResolveControlFlowPhase {
1757 : static const char* phase_name() { return "resolve control flow"; }
1758 :
1759 2949661 : void Run(PipelineData* data, Zone* temp_zone) {
1760 2949661 : LiveRangeConnector connector(data->register_allocation_data());
1761 2949657 : connector.ResolveControlFlow(temp_zone);
1762 2949704 : }
1763 : };
1764 :
1765 :
1766 : struct OptimizeMovesPhase {
1767 : static const char* phase_name() { return "optimize moves"; }
1768 :
1769 2949665 : void Run(PipelineData* data, Zone* temp_zone) {
1770 2949665 : MoveOptimizer move_optimizer(temp_zone, data->sequence());
1771 2949470 : move_optimizer.Run();
1772 2949591 : }
1773 : };
1774 :
1775 :
1776 : struct FrameElisionPhase {
1777 : static const char* phase_name() { return "frame elision"; }
1778 :
1779 2949679 : void Run(PipelineData* data, Zone* temp_zone) {
1780 2949679 : FrameElider(data->sequence()).Run();
1781 2949370 : }
1782 : };
1783 :
1784 :
1785 : struct JumpThreadingPhase {
1786 : static const char* phase_name() { return "jump threading"; }
1787 :
1788 6522663 : void Run(PipelineData* data, Zone* temp_zone, bool frame_at_start) {
1789 : ZoneVector<RpoNumber> result(temp_zone);
1790 2949629 : if (JumpThreading::ComputeForwarding(temp_zone, result, data->sequence(),
1791 2949629 : frame_at_start)) {
1792 623405 : JumpThreading::ApplyForwarding(temp_zone, result, data->sequence());
1793 : }
1794 2949444 : }
1795 : };
1796 :
1797 : struct AssembleCodePhase {
1798 : static const char* phase_name() { return "assemble code"; }
1799 :
1800 2949564 : void Run(PipelineData* data, Zone* temp_zone) {
1801 2949564 : data->code_generator()->AssembleCode();
1802 : }
1803 : };
1804 :
1805 : struct FinalizeCodePhase {
1806 : static const char* phase_name() { return "finalize code"; }
1807 :
1808 1880741 : void Run(PipelineData* data, Zone* temp_zone) {
1809 1880741 : data->set_code(data->code_generator()->FinalizeCode());
1810 : }
1811 : };
1812 :
1813 :
1814 : struct PrintGraphPhase {
1815 : static const char* phase_name() { return nullptr; }
1816 :
1817 57 : void Run(PipelineData* data, Zone* temp_zone, const char* phase) {
1818 : OptimizedCompilationInfo* info = data->info();
1819 : Graph* graph = data->graph();
1820 :
1821 19 : if (info->trace_turbo_json_enabled()) { // Print JSON.
1822 : AllowHandleDereference allow_deref;
1823 :
1824 19 : TurboJsonFile json_of(info, std::ios_base::app);
1825 19 : json_of << "{\"name\":\"" << phase << "\",\"type\":\"graph\",\"data\":"
1826 38 : << AsJSON(*graph, data->source_positions(), data->node_origins())
1827 38 : << "},\n";
1828 : }
1829 :
1830 19 : if (info->trace_turbo_scheduled_enabled()) {
1831 0 : AccountingAllocator allocator;
1832 : Schedule* schedule = data->schedule();
1833 0 : if (schedule == nullptr) {
1834 : schedule = Scheduler::ComputeSchedule(temp_zone, data->graph(),
1835 0 : Scheduler::kNoFlags);
1836 : }
1837 :
1838 : AllowHandleDereference allow_deref;
1839 0 : CodeTracer::Scope tracing_scope(data->GetCodeTracer());
1840 0 : OFStream os(tracing_scope.file());
1841 0 : os << "-- Graph after " << phase << " -- " << std::endl;
1842 0 : os << AsScheduledGraph(schedule);
1843 19 : } else if (info->trace_turbo_graph_enabled()) { // Simple textual RPO.
1844 : AllowHandleDereference allow_deref;
1845 19 : CodeTracer::Scope tracing_scope(data->GetCodeTracer());
1846 38 : OFStream os(tracing_scope.file());
1847 19 : os << "-- Graph after " << phase << " -- " << std::endl;
1848 19 : os << AsRPO(*graph);
1849 : }
1850 19 : }
1851 : };
1852 :
1853 :
1854 : struct VerifyGraphPhase {
1855 : static const char* phase_name() { return nullptr; }
1856 :
1857 134374 : void Run(PipelineData* data, Zone* temp_zone, const bool untyped,
1858 : bool values_only = false) {
1859 : Verifier::CodeType code_type;
1860 67187 : switch (data->info()->code_kind()) {
1861 : case Code::WASM_FUNCTION:
1862 : case Code::WASM_TO_JS_FUNCTION:
1863 : case Code::JS_TO_WASM_FUNCTION:
1864 : case Code::WASM_INTERPRETER_ENTRY:
1865 : case Code::C_WASM_ENTRY:
1866 : code_type = Verifier::kWasm;
1867 : break;
1868 : default:
1869 : code_type = Verifier::kDefault;
1870 : }
1871 : Verifier::Run(data->graph(), !untyped ? Verifier::TYPED : Verifier::UNTYPED,
1872 : values_only ? Verifier::kValuesOnly : Verifier::kAll,
1873 134374 : code_type);
1874 67187 : }
1875 : };
1876 :
1877 13530253 : void PipelineImpl::RunPrintAndVerify(const char* phase, bool untyped) {
1878 27060651 : if (info()->trace_turbo_json_enabled() ||
1879 : info()->trace_turbo_graph_enabled()) {
1880 19 : Run<PrintGraphPhase>(phase);
1881 : }
1882 13530451 : if (FLAG_turbo_verify) {
1883 272 : Run<VerifyGraphPhase>(untyped);
1884 : }
1885 13530451 : }
1886 :
1887 456149 : bool PipelineImpl::CreateGraph() {
1888 2736905 : PipelineData* data = this->data_;
1889 :
1890 : data->BeginPhaseKind("graph creation");
1891 :
1892 912303 : if (info()->trace_turbo_json_enabled() ||
1893 : info()->trace_turbo_graph_enabled()) {
1894 1 : CodeTracer::Scope tracing_scope(data->GetCodeTracer());
1895 2 : OFStream os(tracing_scope.file());
1896 1 : os << "---------------------------------------------------\n"
1897 3 : << "Begin compiling method " << info()->GetDebugName().get()
1898 1 : << " using Turbofan" << std::endl;
1899 : }
1900 456151 : if (info()->trace_turbo_json_enabled()) {
1901 1 : TurboCfgFile tcf(isolate());
1902 1 : tcf << AsC1VCompilation(info());
1903 : }
1904 :
1905 456151 : data->source_positions()->AddDecorator();
1906 456152 : if (data->info()->trace_turbo_json_enabled()) {
1907 1 : data->node_origins()->AddDecorator();
1908 : }
1909 :
1910 456145 : if (FLAG_concurrent_inlining) {
1911 0 : data->broker()->StartSerializing();
1912 0 : Run<SerializeStandardObjectsPhase>();
1913 0 : Run<SerializationPhase>();
1914 : } else {
1915 456145 : data->broker()->SetNativeContextRef();
1916 : }
1917 :
1918 456150 : Run<GraphBuilderPhase>();
1919 456154 : RunPrintAndVerify(GraphBuilderPhase::phase_name(), true);
1920 :
1921 456148 : if (FLAG_concurrent_inlining) {
1922 0 : Run<CopyMetadataForConcurrentCompilePhase>();
1923 : }
1924 :
1925 : // Perform function context specialization and inlining (if enabled).
1926 456148 : Run<InliningPhase>();
1927 456149 : RunPrintAndVerify(InliningPhase::phase_name(), true);
1928 :
1929 : // Remove dead->live edges from the graph.
1930 456151 : Run<EarlyGraphTrimmingPhase>();
1931 456152 : RunPrintAndVerify(EarlyGraphTrimmingPhase::phase_name(), true);
1932 :
1933 : // Determine the Typer operation flags.
1934 : {
1935 1279104 : if (is_sloppy(info()->shared_info()->language_mode()) &&
1936 822953 : info()->shared_info()->IsUserJavaScript()) {
1937 : // Sloppy mode functions always have an Object for this.
1938 : data->AddTyperFlag(Typer::kThisIsReceiver);
1939 : }
1940 456153 : if (IsClassConstructor(info()->shared_info()->kind())) {
1941 : // Class constructors cannot be [[Call]]ed.
1942 : data->AddTyperFlag(Typer::kNewTargetIsReceiver);
1943 : }
1944 : }
1945 :
1946 : // Run the type-sensitive lowerings and optimizations on the graph.
1947 : {
1948 456153 : if (FLAG_concurrent_inlining) {
1949 : // TODO(neis): Remove CopyMetadataForConcurrentCompilePhase call once
1950 : // brokerization of JSNativeContextSpecialization is complete.
1951 0 : Run<CopyMetadataForConcurrentCompilePhase>();
1952 0 : data->broker()->StopSerializing();
1953 : } else {
1954 456153 : data->broker()->StartSerializing();
1955 456153 : Run<SerializeStandardObjectsPhase>();
1956 456152 : Run<CopyMetadataForConcurrentCompilePhase>();
1957 456153 : data->broker()->StopSerializing();
1958 : }
1959 : }
1960 :
1961 : data->EndPhaseKind();
1962 :
1963 456153 : return true;
1964 : }
1965 :
1966 456106 : bool PipelineImpl::OptimizeGraph(Linkage* linkage) {
1967 2280539 : PipelineData* data = this->data_;
1968 :
1969 : data->BeginPhaseKind("lowering");
1970 :
1971 : // Type the graph and keep the Typer running such that new nodes get
1972 : // automatically typed when they are created.
1973 456106 : Run<TyperPhase>(data->CreateTyper());
1974 456113 : RunPrintAndVerify(TyperPhase::phase_name());
1975 456114 : Run<TypedLoweringPhase>();
1976 456115 : RunPrintAndVerify(TypedLoweringPhase::phase_name());
1977 :
1978 456114 : if (data->info()->is_loop_peeling_enabled()) {
1979 451965 : Run<LoopPeelingPhase>();
1980 451968 : RunPrintAndVerify(LoopPeelingPhase::phase_name(), true);
1981 : } else {
1982 4149 : Run<LoopExitEliminationPhase>();
1983 4149 : RunPrintAndVerify(LoopExitEliminationPhase::phase_name(), true);
1984 : }
1985 :
1986 456094 : if (FLAG_turbo_load_elimination) {
1987 456083 : Run<LoadEliminationPhase>();
1988 456105 : RunPrintAndVerify(LoadEliminationPhase::phase_name());
1989 : }
1990 456115 : data->DeleteTyper();
1991 :
1992 456117 : if (FLAG_turbo_escape) {
1993 456117 : Run<EscapeAnalysisPhase>();
1994 456113 : if (data->compilation_failed()) {
1995 : info()->AbortOptimization(
1996 : BailoutReason::kCyclicObjectStateDetectedInEscapeAnalysis);
1997 : data->EndPhaseKind();
1998 : return false;
1999 : }
2000 456113 : RunPrintAndVerify(EscapeAnalysisPhase::phase_name());
2001 : }
2002 :
2003 : // Perform simplified lowering. This has to run w/o the Typer decorator,
2004 : // because we cannot compute meaningful types anyways, and the computed types
2005 : // might even conflict with the representation/truncation logic.
2006 456101 : Run<SimplifiedLoweringPhase>();
2007 456116 : RunPrintAndVerify(SimplifiedLoweringPhase::phase_name(), true);
2008 :
2009 : // From now on it is invalid to look at types on the nodes, because the types
2010 : // on the nodes might not make sense after representation selection due to the
2011 : // way we handle truncations; if we'd want to look at types afterwards we'd
2012 : // essentially need to re-type (large portions of) the graph.
2013 :
2014 : // In order to catch bugs related to type access after this point, we now
2015 : // remove the types from the nodes (currently only in Debug builds).
2016 : #ifdef DEBUG
2017 : Run<UntyperPhase>();
2018 : RunPrintAndVerify(UntyperPhase::phase_name(), true);
2019 : #endif
2020 :
2021 : // Run generic lowering pass.
2022 456116 : Run<GenericLoweringPhase>();
2023 456113 : RunPrintAndVerify(GenericLoweringPhase::phase_name(), true);
2024 :
2025 : data->BeginPhaseKind("block building");
2026 :
2027 : // Run early optimization pass.
2028 456116 : Run<EarlyOptimizationPhase>();
2029 456114 : RunPrintAndVerify(EarlyOptimizationPhase::phase_name(), true);
2030 :
2031 456116 : Run<EffectControlLinearizationPhase>();
2032 456115 : RunPrintAndVerify(EffectControlLinearizationPhase::phase_name(), true);
2033 :
2034 456116 : if (FLAG_turbo_store_elimination) {
2035 456114 : Run<StoreStoreEliminationPhase>();
2036 456115 : RunPrintAndVerify(StoreStoreEliminationPhase::phase_name(), true);
2037 : }
2038 :
2039 : // Optimize control flow.
2040 456108 : if (FLAG_turbo_cf_optimization) {
2041 456108 : Run<ControlFlowOptimizationPhase>();
2042 456117 : RunPrintAndVerify(ControlFlowOptimizationPhase::phase_name(), true);
2043 : }
2044 :
2045 : // Optimize memory access and allocation operations.
2046 456117 : Run<MemoryOptimizationPhase>();
2047 : // TODO(jarin, rossberg): Remove UNTYPED once machine typing works.
2048 456114 : RunPrintAndVerify(MemoryOptimizationPhase::phase_name(), true);
2049 :
2050 : // Lower changes that have been inserted before.
2051 456117 : Run<LateOptimizationPhase>();
2052 : // TODO(jarin, rossberg): Remove UNTYPED once machine typing works.
2053 456113 : RunPrintAndVerify(LateOptimizationPhase::phase_name(), true);
2054 :
2055 456114 : data->source_positions()->RemoveDecorator();
2056 456091 : if (data->info()->trace_turbo_json_enabled()) {
2057 1 : data->node_origins()->RemoveDecorator();
2058 : }
2059 :
2060 456091 : ComputeScheduledGraph();
2061 :
2062 456109 : return SelectInstructions(linkage);
2063 : }
2064 :
2065 66915 : MaybeHandle<Code> Pipeline::GenerateCodeForCodeStub(
2066 200745 : Isolate* isolate, CallDescriptor* call_descriptor, Graph* graph,
2067 : Code::Kind kind, const char* debug_name, int32_t builtin_index,
2068 : PoisoningMitigationLevel poisoning_level, const AssemblerOptions& options) {
2069 66915 : OptimizedCompilationInfo info(CStrVector(debug_name), graph->zone(), kind);
2070 : info.set_builtin_index(builtin_index);
2071 :
2072 66915 : if (poisoning_level != PoisoningMitigationLevel::kDontPoison) {
2073 : info.SetPoisoningMitigationLevel(poisoning_level);
2074 : }
2075 :
2076 : // Construct a pipeline for scheduling and code generation.
2077 133830 : ZoneStats zone_stats(isolate->allocator());
2078 66915 : NodeOriginTable node_origins(graph);
2079 66915 : SourcePositionTable source_positions(graph);
2080 : JumpOptimizationInfo jump_opt;
2081 : bool should_optimize_jumps =
2082 66915 : isolate->serializer_enabled() && FLAG_turbo_rewrite_far_jumps;
2083 : PipelineData data(&zone_stats, &info, isolate, graph, nullptr,
2084 : &source_positions, &node_origins,
2085 133830 : should_optimize_jumps ? &jump_opt : nullptr, options);
2086 : data.set_verify_graph(FLAG_verify_csa);
2087 : std::unique_ptr<PipelineStatistics> pipeline_statistics;
2088 66915 : if (FLAG_turbo_stats || FLAG_turbo_stats_nvp) {
2089 : pipeline_statistics.reset(new PipelineStatistics(
2090 0 : &info, isolate->GetTurboStatistics(), &zone_stats));
2091 0 : pipeline_statistics->BeginPhaseKind("stub codegen");
2092 : }
2093 :
2094 : PipelineImpl pipeline(&data);
2095 :
2096 133830 : if (info.trace_turbo_json_enabled() || info.trace_turbo_graph_enabled()) {
2097 0 : CodeTracer::Scope tracing_scope(data.GetCodeTracer());
2098 0 : OFStream os(tracing_scope.file());
2099 0 : os << "---------------------------------------------------\n"
2100 0 : << "Begin compiling " << debug_name << " using Turbofan" << std::endl;
2101 0 : if (info.trace_turbo_json_enabled()) {
2102 0 : TurboJsonFile json_of(&info, std::ios_base::trunc);
2103 0 : json_of << "{\"function\" : ";
2104 : JsonPrintFunctionSource(json_of, -1, info.GetDebugName(),
2105 : Handle<Script>(), isolate,
2106 0 : Handle<SharedFunctionInfo>());
2107 0 : json_of << ",\n\"phases\":[";
2108 : }
2109 0 : pipeline.Run<PrintGraphPhase>("Machine");
2110 : }
2111 :
2112 : // Optimize memory access and allocation operations.
2113 66915 : pipeline.Run<MemoryOptimizationPhase>();
2114 66915 : pipeline.RunPrintAndVerify(MemoryOptimizationPhase::phase_name(), true);
2115 :
2116 66915 : pipeline.Run<CsaOptimizationPhase>();
2117 66915 : pipeline.RunPrintAndVerify(CsaOptimizationPhase::phase_name(), true);
2118 :
2119 66915 : pipeline.Run<VerifyGraphPhase>(true);
2120 66915 : pipeline.ComputeScheduledGraph();
2121 : DCHECK_NOT_NULL(data.schedule());
2122 :
2123 : // First run code generation on a copy of the pipeline, in order to be able to
2124 : // repeat it for jump optimization. The first run has to happen on a temporary
2125 : // pipeline to avoid deletion of zones on the main pipeline.
2126 : PipelineData second_data(&zone_stats, &info, isolate, data.graph(),
2127 : data.schedule(), data.source_positions(),
2128 : data.node_origins(), data.jump_optimization_info(),
2129 133830 : options);
2130 : second_data.set_verify_graph(FLAG_verify_csa);
2131 : PipelineImpl second_pipeline(&second_data);
2132 : Handle<Code> code =
2133 133830 : second_pipeline.GenerateCode(call_descriptor).ToHandleChecked();
2134 :
2135 66915 : if (jump_opt.is_optimizable()) {
2136 : jump_opt.set_optimizing();
2137 111216 : code = pipeline.GenerateCode(call_descriptor).ToHandleChecked();
2138 : }
2139 :
2140 133830 : return code;
2141 : }
2142 :
2143 : // static
2144 8553 : wasm::WasmCode* Pipeline::GenerateCodeForWasmNativeStub(
2145 : wasm::WasmEngine* wasm_engine, CallDescriptor* call_descriptor,
2146 8553 : MachineGraph* mcgraph, Code::Kind kind, int wasm_kind,
2147 : const char* debug_name, const AssemblerOptions& options,
2148 : wasm::NativeModule* native_module, SourcePositionTable* source_positions) {
2149 17106 : Graph* graph = mcgraph->graph();
2150 8553 : OptimizedCompilationInfo info(CStrVector(debug_name), graph->zone(), kind);
2151 : // Construct a pipeline for scheduling and code generation.
2152 17106 : ZoneStats zone_stats(wasm_engine->allocator());
2153 8553 : NodeOriginTable* node_positions = new (graph->zone()) NodeOriginTable(graph);
2154 : PipelineData data(&zone_stats, wasm_engine, &info, mcgraph, nullptr,
2155 17106 : source_positions, node_positions, options);
2156 : std::unique_ptr<PipelineStatistics> pipeline_statistics;
2157 8553 : if (FLAG_turbo_stats || FLAG_turbo_stats_nvp) {
2158 : pipeline_statistics.reset(new PipelineStatistics(
2159 0 : &info, wasm_engine->GetOrCreateTurboStatistics(), &zone_stats));
2160 0 : pipeline_statistics->BeginPhaseKind("wasm stub codegen");
2161 : }
2162 :
2163 : PipelineImpl pipeline(&data);
2164 :
2165 17106 : if (info.trace_turbo_json_enabled() || info.trace_turbo_graph_enabled()) {
2166 0 : CodeTracer::Scope tracing_scope(data.GetCodeTracer());
2167 0 : OFStream os(tracing_scope.file());
2168 0 : os << "---------------------------------------------------\n"
2169 0 : << "Begin compiling method " << info.GetDebugName().get()
2170 0 : << " using Turbofan" << std::endl;
2171 : }
2172 :
2173 8553 : if (info.trace_turbo_graph_enabled()) { // Simple textual RPO.
2174 0 : StdoutStream{} << "-- wasm stub " << Code::Kind2String(kind) << " graph -- "
2175 : << std::endl
2176 0 : << AsRPO(*graph);
2177 : }
2178 :
2179 8553 : if (info.trace_turbo_json_enabled()) {
2180 0 : TurboJsonFile json_of(&info, std::ios_base::trunc);
2181 0 : json_of << "{\"function\":\"" << info.GetDebugName().get()
2182 0 : << "\", \"source\":\"\",\n\"phases\":[";
2183 : }
2184 :
2185 8553 : pipeline.RunPrintAndVerify("machine", true);
2186 8553 : pipeline.ComputeScheduledGraph();
2187 :
2188 : Linkage linkage(call_descriptor);
2189 8553 : if (!pipeline.SelectInstructions(&linkage)) return nullptr;
2190 8553 : pipeline.AssembleCode(&linkage);
2191 :
2192 17106 : CodeGenerator* code_generator = pipeline.code_generator();
2193 8553 : CodeDesc code_desc;
2194 8553 : code_generator->tasm()->GetCode(nullptr, &code_desc);
2195 :
2196 : wasm::WasmCode* code = native_module->AddCode(
2197 : wasm::WasmCode::kAnonymousFuncIndex, code_desc,
2198 8553 : code_generator->frame()->GetTotalFrameSlotCount(),
2199 : code_generator->GetSafepointTableOffset(),
2200 : code_generator->GetHandlerTableOffset(),
2201 : code_generator->GetProtectedInstructions(),
2202 : code_generator->GetSourcePositionTable(),
2203 51318 : static_cast<wasm::WasmCode::Kind>(wasm_kind), wasm::WasmCode::kOther);
2204 :
2205 8553 : if (info.trace_turbo_json_enabled()) {
2206 0 : TurboJsonFile json_of(&info, std::ios_base::app);
2207 0 : json_of << "{\"name\":\"disassembly\",\"type\":\"disassembly\",\"data\":\"";
2208 : #ifdef ENABLE_DISASSEMBLER
2209 : std::stringstream disassembler_stream;
2210 : Disassembler::Decode(
2211 : nullptr, &disassembler_stream, code->instructions().start(),
2212 : code->instructions().start() + code->safepoint_table_offset(),
2213 : CodeReference(code));
2214 : for (auto const c : disassembler_stream.str()) {
2215 : json_of << AsEscapedUC16ForJSON(c);
2216 : }
2217 : #endif // ENABLE_DISASSEMBLER
2218 0 : json_of << "\"}\n]";
2219 0 : json_of << "\n}";
2220 : }
2221 :
2222 17106 : if (info.trace_turbo_json_enabled() || info.trace_turbo_graph_enabled()) {
2223 0 : CodeTracer::Scope tracing_scope(data.GetCodeTracer());
2224 0 : OFStream os(tracing_scope.file());
2225 0 : os << "---------------------------------------------------\n"
2226 0 : << "Finished compiling method " << info.GetDebugName().get()
2227 0 : << " using Turbofan" << std::endl;
2228 : }
2229 :
2230 17106 : return code;
2231 : }
2232 :
// static
// Compiles an already-built "machine"-level TurboFan graph into a wasm heap
// stub Code object of the given |kind|. Only the back half of the pipeline
// runs here: scheduling, instruction selection, register allocation and
// assembly. Returns an empty handle if code generation or committing the
// collected code dependencies fails.
MaybeHandle<Code> Pipeline::GenerateCodeForWasmHeapStub(
    Isolate* isolate, CallDescriptor* call_descriptor, Graph* graph,
    Code::Kind kind, const char* debug_name, const AssemblerOptions& options,
    SourcePositionTable* source_positions) {
  OptimizedCompilationInfo info(CStrVector(debug_name), graph->zone(), kind);
  // Construct a pipeline for scheduling and code generation.
  ZoneStats zone_stats(isolate->allocator());
  // Node origins are placement-allocated in the graph zone so they share
  // the graph's lifetime.
  NodeOriginTable* node_positions = new (graph->zone()) NodeOriginTable(graph);
  PipelineData data(&zone_stats, &info, isolate, graph, nullptr,
                    source_positions, node_positions, nullptr, options);
  std::unique_ptr<PipelineStatistics> pipeline_statistics;
  if (FLAG_turbo_stats || FLAG_turbo_stats_nvp) {
    pipeline_statistics.reset(new PipelineStatistics(
        &info, isolate->GetTurboStatistics(), &zone_stats));
    pipeline_statistics->BeginPhaseKind("wasm stub codegen");
  }

  PipelineImpl pipeline(&data);

  // Tracing preamble, emitted when either tracing flag is on.
  if (info.trace_turbo_json_enabled() ||
      info.trace_turbo_graph_enabled()) {
    CodeTracer::Scope tracing_scope(data.GetCodeTracer());
    OFStream os(tracing_scope.file());
    os << "---------------------------------------------------\n"
       << "Begin compiling method " << info.GetDebugName().get()
       << " using Turbofan" << std::endl;
  }

  if (info.trace_turbo_graph_enabled()) {  // Simple textual RPO.
    StdoutStream{} << "-- wasm stub " << Code::Kind2String(kind) << " graph -- "
                   << std::endl
                   << AsRPO(*graph);
  }

  if (info.trace_turbo_json_enabled()) {
    TurboJsonFile json_of(&info, std::ios_base::trunc);
    json_of << "{\"function\":\"" << info.GetDebugName().get()
            << "\", \"source\":\"\",\n\"phases\":[";
  }

  pipeline.RunPrintAndVerify("machine", true);
  pipeline.ComputeScheduledGraph();

  Handle<Code> code;
  if (pipeline.GenerateCode(call_descriptor).ToHandle(&code) &&
      pipeline.CommitDependencies(code)) {
    return code;
  }
  return MaybeHandle<Code>();
}
2284 :
// static
// Test-only entry point: runs the complete optimizing pipeline for |info|
// (graph construction, optimization, code generation) and returns the
// resulting Code, or an empty handle if any stage bails out.
MaybeHandle<Code> Pipeline::GenerateCodeForTesting(
    OptimizedCompilationInfo* info, Isolate* isolate) {
  ZoneStats zone_stats(isolate->allocator());
  std::unique_ptr<PipelineStatistics> pipeline_statistics(
      CreatePipelineStatistics(Handle<Script>::null(), info, isolate,
                               &zone_stats));
  PipelineData data(&zone_stats, isolate, info, pipeline_statistics.get());
  PipelineImpl pipeline(&data);

  Linkage linkage(Linkage::ComputeIncoming(data.instruction_zone(), info));
  // Generated code may refer to deoptimization entry stubs; make sure they
  // exist before assembling.
  Deoptimizer::EnsureCodeForDeoptimizationEntries(isolate);

  if (!pipeline.CreateGraph()) return MaybeHandle<Code>();
  if (!pipeline.OptimizeGraph(&linkage)) return MaybeHandle<Code>();
  pipeline.AssembleCode(&linkage);
  Handle<Code> code;
  if (pipeline.FinalizeCode().ToHandle(&code) &&
      pipeline.CommitDependencies(code)) {
    return code;
  }
  return MaybeHandle<Code>();
}
2308 :
// static
// Test-only entry point for a caller-provided graph (and, optionally, an
// existing schedule): runs only scheduling (when needed) and the code
// generation back end. Returns an empty handle on failure.
MaybeHandle<Code> Pipeline::GenerateCodeForTesting(
    OptimizedCompilationInfo* info, Isolate* isolate,
    CallDescriptor* call_descriptor, Graph* graph,
    const AssemblerOptions& options, Schedule* schedule) {
  // Construct a pipeline for scheduling and code generation.
  ZoneStats zone_stats(isolate->allocator());
  NodeOriginTable* node_positions = new (info->zone()) NodeOriginTable(graph);
  PipelineData data(&zone_stats, info, isolate, graph, schedule, nullptr,
                    node_positions, nullptr, options);
  std::unique_ptr<PipelineStatistics> pipeline_statistics;
  if (FLAG_turbo_stats || FLAG_turbo_stats_nvp) {
    pipeline_statistics.reset(new PipelineStatistics(
        info, isolate->GetTurboStatistics(), &zone_stats));
    pipeline_statistics->BeginPhaseKind("test codegen");
  }

  PipelineImpl pipeline(&data);

  if (info->trace_turbo_json_enabled()) {
    TurboJsonFile json_of(info, std::ios_base::trunc);
    json_of << "{\"function\":\"" << info->GetDebugName().get()
            << "\", \"source\":\"\",\n\"phases\":[";
  }
  // TODO(rossberg): Should this really be untyped?
  pipeline.RunPrintAndVerify("machine", true);

  // Ensure we have a schedule.
  if (data.schedule() == nullptr) {
    pipeline.ComputeScheduledGraph();
  }

  Handle<Code> code;
  if (pipeline.GenerateCode(call_descriptor).ToHandle(&code) &&
      pipeline.CommitDependencies(code)) {
    return code;
  }
  return MaybeHandle<Code>();
}
2348 :
2349 : // static
2350 470360 : OptimizedCompilationJob* Pipeline::NewCompilationJob(
2351 : Isolate* isolate, Handle<JSFunction> function, bool has_script) {
2352 : Handle<SharedFunctionInfo> shared =
2353 940733 : handle(function->shared(), function->GetIsolate());
2354 470367 : return new PipelineCompilationJob(isolate, shared, function);
2355 : }
2356 :
// static
// Compiles a single wasm (or asm.js-origin) function from an already-built
// machine graph: runs a wasm-specific reducer pass, schedules, selects
// instructions, allocates registers, assembles, and registers the result
// with |native_module|. Returns the new WasmCode, or nullptr if instruction
// selection fails.
wasm::WasmCode* Pipeline::GenerateCodeForWasmFunction(
    OptimizedCompilationInfo* info, wasm::WasmEngine* wasm_engine,
    MachineGraph* mcgraph, CallDescriptor* call_descriptor,
    SourcePositionTable* source_positions, NodeOriginTable* node_origins,
    wasm::FunctionBody function_body, wasm::NativeModule* native_module,
    int function_index) {
  ZoneStats zone_stats(wasm_engine->allocator());
  std::unique_ptr<PipelineStatistics> pipeline_statistics(
      CreatePipelineStatistics(wasm_engine, function_body,
                               native_module->module(), info, &zone_stats));
  PipelineData data(&zone_stats, wasm_engine, info, mcgraph,
                    pipeline_statistics.get(), source_positions, node_origins,
                    WasmAssemblerOptions());

  PipelineImpl pipeline(&data);

  if (data.info()->trace_turbo_json_enabled() ||
      data.info()->trace_turbo_graph_enabled()) {
    CodeTracer::Scope tracing_scope(data.GetCodeTracer());
    OFStream os(tracing_scope.file());
    os << "---------------------------------------------------\n"
       << "Begin compiling method " << data.info()->GetDebugName().get()
       << " using Turbofan" << std::endl;
  }

  pipeline.RunPrintAndVerify("Machine", true);

  data.BeginPhaseKind("wasm optimization");
  const bool is_asm_js = native_module->module()->origin == wasm::kAsmJsOrigin;
  if (FLAG_turbo_splitting && !is_asm_js) {
    data.info()->MarkAsSplittingEnabled();
  }
  if (FLAG_wasm_opt || is_asm_js) {
    // Full optimization: dead code elimination, machine/common operator
    // reduction and value numbering.
    PipelineRunScope scope(&data, "wasm full optimization");
    GraphReducer graph_reducer(scope.zone(), data.graph(),
                               data.mcgraph()->Dead());
    DeadCodeElimination dead_code_elimination(&graph_reducer, data.graph(),
                                              data.common(), scope.zone());
    ValueNumberingReducer value_numbering(scope.zone(), data.graph()->zone());
    // Signalling NaNs are only allowed through for asm.js-origin code.
    const bool allow_signalling_nan = is_asm_js;
    MachineOperatorReducer machine_reducer(&graph_reducer, data.mcgraph(),
                                           allow_signalling_nan);
    CommonOperatorReducer common_reducer(&graph_reducer, data.graph(),
                                         data.broker(), data.common(),
                                         data.machine(), scope.zone());
    AddReducer(&data, &graph_reducer, &dead_code_elimination);
    AddReducer(&data, &graph_reducer, &machine_reducer);
    AddReducer(&data, &graph_reducer, &common_reducer);
    AddReducer(&data, &graph_reducer, &value_numbering);
    graph_reducer.ReduceGraph();
  } else {
    // Baseline optimization: value numbering only.
    PipelineRunScope scope(&data, "wasm base optimization");
    GraphReducer graph_reducer(scope.zone(), data.graph(),
                               data.mcgraph()->Dead());
    ValueNumberingReducer value_numbering(scope.zone(), data.graph()->zone());
    AddReducer(&data, &graph_reducer, &value_numbering);
    graph_reducer.ReduceGraph();
  }
  pipeline.RunPrintAndVerify("wasm optimization", true);

  if (data.node_origins()) {
    data.node_origins()->RemoveDecorator();
  }

  pipeline.ComputeScheduledGraph();

  Linkage linkage(call_descriptor);
  if (!pipeline.SelectInstructions(&linkage)) return nullptr;
  pipeline.AssembleCode(&linkage);

  CodeGenerator* code_generator = pipeline.code_generator();
  CodeDesc code_desc;
  code_generator->tasm()->GetCode(nullptr, &code_desc);

  // Hand the generated code over to the native module, which owns it.
  wasm::WasmCode* code = native_module->AddCode(
      function_index, code_desc,
      code_generator->frame()->GetTotalFrameSlotCount(),
      code_generator->GetSafepointTableOffset(),
      code_generator->GetHandlerTableOffset(),
      code_generator->GetProtectedInstructions(),
      code_generator->GetSourcePositionTable(), wasm::WasmCode::kFunction,
      wasm::WasmCode::kTurbofan);

  if (data.info()->trace_turbo_json_enabled()) {
    TurboJsonFile json_of(data.info(), std::ios_base::app);
    json_of << "{\"name\":\"disassembly\",\"type\":\"disassembly\",\"data\":\"";
#ifdef ENABLE_DISASSEMBLER
    std::stringstream disassembler_stream;
    Disassembler::Decode(
        nullptr, &disassembler_stream, code->instructions().start(),
        code->instructions().start() + code->safepoint_table_offset(),
        CodeReference(code));
    for (auto const c : disassembler_stream.str()) {
      json_of << AsEscapedUC16ForJSON(c);
    }
#endif  // ENABLE_DISASSEMBLER
    json_of << "\"}\n]";
    json_of << "\n}";
  }

  if (data.info()->trace_turbo_json_enabled() ||
      data.info()->trace_turbo_graph_enabled()) {
    CodeTracer::Scope tracing_scope(data.GetCodeTracer());
    OFStream os(tracing_scope.file());
    os << "---------------------------------------------------\n"
       << "Finished compiling method " << data.info()->GetDebugName().get()
       << " using Turbofan" << std::endl;
  }

  return code;
}
2469 :
2470 42 : bool Pipeline::AllocateRegistersForTesting(const RegisterConfiguration* config,
2471 126 : InstructionSequence* sequence,
2472 : bool run_verifier) {
2473 : OptimizedCompilationInfo info(ArrayVector("testing"), sequence->zone(),
2474 42 : Code::STUB);
2475 84 : ZoneStats zone_stats(sequence->isolate()->allocator());
2476 84 : PipelineData data(&zone_stats, &info, sequence->isolate(), sequence);
2477 42 : data.InitializeFrameData(nullptr);
2478 : PipelineImpl pipeline(&data);
2479 42 : pipeline.AllocateRegisters(config, nullptr, run_verifier);
2480 84 : return !data.compilation_failed();
2481 : }
2482 :
// Trims the graph and computes a schedule for it; the resulting schedule is
// stored in the pipeline data. Must only be called when no schedule exists.
void PipelineImpl::ComputeScheduledGraph() {
  PipelineData* data = this->data_;

  // We should only schedule the graph if it is not scheduled yet.
  DCHECK_NULL(data->schedule());

  Run<LateGraphTrimmingPhase>();
  RunPrintAndVerify(LateGraphTrimmingPhase::phase_name(), true);

  Run<ComputeSchedulePhase>();
  TraceSchedule(data->info(), data, data->schedule(), "schedule");
}
2495 :
// Lowers the scheduled graph to an InstructionSequence, then allocates
// registers, elides unneeded frames and (optionally) threads jumps.
// Returns false if instruction selection fails or frame elision runs out
// of virtual registers.
bool PipelineImpl::SelectInstructions(Linkage* linkage) {
  auto call_descriptor = linkage->GetIncomingDescriptor();
  PipelineData* data = this->data_;

  // We should have a scheduled graph.
  DCHECK_NOT_NULL(data->graph());
  DCHECK_NOT_NULL(data->schedule());

  if (FLAG_turbo_profiling) {
    data->set_profiler_data(BasicBlockInstrumentor::Instrument(
        info(), data->graph(), data->schedule(), data->isolate()));
  }

  bool verify_stub_graph = data->verify_graph();
  // Jump optimization runs instruction selection twice, but the instruction
  // selector mutates nodes like swapping the inputs of a load, which can
  // violate the machine graph verification rules. So we skip the second
  // verification on a graph that already verified before.
  auto jump_opt = data->jump_optimization_info();
  if (jump_opt && jump_opt->is_optimizing()) {
    verify_stub_graph = false;
  }
  // Run the machine graph verifier when requested, either via the
  // per-graph verify flag or the --turbo-verify-machine-graph filter.
  if (verify_stub_graph ||
      (FLAG_turbo_verify_machine_graph != nullptr &&
       (!strcmp(FLAG_turbo_verify_machine_graph, "*") ||
        !strcmp(FLAG_turbo_verify_machine_graph, data->debug_name())))) {
    if (FLAG_trace_verify_csa) {
      AllowHandleDereference allow_deref;
      CodeTracer::Scope tracing_scope(data->GetCodeTracer());
      OFStream os(tracing_scope.file());
      os << "--------------------------------------------------\n"
         << "--- Verifying " << data->debug_name() << " generated by TurboFan\n"
         << "--------------------------------------------------\n"
         << *data->schedule()
         << "--------------------------------------------------\n"
         << "--- End of " << data->debug_name() << " generated by TurboFan\n"
         << "--------------------------------------------------\n";
    }
    Zone temp_zone(data->allocator(), ZONE_NAME);
    MachineGraphVerifier::Run(
        data->graph(), data->schedule(), linkage,
        data->info()->IsNotOptimizedFunctionOrWasmFunction(),
        data->debug_name(), &temp_zone);
  }

  data->InitializeInstructionSequence(call_descriptor);

  data->InitializeFrameData(call_descriptor);
  // Select and schedule instructions covering the scheduled graph.
  Run<InstructionSelectionPhase>(linkage);
  if (data->compilation_failed()) {
    info()->AbortOptimization(BailoutReason::kCodeGenerationFailed);
    data->EndPhaseKind();
    return false;
  }

  if (info()->trace_turbo_json_enabled() && !data->MayHaveUnverifiableGraph()) {
    AllowHandleDereference allow_deref;
    TurboCfgFile tcf(isolate());
    tcf << AsC1V("CodeGen", data->schedule(), data->source_positions(),
                 data->sequence());
  }

  if (info()->trace_turbo_json_enabled()) {
    std::ostringstream source_position_output;
    // Output source position information before the graph is deleted.
    if (data_->source_positions() != nullptr) {
      data_->source_positions()->PrintJson(source_position_output);
    } else {
      source_position_output << "{}";
    }
    source_position_output << ",\n\"NodeOrigins\" : ";
    data_->node_origins()->PrintJson(source_position_output);
    data_->set_source_position_output(source_position_output.str());
  }

  // The graph is no longer needed past this point; release its zone.
  data->DeleteGraphZone();

  data->BeginPhaseKind("register allocation");

  bool run_verifier = FLAG_turbo_verify_allocation;

  // Allocate registers.
  if (call_descriptor->HasRestrictedAllocatableRegisters()) {
    RegList registers = call_descriptor->AllocatableRegisters();
    DCHECK_LT(0, NumRegs(registers));
    std::unique_ptr<const RegisterConfiguration> config;
    config.reset(RegisterConfiguration::RestrictGeneralRegisters(registers));
    AllocateRegisters(config.get(), call_descriptor, run_verifier);
  } else if (data->info()->GetPoisoningMitigationLevel() !=
             PoisoningMitigationLevel::kDontPoison) {
#ifdef V8_TARGET_ARCH_IA32
    FATAL("Poisoning is not supported on ia32.");
#else
    AllocateRegisters(RegisterConfiguration::Poisoning(), call_descriptor,
                      run_verifier);
#endif  // V8_TARGET_ARCH_IA32
  } else {
    AllocateRegisters(RegisterConfiguration::Default(), call_descriptor,
                      run_verifier);
  }

  // Verify the instruction sequence has the same hash in two stages.
  VerifyGeneratedCodeIsIdempotent();

  Run<FrameElisionPhase>();
  if (data->compilation_failed()) {
    info()->AbortOptimization(
        BailoutReason::kNotEnoughVirtualRegistersRegalloc);
    data->EndPhaseKind();
    return false;
  }

  // TODO(mtrofin): move this off to the register allocator.
  bool generate_frame_at_start =
      data_->sequence()->instruction_blocks().front()->must_construct_frame();
  // Optimize jumps.
  if (FLAG_turbo_jt) {
    Run<JumpThreadingPhase>(generate_frame_at_start);
  }

  data->EndPhaseKind();

  return true;
}
2621 :
// Jump optimization runs the back end twice. This hashes the structure of
// the instruction sequence (block/register counts, per-instruction opcode
// and operand counts, per-register representation): the first run stores
// the hash, the second run CHECKs it, catching non-idempotent codegen.
// No-op when jump optimization is not active.
void PipelineImpl::VerifyGeneratedCodeIsIdempotent() {
  PipelineData* data = this->data_;
  JumpOptimizationInfo* jump_opt = data->jump_optimization_info();
  if (jump_opt == nullptr) return;

  InstructionSequence* code = data->sequence();
  int instruction_blocks = code->InstructionBlockCount();
  int virtual_registers = code->VirtualRegisterCount();
  size_t hash_code = base::hash_combine(instruction_blocks, virtual_registers);
  for (auto instr : *code) {
    hash_code = base::hash_combine(hash_code, instr->opcode(),
                                   instr->InputCount(), instr->OutputCount());
  }
  for (int i = 0; i < virtual_registers; i++) {
    hash_code = base::hash_combine(hash_code, code->GetRepresentation(i));
  }
  if (jump_opt->is_collecting()) {
    jump_opt->set_hash_code(hash_code);
  } else {
    CHECK_EQ(hash_code, jump_opt->hash_code());
  }
}
2644 :
// Wrapper that prints an instruction-index -> PC-offset table as JSON;
// see the operator<< overload below.
struct InstructionStartsAsJSON {
  const ZoneVector<int>* instr_starts;
};
2648 :
2649 2 : std::ostream& operator<<(std::ostream& out, const InstructionStartsAsJSON& s) {
2650 2 : out << ", \"instructionOffsetToPCOffset\": {";
2651 : bool need_comma = false;
2652 130 : for (size_t i = 0; i < s.instr_starts->size(); ++i) {
2653 63 : if (need_comma) out << ", ";
2654 191 : int offset = (*s.instr_starts)[i];
2655 126 : out << "\"" << i << "\":" << offset;
2656 : need_comma = true;
2657 : }
2658 2 : out << "}";
2659 2 : return out;
2660 : }
2661 :
// Runs the code generator over the selected instruction sequence. After
// assembly the instruction zone is no longer needed and is released.
void PipelineImpl::AssembleCode(Linkage* linkage) {
  PipelineData* data = this->data_;
  data->BeginPhaseKind("code generation");
  data->InitializeCodeGenerator(linkage);

  Run<AssembleCodePhase>();
  if (data->info()->trace_turbo_json_enabled()) {
    TurboJsonFile json_of(data->info(), std::ios_base::app);
    json_of << "{\"name\":\"code generation\""
            << ", \"type\":\"instructions\""
            << InstructionStartsAsJSON{&data->code_generator()->instr_starts()};
    json_of << "},\n";
  }
  data->DeleteInstructionZone();
}
2677 :
// Wrapper that prints a block-id -> code-offset table as JSON;
// see the operator<< overload below.
struct BlockStartsAsJSON {
  const ZoneVector<int>* block_starts;
};
2681 :
2682 2 : std::ostream& operator<<(std::ostream& out, const BlockStartsAsJSON& s) {
2683 2 : out << ", \"blockIdToOffset\": {";
2684 : bool need_comma = false;
2685 42 : for (size_t i = 0; i < s.block_starts->size(); ++i) {
2686 19 : if (need_comma) out << ", ";
2687 59 : int offset = (*s.block_starts)[i];
2688 38 : out << "\"" << i << "\":" << offset;
2689 : need_comma = true;
2690 : }
2691 2 : out << "},";
2692 2 : return out;
2693 : }
2694 :
// Produces the final Code object from the assembled code, attaches it to
// the compilation info, and emits disassembly/tracing output when the
// corresponding flags are enabled. Returns an empty handle on failure.
MaybeHandle<Code> PipelineImpl::FinalizeCode() {
  PipelineData* data = this->data_;
  // The broker's lifetime ends here; retire it before finalization.
  if (data->broker()) {
    data->broker()->Retire();
  }
  Run<FinalizeCodePhase>();

  MaybeHandle<Code> maybe_code = data->code();
  Handle<Code> code;
  if (!maybe_code.ToHandle(&code)) {
    return maybe_code;
  }

  if (data->profiler_data()) {
#ifdef ENABLE_DISASSEMBLER
    std::ostringstream os;
    code->Disassemble(nullptr, os);
    data->profiler_data()->SetCode(&os);
#endif  // ENABLE_DISASSEMBLER
  }

  info()->SetCode(code);
  PrintCode(isolate(), code, info());

  if (info()->trace_turbo_json_enabled()) {
    TurboJsonFile json_of(info(), std::ios_base::app);

    json_of << "{\"name\":\"disassembly\",\"type\":\"disassembly\""
            << BlockStartsAsJSON{&data->code_generator()->block_starts()}
            << "\"data\":\"";
#ifdef ENABLE_DISASSEMBLER
    std::stringstream disassembly_stream;
    code->Disassemble(nullptr, disassembly_stream);
    std::string disassembly_string(disassembly_stream.str());
    for (const auto& c : disassembly_string) {
      json_of << AsEscapedUC16ForJSON(c);
    }
#endif  // ENABLE_DISASSEMBLER
    json_of << "\"}\n],\n";
    json_of << "\"nodePositions\":";
    json_of << data->source_position_output() << ",\n";
    JsonPrintAllSourceWithPositions(json_of, data->info(), isolate());
    json_of << "\n}";
  }
  if (info()->trace_turbo_json_enabled() ||
      info()->trace_turbo_graph_enabled()) {
    CodeTracer::Scope tracing_scope(data->GetCodeTracer());
    OFStream os(tracing_scope.file());
    os << "---------------------------------------------------\n"
       << "Finished compiling method " << info()->GetDebugName().get()
       << " using Turbofan" << std::endl;
  }
  return code;
}
2749 :
// Back-end entry point: runs instruction selection, register allocation
// and final assembly for an already-scheduled graph. Returns an empty
// handle if instruction selection fails.
MaybeHandle<Code> PipelineImpl::GenerateCode(CallDescriptor* call_descriptor) {
  Linkage linkage(call_descriptor);

  // Perform instruction selection and register allocation.
  if (!SelectInstructions(&linkage)) return MaybeHandle<Code>();

  // Generate the final machine code.
  AssembleCode(&linkage);
  return FinalizeCode();
}
2760 :
2761 1758197 : bool PipelineImpl::CommitDependencies(Handle<Code> code) {
2762 2214187 : return data_->dependencies() == nullptr ||
2763 2214187 : data_->dependencies()->Commit(code);
2764 : }
2765 :
2766 : namespace {
2767 :
// Dumps the current instruction sequence, labelled with |phase_name|, to
// the Turbo JSON trace file and/or the code tracer, depending on which
// tracing flags are enabled.
void TraceSequence(OptimizedCompilationInfo* info, PipelineData* data,
                   const char* phase_name) {
  if (info->trace_turbo_json_enabled()) {
    AllowHandleDereference allow_deref;
    TurboJsonFile json_of(info, std::ios_base::app);
    json_of << "{\"name\":\"" << phase_name << "\",\"type\":\"sequence\",";
    json_of << InstructionSequenceAsJSON{data->sequence()};
    json_of << "},\n";
  }
  if (info->trace_turbo_graph_enabled()) {
    AllowHandleDereference allow_deref;
    CodeTracer::Scope tracing_scope(data->GetCodeTracer());
    OFStream os(tracing_scope.file());
    os << "----- Instruction sequence " << phase_name << " -----\n"
       << *data->sequence();
  }
}
2785 :
2786 : } // namespace
2787 :
// Runs the register-allocation sub-pipeline over the current instruction
// sequence: constraint/liveness phases, optional live-range splintering,
// linear-scan allocation for general and FP registers, spill-slot
// assignment, reference-map population and move optimization. When
// |run_verifier| is set, the allocation is cross-checked at several points.
void PipelineImpl::AllocateRegisters(const RegisterConfiguration* config,
                                     CallDescriptor* call_descriptor,
                                     bool run_verifier) {
  PipelineData* data = this->data_;
  // Don't track usage for this zone in compiler stats.
  std::unique_ptr<Zone> verifier_zone;
  RegisterAllocatorVerifier* verifier = nullptr;
  if (run_verifier) {
    verifier_zone.reset(new Zone(data->allocator(), ZONE_NAME));
    verifier = new (verifier_zone.get()) RegisterAllocatorVerifier(
        verifier_zone.get(), config, data->sequence());
  }

#ifdef DEBUG
  data_->sequence()->ValidateEdgeSplitForm();
  data_->sequence()->ValidateDeferredBlockEntryPaths();
  data_->sequence()->ValidateDeferredBlockExitPaths();
#endif

  data->InitializeRegisterAllocationData(config, call_descriptor);
  if (info()->is_osr()) data->osr_helper()->SetupFrame(data->frame());

  Run<MeetRegisterConstraintsPhase>();
  Run<ResolvePhisPhase>();
  Run<BuildLiveRangesPhase>();
  Run<BuildBundlesPhase>();

  TraceSequence(info(), data, "before register allocation");
  if (verifier != nullptr) {
    CHECK(!data->register_allocation_data()->ExistsUseWithoutDefinition());
    CHECK(data->register_allocation_data()
              ->RangesDefinedInDeferredStayInDeferred());
  }

  if (info()->trace_turbo_json_enabled() && !data->MayHaveUnverifiableGraph()) {
    TurboCfgFile tcf(isolate());
    tcf << AsC1VRegisterAllocationData("PreAllocation",
                                       data->register_allocation_data());
  }

  if (FLAG_turbo_preprocess_ranges) {
    Run<SplinterLiveRangesPhase>();
    if (info()->trace_turbo_json_enabled() &&
        !data->MayHaveUnverifiableGraph()) {
      TurboCfgFile tcf(isolate());
      tcf << AsC1VRegisterAllocationData("PostSplinter",
                                         data->register_allocation_data());
    }
  }

  Run<AllocateGeneralRegistersPhase<LinearScanAllocator>>();

  // Floating-point registers only need allocation if any FP virtual
  // registers exist in the sequence.
  if (data->sequence()->HasFPVirtualRegisters()) {
    Run<AllocateFPRegistersPhase<LinearScanAllocator>>();
  }

  if (FLAG_turbo_preprocess_ranges) {
    Run<MergeSplintersPhase>();
  }

  Run<AssignSpillSlotsPhase>();

  Run<CommitAssignmentPhase>();

  // TODO(chromium:725559): remove this check once
  // we understand the cause of the bug. We keep just the
  // check at the end of the allocation.
  if (verifier != nullptr) {
    verifier->VerifyAssignment("Immediately after CommitAssignmentPhase.");
  }

  Run<PopulateReferenceMapsPhase>();
  Run<ConnectRangesPhase>();
  Run<ResolveControlFlowPhase>();
  if (FLAG_turbo_move_optimization) {
    Run<OptimizeMovesPhase>();
  }

  Run<LocateSpillSlotsPhase>();

  TraceSequence(info(), data, "after register allocation");

  if (verifier != nullptr) {
    verifier->VerifyAssignment("End of regalloc pipeline.");
    verifier->VerifyGapMoves();
  }

  if (info()->trace_turbo_json_enabled() && !data->MayHaveUnverifiableGraph()) {
    TurboCfgFile tcf(isolate());
    tcf << AsC1VRegisterAllocationData("CodeGen",
                                       data->register_allocation_data());
  }

  data->DeleteRegisterAllocationZone();
}
2883 :
// Accessor for the compilation info held by the pipeline data.
OptimizedCompilationInfo* PipelineImpl::info() const { return data_->info(); }
2885 :
// Accessor for the isolate held by the pipeline data.
Isolate* PipelineImpl::isolate() const { return data_->isolate(); }
2887 :
// Accessor for the code generator held by the pipeline data.
CodeGenerator* PipelineImpl::code_generator() const {
  return data_->code_generator();
}
2891 :
2892 : } // namespace compiler
2893 : } // namespace internal
2894 183867 : } // namespace v8
|