Line data Source code
1 : // Copyright 2014 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #include "src/compiler/pipeline.h"
6 :
7 : #include <fstream> // NOLINT(readability/streams)
8 : #include <iostream>
9 : #include <memory>
10 : #include <sstream>
11 :
12 : #include "src/assembler-inl.h"
13 : #include "src/base/adapters.h"
14 : #include "src/base/optional.h"
15 : #include "src/base/platform/elapsed-timer.h"
16 : #include "src/bootstrapper.h"
17 : #include "src/code-tracer.h"
18 : #include "src/compiler.h"
19 : #include "src/compiler/backend/code-generator.h"
20 : #include "src/compiler/backend/frame-elider.h"
21 : #include "src/compiler/backend/instruction-selector.h"
22 : #include "src/compiler/backend/instruction.h"
23 : #include "src/compiler/backend/jump-threading.h"
24 : #include "src/compiler/backend/live-range-separator.h"
25 : #include "src/compiler/backend/move-optimizer.h"
26 : #include "src/compiler/backend/register-allocator-verifier.h"
27 : #include "src/compiler/backend/register-allocator.h"
28 : #include "src/compiler/basic-block-instrumentor.h"
29 : #include "src/compiler/branch-elimination.h"
30 : #include "src/compiler/bytecode-graph-builder.h"
31 : #include "src/compiler/checkpoint-elimination.h"
32 : #include "src/compiler/common-operator-reducer.h"
33 : #include "src/compiler/compilation-dependencies.h"
34 : #include "src/compiler/compiler-source-position-table.h"
35 : #include "src/compiler/constant-folding-reducer.h"
36 : #include "src/compiler/control-flow-optimizer.h"
37 : #include "src/compiler/dead-code-elimination.h"
38 : #include "src/compiler/effect-control-linearizer.h"
39 : #include "src/compiler/escape-analysis-reducer.h"
40 : #include "src/compiler/escape-analysis.h"
41 : #include "src/compiler/graph-trimmer.h"
42 : #include "src/compiler/graph-visualizer.h"
43 : #include "src/compiler/js-call-reducer.h"
44 : #include "src/compiler/js-context-specialization.h"
45 : #include "src/compiler/js-create-lowering.h"
46 : #include "src/compiler/js-generic-lowering.h"
47 : #include "src/compiler/js-heap-broker.h"
48 : #include "src/compiler/js-heap-copy-reducer.h"
49 : #include "src/compiler/js-inlining-heuristic.h"
50 : #include "src/compiler/js-intrinsic-lowering.h"
51 : #include "src/compiler/js-native-context-specialization.h"
52 : #include "src/compiler/js-typed-lowering.h"
53 : #include "src/compiler/load-elimination.h"
54 : #include "src/compiler/loop-analysis.h"
55 : #include "src/compiler/loop-peeling.h"
56 : #include "src/compiler/loop-variable-optimizer.h"
57 : #include "src/compiler/machine-graph-verifier.h"
58 : #include "src/compiler/machine-operator-reducer.h"
59 : #include "src/compiler/memory-optimizer.h"
60 : #include "src/compiler/node-origin-table.h"
61 : #include "src/compiler/osr.h"
62 : #include "src/compiler/pipeline-statistics.h"
63 : #include "src/compiler/redundancy-elimination.h"
64 : #include "src/compiler/schedule.h"
65 : #include "src/compiler/scheduler.h"
66 : #include "src/compiler/select-lowering.h"
67 : #include "src/compiler/serializer-for-background-compilation.h"
68 : #include "src/compiler/simplified-lowering.h"
69 : #include "src/compiler/simplified-operator-reducer.h"
70 : #include "src/compiler/simplified-operator.h"
71 : #include "src/compiler/store-store-elimination.h"
72 : #include "src/compiler/type-narrowing-reducer.h"
73 : #include "src/compiler/typed-optimization.h"
74 : #include "src/compiler/typer.h"
75 : #include "src/compiler/value-numbering-reducer.h"
76 : #include "src/compiler/verifier.h"
77 : #include "src/compiler/wasm-compiler.h"
78 : #include "src/compiler/zone-stats.h"
79 : #include "src/disassembler.h"
80 : #include "src/isolate-inl.h"
81 : #include "src/objects/shared-function-info.h"
82 : #include "src/optimized-compilation-info.h"
83 : #include "src/ostreams.h"
84 : #include "src/parsing/parse-info.h"
85 : #include "src/register-configuration.h"
86 : #include "src/tracing/trace-event.h"
87 : #include "src/tracing/traced-value.h"
88 : #include "src/utils.h"
89 : #include "src/wasm/function-body-decoder.h"
90 : #include "src/wasm/function-compiler.h"
91 : #include "src/wasm/wasm-engine.h"
92 :
93 : namespace v8 {
94 : namespace internal {
95 : namespace compiler {
96 :
97 : class PipelineData {
98 : public:
99 : // For main entry point.
100 483294 : PipelineData(ZoneStats* zone_stats, Isolate* isolate,
101 : OptimizedCompilationInfo* info,
102 : PipelineStatistics* pipeline_statistics)
103 : : isolate_(isolate),
104 : allocator_(isolate->allocator()),
105 : info_(info),
106 : debug_name_(info_->GetDebugName()),
107 : may_have_unverifiable_graph_(false),
108 : zone_stats_(zone_stats),
109 : pipeline_statistics_(pipeline_statistics),
110 : graph_zone_scope_(zone_stats_, ZONE_NAME),
111 : graph_zone_(graph_zone_scope_.zone()),
112 483293 : instruction_zone_scope_(zone_stats_, ZONE_NAME),
113 : instruction_zone_(instruction_zone_scope_.zone()),
114 483295 : codegen_zone_scope_(zone_stats_, ZONE_NAME),
115 : codegen_zone_(codegen_zone_scope_.zone()),
116 483295 : register_allocation_zone_scope_(zone_stats_, ZONE_NAME),
117 : register_allocation_zone_(register_allocation_zone_scope_.zone()),
118 3383056 : assembler_options_(AssemblerOptions::Default(isolate)) {
119 : PhaseScope scope(pipeline_statistics, "V8.TFInitPipelineData");
120 966591 : graph_ = new (graph_zone_) Graph(graph_zone_);
121 966584 : source_positions_ = new (graph_zone_) SourcePositionTable(graph_);
122 : node_origins_ = info->trace_turbo_json_enabled()
123 2 : ? new (graph_zone_) NodeOriginTable(graph_)
124 966586 : : nullptr;
125 966586 : simplified_ = new (graph_zone_) SimplifiedOperatorBuilder(graph_zone_);
126 : machine_ = new (graph_zone_) MachineOperatorBuilder(
127 : graph_zone_, MachineType::PointerRepresentation(),
128 966587 : InstructionSelector::SupportedMachineOperatorFlags(),
129 966589 : InstructionSelector::AlignmentRequirements());
130 966589 : common_ = new (graph_zone_) CommonOperatorBuilder(graph_zone_);
131 966583 : javascript_ = new (graph_zone_) JSOperatorBuilder(graph_zone_);
132 : jsgraph_ = new (graph_zone_)
133 966583 : JSGraph(isolate_, graph_, common_, javascript_, simplified_, machine_);
134 966586 : broker_ = new (info_->zone()) JSHeapBroker(isolate_, info_->zone());
135 : dependencies_ =
136 966585 : new (info_->zone()) CompilationDependencies(broker_, info_->zone());
137 483290 : }
138 :
139 : // For WebAssembly compile entry point.
140 994170 : PipelineData(ZoneStats* zone_stats, wasm::WasmEngine* wasm_engine,
141 : OptimizedCompilationInfo* info, MachineGraph* mcgraph,
142 : PipelineStatistics* pipeline_statistics,
143 : SourcePositionTable* source_positions,
144 : NodeOriginTable* node_origins,
145 : const AssemblerOptions& assembler_options)
146 : : isolate_(nullptr),
147 : wasm_engine_(wasm_engine),
148 : allocator_(wasm_engine->allocator()),
149 : info_(info),
150 : debug_name_(info_->GetDebugName()),
151 : may_have_unverifiable_graph_(false),
152 : zone_stats_(zone_stats),
153 : pipeline_statistics_(pipeline_statistics),
154 : graph_zone_scope_(zone_stats_, ZONE_NAME),
155 : graph_zone_(graph_zone_scope_.zone()),
156 : graph_(mcgraph->graph()),
157 : source_positions_(source_positions),
158 : node_origins_(node_origins),
159 : machine_(mcgraph->machine()),
160 : common_(mcgraph->common()),
161 : mcgraph_(mcgraph),
162 995941 : instruction_zone_scope_(zone_stats_, ZONE_NAME),
163 : instruction_zone_(instruction_zone_scope_.zone()),
164 995965 : codegen_zone_scope_(zone_stats_, ZONE_NAME),
165 : codegen_zone_(codegen_zone_scope_.zone()),
166 996088 : register_allocation_zone_scope_(zone_stats_, ZONE_NAME),
167 : register_allocation_zone_(register_allocation_zone_scope_.zone()),
168 7964594 : assembler_options_(assembler_options) {}
169 :
170 : // For CodeStubAssembler and machine graph testing entry point.
171 1193086 : PipelineData(ZoneStats* zone_stats, OptimizedCompilationInfo* info,
172 : Isolate* isolate, Graph* graph, Schedule* schedule,
173 : SourcePositionTable* source_positions,
174 : NodeOriginTable* node_origins, JumpOptimizationInfo* jump_opt,
175 : const AssemblerOptions& assembler_options)
176 : : isolate_(isolate),
177 : allocator_(isolate->allocator()),
178 : info_(info),
179 : debug_name_(info_->GetDebugName()),
180 : zone_stats_(zone_stats),
181 : graph_zone_scope_(zone_stats_, ZONE_NAME),
182 : graph_zone_(graph_zone_scope_.zone()),
183 : graph_(graph),
184 : source_positions_(source_positions),
185 : node_origins_(node_origins),
186 : schedule_(schedule),
187 1193086 : instruction_zone_scope_(zone_stats_, ZONE_NAME),
188 : instruction_zone_(instruction_zone_scope_.zone()),
189 1193087 : codegen_zone_scope_(zone_stats_, ZONE_NAME),
190 : codegen_zone_(codegen_zone_scope_.zone()),
191 1193087 : register_allocation_zone_scope_(zone_stats_, ZONE_NAME),
192 : register_allocation_zone_(register_allocation_zone_scope_.zone()),
193 : jump_optimization_info_(jump_opt),
194 8351607 : assembler_options_(assembler_options) {
195 2386174 : simplified_ = new (graph_zone_) SimplifiedOperatorBuilder(graph_zone_);
196 : machine_ = new (graph_zone_) MachineOperatorBuilder(
197 : graph_zone_, MachineType::PointerRepresentation(),
198 2386174 : InstructionSelector::SupportedMachineOperatorFlags(),
199 2386174 : InstructionSelector::AlignmentRequirements());
200 2386174 : common_ = new (graph_zone_) CommonOperatorBuilder(graph_zone_);
201 2386174 : javascript_ = new (graph_zone_) JSOperatorBuilder(graph_zone_);
202 : jsgraph_ = new (graph_zone_)
203 2386174 : JSGraph(isolate_, graph_, common_, javascript_, simplified_, machine_);
204 1193087 : }
205 :
206 : // For register allocation testing entry point.
207 42 : PipelineData(ZoneStats* zone_stats, OptimizedCompilationInfo* info,
208 : Isolate* isolate, InstructionSequence* sequence)
209 : : isolate_(isolate),
210 : allocator_(isolate->allocator()),
211 : info_(info),
212 : debug_name_(info_->GetDebugName()),
213 : zone_stats_(zone_stats),
214 : graph_zone_scope_(zone_stats_, ZONE_NAME),
215 : instruction_zone_scope_(zone_stats_, ZONE_NAME),
216 : instruction_zone_(sequence->zone()),
217 : sequence_(sequence),
218 : codegen_zone_scope_(zone_stats_, ZONE_NAME),
219 : codegen_zone_(codegen_zone_scope_.zone()),
220 42 : register_allocation_zone_scope_(zone_stats_, ZONE_NAME),
221 : register_allocation_zone_(register_allocation_zone_scope_.zone()),
222 294 : assembler_options_(AssemblerOptions::Default(isolate)) {}
223 :
224 5341847 : ~PipelineData() {
225 : // Must happen before zones are destroyed.
226 2669288 : delete code_generator_;
227 2672536 : code_generator_ = nullptr;
228 2672536 : DeleteTyper();
229 :
230 2669849 : DeleteRegisterAllocationZone();
231 2669966 : DeleteInstructionZone();
232 2670069 : DeleteCodegenZone();
233 2672607 : DeleteGraphZone();
234 2672559 : }
235 :
236 : Isolate* isolate() const { return isolate_; }
237 : AccountingAllocator* allocator() const { return allocator_; }
238 : OptimizedCompilationInfo* info() const { return info_; }
239 : ZoneStats* zone_stats() const { return zone_stats_; }
240 : CompilationDependencies* dependencies() const { return dependencies_; }
241 : PipelineStatistics* pipeline_statistics() { return pipeline_statistics_; }
242 : OsrHelper* osr_helper() { return &(*osr_helper_); }
243 : bool compilation_failed() const { return compilation_failed_; }
244 9 : void set_compilation_failed() { compilation_failed_ = true; }
245 :
246 : bool verify_graph() const { return verify_graph_; }
247 134536 : void set_verify_graph(bool value) { verify_graph_ = value; }
248 :
249 : MaybeHandle<Code> code() { return code_; }
250 : void set_code(MaybeHandle<Code> code) {
251 : DCHECK(code_.is_null());
252 1589816 : code_ = code;
253 : }
254 :
255 : CodeGenerator* code_generator() const { return code_generator_; }
256 :
257 : // RawMachineAssembler generally produces graphs which cannot be verified.
258 : bool MayHaveUnverifiableGraph() const { return may_have_unverifiable_graph_; }
259 :
260 : Zone* graph_zone() const { return graph_zone_; }
261 : Graph* graph() const { return graph_; }
262 : SourcePositionTable* source_positions() const { return source_positions_; }
263 : NodeOriginTable* node_origins() const { return node_origins_; }
264 : MachineOperatorBuilder* machine() const { return machine_; }
265 : CommonOperatorBuilder* common() const { return common_; }
266 : JSOperatorBuilder* javascript() const { return javascript_; }
267 : JSGraph* jsgraph() const { return jsgraph_; }
268 : MachineGraph* mcgraph() const { return mcgraph_; }
269 928403 : Handle<Context> native_context() const {
270 1856808 : return handle(info()->native_context(), isolate());
271 : }
272 : Handle<JSGlobalObject> global_object() const {
273 : return handle(info()->global_object(), isolate());
274 : }
275 :
276 : JSHeapBroker* broker() const { return broker_; }
277 :
278 : Schedule* schedule() const { return schedule_; }
279 : void set_schedule(Schedule* schedule) {
280 : DCHECK(!schedule_);
281 2404569 : schedule_ = schedule;
282 : }
283 : void reset_schedule() { schedule_ = nullptr; }
284 :
285 : Zone* instruction_zone() const { return instruction_zone_; }
286 : Zone* codegen_zone() const { return codegen_zone_; }
287 : InstructionSequence* sequence() const { return sequence_; }
288 : Frame* frame() const { return frame_; }
289 464163 : std::vector<Handle<Map>>* embedded_maps() { return &embedded_maps_; }
290 :
291 : Zone* register_allocation_zone() const { return register_allocation_zone_; }
292 : RegisterAllocationData* register_allocation_data() const {
293 : return register_allocation_data_;
294 : }
295 :
296 : BasicBlockProfiler::Data* profiler_data() const { return profiler_data_; }
297 : void set_profiler_data(BasicBlockProfiler::Data* profiler_data) {
298 8 : profiler_data_ = profiler_data;
299 : }
300 :
301 : std::string const& source_position_output() const {
302 : return source_position_output_;
303 : }
304 : void set_source_position_output(std::string const& source_position_output) {
305 9 : source_position_output_ = source_position_output;
306 : }
307 :
308 : JumpOptimizationInfo* jump_optimization_info() const {
309 : return jump_optimization_info_;
310 : }
311 :
312 : const AssemblerOptions& assembler_options() const {
313 : return assembler_options_;
314 : }
315 :
316 : CodeTracer* GetCodeTracer() const {
317 : return wasm_engine_ == nullptr ? isolate_->GetCodeTracer()
318 88 : : wasm_engine_->GetCodeTracer();
319 : }
320 :
321 464166 : Typer* CreateTyper() {
322 : DCHECK_NULL(typer_);
323 464166 : typer_ = new Typer(broker(), typer_flags_, graph());
324 464166 : return typer_;
325 : }
326 :
327 : void AddTyperFlag(Typer::Flag flag) {
328 : DCHECK_NULL(typer_);
329 : typer_flags_ |= flag;
330 : }
331 :
332 3134072 : void DeleteTyper() {
333 3134072 : delete typer_;
334 3134072 : typer_ = nullptr;
335 3134072 : }
336 :
337 5313558 : void DeleteGraphZone() {
338 5313558 : if (graph_zone_ == nullptr) return;
339 : graph_zone_scope_.Destroy();
340 2672523 : graph_zone_ = nullptr;
341 2672523 : graph_ = nullptr;
342 2672523 : source_positions_ = nullptr;
343 2672523 : node_origins_ = nullptr;
344 2672523 : simplified_ = nullptr;
345 2672523 : machine_ = nullptr;
346 2672523 : common_ = nullptr;
347 2672523 : javascript_ = nullptr;
348 2672523 : jsgraph_ = nullptr;
349 2672523 : mcgraph_ = nullptr;
350 2672523 : schedule_ = nullptr;
351 : }
352 :
353 5312289 : void DeleteInstructionZone() {
354 5312289 : if (instruction_zone_ == nullptr) return;
355 : instruction_zone_scope_.Destroy();
356 2672538 : instruction_zone_ = nullptr;
357 2672538 : sequence_ = nullptr;
358 : }
359 :
360 2670206 : void DeleteCodegenZone() {
361 2670206 : if (codegen_zone_ == nullptr) return;
362 : codegen_zone_scope_.Destroy();
363 2672637 : codegen_zone_ = nullptr;
364 2672637 : dependencies_ = nullptr;
365 2672637 : broker_ = nullptr;
366 2672637 : frame_ = nullptr;
367 : }
368 :
369 5311310 : void DeleteRegisterAllocationZone() {
370 5311310 : if (register_allocation_zone_ == nullptr) return;
371 : register_allocation_zone_scope_.Destroy();
372 2672653 : register_allocation_zone_ = nullptr;
373 2672653 : register_allocation_data_ = nullptr;
374 : }
375 :
376 2640486 : void InitializeInstructionSequence(const CallDescriptor* call_descriptor) {
377 : DCHECK_NULL(sequence_);
378 : InstructionBlocks* instruction_blocks =
379 : InstructionSequence::InstructionBlocksFor(instruction_zone(),
380 2640486 : schedule());
381 : sequence_ = new (instruction_zone())
382 2642325 : InstructionSequence(isolate(), instruction_zone(), instruction_blocks);
383 5287097 : if (call_descriptor && call_descriptor->RequiresFrameAsIncoming()) {
384 2538443 : sequence_->instruction_blocks()[0]->mark_needs_frame();
385 : } else {
386 : DCHECK_EQ(0u, call_descriptor->CalleeSavedFPRegisters());
387 : DCHECK_EQ(0u, call_descriptor->CalleeSavedRegisters());
388 : }
389 2643547 : }
390 :
391 2641453 : void InitializeFrameData(CallDescriptor* call_descriptor) {
392 : DCHECK_NULL(frame_);
393 : int fixed_frame_size = 0;
394 2641453 : if (call_descriptor != nullptr) {
395 2642043 : fixed_frame_size = call_descriptor->CalculateFixedFrameSize();
396 : }
397 2643419 : frame_ = new (codegen_zone()) Frame(fixed_frame_size);
398 2641028 : }
399 :
400 2640945 : void InitializeRegisterAllocationData(const RegisterConfiguration* config,
401 : CallDescriptor* call_descriptor,
402 : RegisterAllocationFlags flags) {
403 : DCHECK_NULL(register_allocation_data_);
404 : register_allocation_data_ = new (register_allocation_zone())
405 : RegisterAllocationData(config, register_allocation_zone(), frame(),
406 5284277 : sequence(), flags, debug_name());
407 2642773 : }
408 :
409 : void InitializeOsrHelper() {
410 : DCHECK(!osr_helper_.has_value());
411 : osr_helper_.emplace(info());
412 : }
413 :
414 : void set_start_source_position(int position) {
415 : DCHECK_EQ(start_source_position_, kNoSourcePosition);
416 460824 : start_source_position_ = position;
417 : }
418 :
419 2640279 : void InitializeCodeGenerator(Linkage* linkage,
420 : std::unique_ptr<AssemblerBuffer> buffer) {
421 : DCHECK_NULL(code_generator_);
422 :
423 : code_generator_ = new CodeGenerator(
424 : codegen_zone(), frame(), linkage, sequence(), info(), isolate(),
425 : osr_helper_, start_source_position_, jump_optimization_info_,
426 : info()->GetPoisoningMitigationLevel(), assembler_options_,
427 7921148 : info_->builtin_index(), std::move(buffer));
428 2640590 : }
429 :
430 : void BeginPhaseKind(const char* phase_kind_name) {
431 7294791 : if (pipeline_statistics() != nullptr) {
432 0 : pipeline_statistics()->BeginPhaseKind(phase_kind_name);
433 : }
434 : }
435 :
436 : void EndPhaseKind() {
437 5749833 : if (pipeline_statistics() != nullptr) {
438 0 : pipeline_statistics()->EndPhaseKind();
439 : }
440 : }
441 :
442 : const char* debug_name() const { return debug_name_.get(); }
443 :
444 : private:
445 : Isolate* const isolate_;
446 : wasm::WasmEngine* const wasm_engine_ = nullptr;
447 : AccountingAllocator* const allocator_;
448 : OptimizedCompilationInfo* const info_;
449 : std::unique_ptr<char[]> debug_name_;
450 : bool may_have_unverifiable_graph_ = true;
451 : ZoneStats* const zone_stats_;
452 : PipelineStatistics* pipeline_statistics_ = nullptr;
453 : bool compilation_failed_ = false;
454 : bool verify_graph_ = false;
455 : int start_source_position_ = kNoSourcePosition;
456 : base::Optional<OsrHelper> osr_helper_;
457 : MaybeHandle<Code> code_;
458 : CodeGenerator* code_generator_ = nullptr;
459 : Typer* typer_ = nullptr;
460 : Typer::Flags typer_flags_ = Typer::kNoFlags;
461 :
462 : // All objects in the following group of fields are allocated in graph_zone_.
463 : // They are all set to nullptr when the graph_zone_ is destroyed.
464 : ZoneStats::Scope graph_zone_scope_;
465 : Zone* graph_zone_ = nullptr;
466 : Graph* graph_ = nullptr;
467 : SourcePositionTable* source_positions_ = nullptr;
468 : NodeOriginTable* node_origins_ = nullptr;
469 : SimplifiedOperatorBuilder* simplified_ = nullptr;
470 : MachineOperatorBuilder* machine_ = nullptr;
471 : CommonOperatorBuilder* common_ = nullptr;
472 : JSOperatorBuilder* javascript_ = nullptr;
473 : JSGraph* jsgraph_ = nullptr;
474 : MachineGraph* mcgraph_ = nullptr;
475 : Schedule* schedule_ = nullptr;
476 :
477 : // All objects in the following group of fields are allocated in
478 : // instruction_zone_. They are all set to nullptr when the instruction_zone_
479 : // is destroyed.
480 : ZoneStats::Scope instruction_zone_scope_;
481 : Zone* instruction_zone_;
482 : InstructionSequence* sequence_ = nullptr;
483 :
484 : // All objects in the following group of fields are allocated in
485 : // codegen_zone_. They are all set to nullptr when the codegen_zone_
486 : // is destroyed.
487 : ZoneStats::Scope codegen_zone_scope_;
488 : Zone* codegen_zone_;
489 : CompilationDependencies* dependencies_ = nullptr;
490 : JSHeapBroker* broker_ = nullptr;
491 : Frame* frame_ = nullptr;
492 :
493 : // embedded_maps_ keeps track of maps we've embedded as Uint32 constants.
494 : // We do this in order to notify the garbage collector at code-gen time.
495 : std::vector<Handle<Map>> embedded_maps_;
496 :
497 : // All objects in the following group of fields are allocated in
498 : // register_allocation_zone_. They are all set to nullptr when the zone is
499 : // destroyed.
500 : ZoneStats::Scope register_allocation_zone_scope_;
501 : Zone* register_allocation_zone_;
502 : RegisterAllocationData* register_allocation_data_ = nullptr;
503 :
504 : // Basic block profiling support.
505 : BasicBlockProfiler::Data* profiler_data_ = nullptr;
506 :
507 : // Source position output for --trace-turbo.
508 : std::string source_position_output_;
509 :
510 : JumpOptimizationInfo* jump_optimization_info_ = nullptr;
511 : AssemblerOptions assembler_options_;
512 :
513 : DISALLOW_COPY_AND_ASSIGN(PipelineData);
514 : };
515 :
516 : class PipelineImpl final {
517 : public:
518 2672327 : explicit PipelineImpl(PipelineData* data) : data_(data) {}
519 :
520 : // Helpers for executing pipeline phases.
521 : template <typename Phase, typename... Args>
522 : void Run(Args&&... args);
523 :
524 : // Step A. Run the graph creation and initial optimization passes.
525 : bool CreateGraph();
526 :
527 : // B. Run the concurrent optimization passes.
528 : bool OptimizeGraph(Linkage* linkage);
529 :
530 : // Substep B.1. Produce a scheduled graph.
531 : void ComputeScheduledGraph();
532 :
533 : // Substep B.2. Select instructions from a scheduled graph.
534 : bool SelectInstructions(Linkage* linkage);
535 :
536 : // Step C. Run the code assembly pass.
537 : void AssembleCode(Linkage* linkage,
538 : std::unique_ptr<AssemblerBuffer> buffer = {});
539 :
540 : // Step D. Run the code finalization pass.
541 : MaybeHandle<Code> FinalizeCode(bool retire_broker = true);
542 :
543 : // Step E. Install any code dependencies.
544 : bool CommitDependencies(Handle<Code> code);
545 :
546 : void VerifyGeneratedCodeIsIdempotent();
547 : void RunPrintAndVerify(const char* phase, bool untyped = false);
548 : bool SelectInstructionsAndAssemble(CallDescriptor* call_descriptor);
549 : MaybeHandle<Code> GenerateCode(CallDescriptor* call_descriptor);
550 : void AllocateRegisters(const RegisterConfiguration* config,
551 : CallDescriptor* call_descriptor, bool run_verifier);
552 :
553 : OptimizedCompilationInfo* info() const;
554 : Isolate* isolate() const;
555 : CodeGenerator* code_generator() const;
556 :
557 : private:
558 : PipelineData* const data_;
559 : };
560 :
561 : namespace {
562 :
563 0 : void PrintFunctionSource(OptimizedCompilationInfo* info, Isolate* isolate,
564 : int source_id, Handle<SharedFunctionInfo> shared) {
565 0 : if (!shared->script()->IsUndefined(isolate)) {
566 0 : Handle<Script> script(Script::cast(shared->script()), isolate);
567 :
568 0 : if (!script->source()->IsUndefined(isolate)) {
569 0 : CodeTracer::Scope tracing_scope(isolate->GetCodeTracer());
570 : Object source_name = script->name();
571 0 : OFStream os(tracing_scope.file());
572 0 : os << "--- FUNCTION SOURCE (";
573 0 : if (source_name->IsString()) {
574 0 : os << String::cast(source_name)->ToCString().get() << ":";
575 : }
576 0 : os << shared->DebugName()->ToCString().get() << ") id{";
577 0 : os << info->optimization_id() << "," << source_id << "} start{";
578 0 : os << shared->StartPosition() << "} ---\n";
579 : {
580 : DisallowHeapAllocation no_allocation;
581 0 : int start = shared->StartPosition();
582 0 : int len = shared->EndPosition() - start;
583 : SubStringRange source(String::cast(script->source()), no_allocation,
584 : start, len);
585 0 : for (const auto& c : source) {
586 0 : os << AsReversiblyEscapedUC16(c);
587 : }
588 : }
589 :
590 0 : os << "\n--- END ---\n";
591 : }
592 : }
593 0 : }
594 :
595 : // Print information for the given inlining: which function was inlined and
596 : // where the inlining occurred.
597 0 : void PrintInlinedFunctionInfo(
598 : OptimizedCompilationInfo* info, Isolate* isolate, int source_id,
599 : int inlining_id, const OptimizedCompilationInfo::InlinedFunctionHolder& h) {
600 0 : CodeTracer::Scope tracing_scope(isolate->GetCodeTracer());
601 0 : OFStream os(tracing_scope.file());
602 0 : os << "INLINE (" << h.shared_info->DebugName()->ToCString().get() << ") id{"
603 0 : << info->optimization_id() << "," << source_id << "} AS " << inlining_id
604 0 : << " AT ";
605 0 : const SourcePosition position = h.position.position;
606 0 : if (position.IsKnown()) {
607 0 : os << "<" << position.InliningId() << ":" << position.ScriptOffset() << ">";
608 : } else {
609 0 : os << "<?>";
610 : }
611 : os << std::endl;
612 0 : }
613 :
614 : // Print the source of all functions that participated in this optimizing
615 : // compilation. For inlined functions print source position of their inlining.
616 0 : void PrintParticipatingSource(OptimizedCompilationInfo* info,
617 : Isolate* isolate) {
618 : AllowDeferredHandleDereference allow_deference_for_print_code;
619 :
620 0 : SourceIdAssigner id_assigner(info->inlined_functions().size());
621 0 : PrintFunctionSource(info, isolate, -1, info->shared_info());
622 : const auto& inlined = info->inlined_functions();
623 0 : for (unsigned id = 0; id < inlined.size(); id++) {
624 0 : const int source_id = id_assigner.GetIdFor(inlined[id].shared_info);
625 0 : PrintFunctionSource(info, isolate, source_id, inlined[id].shared_info);
626 0 : PrintInlinedFunctionInfo(info, isolate, source_id, id, inlined[id]);
627 : }
628 0 : }
629 :
630 : // Print the code after compiling it.
631 1589815 : void PrintCode(Isolate* isolate, Handle<Code> code,
632 : OptimizedCompilationInfo* info) {
633 1589815 : if (FLAG_print_opt_source && info->IsOptimizing()) {
634 0 : PrintParticipatingSource(info, isolate);
635 : }
636 :
637 : #ifdef ENABLE_DISASSEMBLER
638 : AllowDeferredHandleDereference allow_deference_for_print_code;
639 : bool print_code =
640 : FLAG_print_code ||
641 : (info->IsOptimizing() && FLAG_print_opt_code &&
642 : info->shared_info()->PassesFilter(FLAG_print_opt_code_filter));
643 : if (print_code) {
644 : std::unique_ptr<char[]> debug_name = info->GetDebugName();
645 : CodeTracer::Scope tracing_scope(isolate->GetCodeTracer());
646 : OFStream os(tracing_scope.file());
647 :
648 : // Print the source code if available.
649 : bool print_source = code->kind() == Code::OPTIMIZED_FUNCTION;
650 : if (print_source) {
651 : Handle<SharedFunctionInfo> shared = info->shared_info();
652 : if (shared->script()->IsScript() &&
653 : !Script::cast(shared->script())->source()->IsUndefined(isolate)) {
654 : os << "--- Raw source ---\n";
655 : StringCharacterStream stream(
656 : String::cast(Script::cast(shared->script())->source()),
657 : shared->StartPosition());
658 : // fun->end_position() points to the last character in the stream. We
659 : // need to compensate by adding one to calculate the length.
660 : int source_len = shared->EndPosition() - shared->StartPosition() + 1;
661 : for (int i = 0; i < source_len; i++) {
662 : if (stream.HasMore()) {
663 : os << AsReversiblyEscapedUC16(stream.GetNext());
664 : }
665 : }
666 : os << "\n\n";
667 : }
668 : }
669 : if (info->IsOptimizing()) {
670 : os << "--- Optimized code ---\n"
671 : << "optimization_id = " << info->optimization_id() << "\n";
672 : } else {
673 : os << "--- Code ---\n";
674 : }
675 : if (print_source) {
676 : Handle<SharedFunctionInfo> shared = info->shared_info();
677 : os << "source_position = " << shared->StartPosition() << "\n";
678 : }
679 : code->Disassemble(debug_name.get(), os);
680 : os << "--- End code ---\n";
681 : }
682 : #endif // ENABLE_DISASSEMBLER
683 1589815 : }
684 :
685 2866490 : void TraceSchedule(OptimizedCompilationInfo* info, PipelineData* data,
686 : Schedule* schedule, const char* phase_name) {
687 2866490 : if (info->trace_turbo_json_enabled()) {
688 : AllowHandleDereference allow_deref;
689 20 : TurboJsonFile json_of(info, std::ios_base::app);
690 10 : json_of << "{\"name\":\"" << phase_name << "\",\"type\":\"schedule\""
691 10 : << ",\"data\":\"";
692 20 : std::stringstream schedule_stream;
693 10 : schedule_stream << *schedule;
694 : std::string schedule_string(schedule_stream.str());
695 6543 : for (const auto& c : schedule_string) {
696 13066 : json_of << AsEscapedUC16ForJSON(c);
697 : }
698 10 : json_of << "\"},\n";
699 : }
700 2866490 : if (info->trace_turbo_graph_enabled() || FLAG_trace_turbo_scheduler) {
701 : AllowHandleDereference allow_deref;
702 10 : CodeTracer::Scope tracing_scope(data->GetCodeTracer());
703 20 : OFStream os(tracing_scope.file());
704 10 : os << "-- Schedule --------------------------------------\n" << *schedule;
705 : }
706 2866490 : }
707 :
708 :
709 : class SourcePositionWrapper final : public Reducer {
710 : public:
711 : SourcePositionWrapper(Reducer* reducer, SourcePositionTable* table)
712 511128 : : reducer_(reducer), table_(table) {}
713 0 : ~SourcePositionWrapper() final = default;
714 :
715 1227 : const char* reducer_name() const override { return reducer_->reducer_name(); }
716 :
717 43604772 : Reduction Reduce(Node* node) final {
718 43604772 : SourcePosition const pos = table_->GetSourcePosition(node);
719 43604729 : SourcePositionTable::Scope position(table_, pos);
720 87209475 : return reducer_->Reduce(node);
721 : }
722 :
723 645861 : void Finalize() final { reducer_->Finalize(); }
724 :
725 : private:
726 : Reducer* const reducer_;
727 : SourcePositionTable* const table_;
728 :
729 : DISALLOW_COPY_AND_ASSIGN(SourcePositionWrapper);
730 : };
731 :
732 : class NodeOriginsWrapper final : public Reducer {
733 : public:
734 : NodeOriginsWrapper(Reducer* reducer, NodeOriginTable* table)
735 53 : : reducer_(reducer), table_(table) {}
736 0 : ~NodeOriginsWrapper() final = default;
737 :
738 1323 : const char* reducer_name() const override { return reducer_->reducer_name(); }
739 :
740 1323 : Reduction Reduce(Node* node) final {
741 1323 : NodeOriginTable::Scope position(table_, reducer_name(), node);
742 2646 : return reducer_->Reduce(node);
743 : }
744 :
745 61 : void Finalize() final { reducer_->Finalize(); }
746 :
747 : private:
748 : Reducer* const reducer_;
749 : NodeOriginTable* const table_;
750 :
751 : DISALLOW_COPY_AND_ASSIGN(NodeOriginsWrapper);
752 : };
753 :
754 21798435 : void AddReducer(PipelineData* data, GraphReducer* graph_reducer,
755 : Reducer* reducer) {
756 21798435 : if (data->info()->is_source_positions_enabled()) {
757 : void* const buffer = data->graph_zone()->New(sizeof(SourcePositionWrapper));
758 : SourcePositionWrapper* const wrapper =
759 511128 : new (buffer) SourcePositionWrapper(reducer, data->source_positions());
760 : reducer = wrapper;
761 : }
762 21798435 : if (data->info()->trace_turbo_json_enabled()) {
763 : void* const buffer = data->graph_zone()->New(sizeof(NodeOriginsWrapper));
764 : NodeOriginsWrapper* const wrapper =
765 53 : new (buffer) NodeOriginsWrapper(reducer, data->node_origins());
766 : reducer = wrapper;
767 : }
768 :
769 21798435 : graph_reducer->AddReducer(reducer);
770 21798471 : }
771 :
772 133037626 : class PipelineRunScope {
773 : public:
774 66498587 : PipelineRunScope(PipelineData* data, const char* phase_name)
775 : : phase_scope_(data->pipeline_statistics(), phase_name),
776 : zone_scope_(data->zone_stats(), ZONE_NAME),
777 66501441 : origin_scope_(data->node_origins(), phase_name) {}
778 :
779 : Zone* zone() { return zone_scope_.zone(); }
780 :
781 : private:
782 : PhaseScope phase_scope_;
783 : ZoneStats::Scope zone_scope_;
784 : NodeOriginTable::PhaseScope origin_scope_;
785 : };
786 :
787 483294 : PipelineStatistics* CreatePipelineStatistics(Handle<Script> script,
788 : OptimizedCompilationInfo* info,
789 : Isolate* isolate,
790 : ZoneStats* zone_stats) {
791 : PipelineStatistics* pipeline_statistics = nullptr;
792 :
793 : bool tracing_enabled;
794 966588 : TRACE_EVENT_CATEGORY_GROUP_ENABLED(TRACE_DISABLED_BY_DEFAULT("v8.turbofan"),
795 : &tracing_enabled);
796 483294 : if (tracing_enabled || FLAG_turbo_stats || FLAG_turbo_stats_nvp) {
797 : pipeline_statistics =
798 0 : new PipelineStatistics(info, isolate->GetTurboStatistics(), zone_stats);
799 0 : pipeline_statistics->BeginPhaseKind("V8.TFInitializing");
800 : }
801 :
802 483296 : if (info->trace_turbo_json_enabled()) {
803 2 : TurboJsonFile json_of(info, std::ios_base::trunc);
804 1 : json_of << "{\"function\" : ";
805 2 : JsonPrintFunctionSource(json_of, -1, info->GetDebugName(), script, isolate,
806 1 : info->shared_info());
807 1 : json_of << ",\n\"phases\":[";
808 : }
809 :
810 483296 : return pipeline_statistics;
811 : }
812 :
813 617024 : PipelineStatistics* CreatePipelineStatistics(
814 : wasm::WasmEngine* wasm_engine, wasm::FunctionBody function_body,
815 : const wasm::WasmModule* wasm_module, OptimizedCompilationInfo* info,
816 : ZoneStats* zone_stats) {
817 : PipelineStatistics* pipeline_statistics = nullptr;
818 :
819 : bool tracing_enabled;
820 1234027 : TRACE_EVENT_CATEGORY_GROUP_ENABLED(TRACE_DISABLED_BY_DEFAULT("v8.wasm"),
821 : &tracing_enabled);
822 617003 : if (tracing_enabled || FLAG_turbo_stats_wasm) {
823 : pipeline_statistics = new PipelineStatistics(
824 0 : info, wasm_engine->GetOrCreateTurboStatistics(), zone_stats);
825 0 : pipeline_statistics->BeginPhaseKind("V8.WasmInitializing");
826 : }
827 :
828 617142 : if (info->trace_turbo_json_enabled()) {
829 16 : TurboJsonFile json_of(info, std::ios_base::trunc);
830 8 : std::unique_ptr<char[]> function_name = info->GetDebugName();
831 8 : json_of << "{\"function\":\"" << function_name.get() << "\", \"source\":\"";
832 16 : AccountingAllocator allocator;
833 16 : std::ostringstream disassembly;
834 : std::vector<int> source_positions;
835 : wasm::PrintRawWasmCode(&allocator, function_body, wasm_module,
836 8 : wasm::kPrintLocals, disassembly, &source_positions);
837 960 : for (const auto& c : disassembly.str()) {
838 1904 : json_of << AsEscapedUC16ForJSON(c);
839 : }
840 8 : json_of << "\",\n\"sourceLineToBytecodePosition\" : [";
841 : bool insert_comma = false;
842 72 : for (auto val : source_positions) {
843 64 : if (insert_comma) {
844 56 : json_of << ", ";
845 : }
846 64 : json_of << val;
847 : insert_comma = true;
848 : }
849 8 : json_of << "],\n\"phases\":[";
850 : }
851 :
852 617142 : return pipeline_statistics;
853 : }
854 :
855 : } // namespace
856 :
// Compilation job driving a full TurboFan optimizing compilation of a JS
// function through the standard prepare/execute/finalize protocol. Note:
// member declaration order matters — it is the construction order relied on
// by the constructor's initializer list.
class PipelineCompilationJob final : public OptimizedCompilationJob {
 public:
  PipelineCompilationJob(Isolate* isolate,
                         Handle<SharedFunctionInfo> shared_info,
                         Handle<JSFunction> function);
  ~PipelineCompilationJob();

 protected:
  Status PrepareJobImpl(Isolate* isolate) final;
  Status ExecuteJobImpl() final;
  Status FinalizeJobImpl(Isolate* isolate) final;

  // Registers weak object to optimized code dependencies.
  void RegisterWeakObjectsInOptimizedCode(Handle<Code> code, Isolate* isolate);

 private:
  Zone zone_;
  ZoneStats zone_stats_;
  OptimizedCompilationInfo compilation_info_;
  std::unique_ptr<PipelineStatistics> pipeline_statistics_;
  PipelineData data_;
  PipelineImpl pipeline_;
  // Owned by compilation_info()->zone(); set in PrepareJobImpl.
  Linkage* linkage_;

  DISALLOW_COPY_AND_ASSIGN(PipelineCompilationJob);
};
883 :
884 479915 : PipelineCompilationJob::PipelineCompilationJob(
885 : Isolate* isolate, Handle<SharedFunctionInfo> shared_info,
886 : Handle<JSFunction> function)
887 : // Note that the OptimizedCompilationInfo is not initialized at the time
888 : // we pass it to the CompilationJob constructor, but it is not
889 : // dereferenced there.
890 : : OptimizedCompilationJob(
891 : function->GetIsolate()->stack_guard()->real_climit(),
892 : &compilation_info_, "TurboFan"),
893 : zone_(function->GetIsolate()->allocator(), ZONE_NAME),
894 : zone_stats_(function->GetIsolate()->allocator()),
895 : compilation_info_(&zone_, function->GetIsolate(), shared_info, function),
896 : pipeline_statistics_(CreatePipelineStatistics(
897 : handle(Script::cast(shared_info->script()), isolate),
898 : compilation_info(), function->GetIsolate(), &zone_stats_)),
899 : data_(&zone_stats_, function->GetIsolate(), compilation_info(),
900 : pipeline_statistics_.get()),
901 : pipeline_(&data_),
902 4799143 : linkage_(nullptr) {
903 1439730 : TRACE_EVENT_WITH_FLOW1(
904 : TRACE_DISABLED_BY_DEFAULT("v8.compile"), "v8.optimizingCompile.start",
905 : this, TRACE_EVENT_FLAG_FLOW_OUT, "function", shared_info->TraceIDRef());
906 479909 : }
907 :
908 1919663 : PipelineCompilationJob::~PipelineCompilationJob() {
909 1439748 : TRACE_EVENT_WITH_FLOW1(TRACE_DISABLED_BY_DEFAULT("v8.compile"),
910 : "v8.optimizingCompile.end", this,
911 : TRACE_EVENT_FLAG_FLOW_IN, "compilationInfo",
912 : compilation_info()->ToTracedValue());
913 959832 : }
914 :
915 460852 : PipelineCompilationJob::Status PipelineCompilationJob::PrepareJobImpl(
916 : Isolate* isolate) {
917 1382558 : TRACE_EVENT_WITH_FLOW1(
918 : TRACE_DISABLED_BY_DEFAULT("v8.compile"), "v8.optimizingCompile.prepare",
919 : this, TRACE_EVENT_FLAG_FLOW_IN | TRACE_EVENT_FLAG_FLOW_OUT, "function",
920 : compilation_info()->shared_info()->TraceIDRef());
921 460852 : if (compilation_info()->bytecode_array()->length() >
922 : FLAG_max_optimized_bytecode_size) {
923 30 : return AbortOptimization(BailoutReason::kFunctionTooBig);
924 : }
925 :
926 460822 : if (!FLAG_always_opt) {
927 : compilation_info()->MarkAsBailoutOnUninitialized();
928 : }
929 460822 : if (FLAG_turbo_loop_peeling) {
930 : compilation_info()->MarkAsLoopPeelingEnabled();
931 : }
932 460822 : if (FLAG_turbo_inlining) {
933 : compilation_info()->MarkAsInliningEnabled();
934 : }
935 460822 : if (FLAG_inline_accessors) {
936 : compilation_info()->MarkAsAccessorInliningEnabled();
937 : }
938 :
939 : // This is the bottleneck for computing and setting poisoning level in the
940 : // optimizing compiler.
941 : PoisoningMitigationLevel load_poisoning =
942 : PoisoningMitigationLevel::kDontPoison;
943 460822 : if (FLAG_untrusted_code_mitigations) {
944 : // For full mitigations, this can be changed to
945 : // PoisoningMitigationLevel::kPoisonAll.
946 : load_poisoning = PoisoningMitigationLevel::kPoisonCriticalOnly;
947 : }
948 : compilation_info()->SetPoisoningMitigationLevel(load_poisoning);
949 :
950 460822 : if (FLAG_turbo_allocation_folding) {
951 : compilation_info()->MarkAsAllocationFoldingEnabled();
952 : }
953 :
954 460822 : if (compilation_info()->closure()->raw_feedback_cell()->map() ==
955 : ReadOnlyRoots(isolate).one_closure_cell_map()) {
956 : compilation_info()->MarkAsFunctionContextSpecializing();
957 : }
958 :
959 460822 : if (compilation_info()->is_source_positions_enabled()) {
960 : SharedFunctionInfo::EnsureSourcePositionsAvailable(
961 11368 : isolate, compilation_info()->shared_info());
962 : }
963 :
964 921646 : data_.set_start_source_position(
965 : compilation_info()->shared_info()->StartPosition());
966 :
967 : linkage_ = new (compilation_info()->zone()) Linkage(
968 921647 : Linkage::ComputeIncoming(compilation_info()->zone(), compilation_info()));
969 :
970 460823 : if (!pipeline_.CreateGraph()) {
971 0 : if (isolate->has_pending_exception()) return FAILED; // Stack overflowed.
972 0 : return AbortOptimization(BailoutReason::kGraphBuildingFailed);
973 : }
974 :
975 460823 : if (compilation_info()->is_osr()) data_.InitializeOsrHelper();
976 :
977 : // Make sure that we have generated the deopt entries code. This is in order
978 : // to avoid triggering the generation of deopt entries later during code
979 : // assembly.
980 460823 : Deoptimizer::EnsureCodeForDeoptimizationEntries(isolate);
981 :
982 460824 : return SUCCEEDED;
983 : }
984 :
985 460786 : PipelineCompilationJob::Status PipelineCompilationJob::ExecuteJobImpl() {
986 1382359 : TRACE_EVENT_WITH_FLOW1(
987 : TRACE_DISABLED_BY_DEFAULT("v8.compile"), "v8.optimizingCompile.execute",
988 : this, TRACE_EVENT_FLAG_FLOW_IN | TRACE_EVENT_FLAG_FLOW_OUT, "function",
989 : compilation_info()->shared_info()->TraceIDRef());
990 460786 : if (!pipeline_.OptimizeGraph(linkage_)) return FAILED;
991 921548 : pipeline_.AssembleCode(linkage_);
992 460776 : return SUCCEEDED;
993 : }
994 :
995 460613 : PipelineCompilationJob::Status PipelineCompilationJob::FinalizeJobImpl(
996 : Isolate* isolate) {
997 1381843 : TRACE_EVENT_WITH_FLOW1(
998 : TRACE_DISABLED_BY_DEFAULT("v8.compile"), "v8.optimizingCompile.finalize",
999 : this, TRACE_EVENT_FLAG_FLOW_IN | TRACE_EVENT_FLAG_FLOW_OUT, "function",
1000 : compilation_info()->shared_info()->TraceIDRef());
1001 460613 : MaybeHandle<Code> maybe_code = pipeline_.FinalizeCode();
1002 : Handle<Code> code;
1003 460617 : if (!maybe_code.ToHandle(&code)) {
1004 0 : if (compilation_info()->bailout_reason() == BailoutReason::kNoReason) {
1005 0 : return AbortOptimization(BailoutReason::kCodeGenerationFailed);
1006 : }
1007 : return FAILED;
1008 : }
1009 460617 : if (!pipeline_.CommitDependencies(code)) {
1010 66 : return RetryOptimization(BailoutReason::kBailedOutDueToDependencyChange);
1011 : }
1012 :
1013 : compilation_info()->SetCode(code);
1014 460551 : compilation_info()->native_context()->AddOptimizedCode(*code);
1015 460551 : RegisterWeakObjectsInOptimizedCode(code, isolate);
1016 460551 : return SUCCEEDED;
1017 : }
1018 :
1019 460551 : void PipelineCompilationJob::RegisterWeakObjectsInOptimizedCode(
1020 : Handle<Code> code, Isolate* isolate) {
1021 : DCHECK(code->is_optimized_code());
1022 : std::vector<Handle<Map>> retained_maps;
1023 : {
1024 : DisallowHeapAllocation no_gc;
1025 : int const mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
1026 5553126 : for (RelocIterator it(*code, mode_mask); !it.done(); it.next()) {
1027 : RelocInfo::Mode mode = it.rinfo()->rmode();
1028 10185149 : if (mode == RelocInfo::EMBEDDED_OBJECT &&
1029 5092574 : code->IsWeakObjectInOptimizedCode(it.rinfo()->target_object())) {
1030 : Handle<HeapObject> object(HeapObject::cast(it.rinfo()->target_object()),
1031 : isolate);
1032 2676487 : if (object->IsMap()) {
1033 134312 : retained_maps.push_back(Handle<Map>::cast(object));
1034 : }
1035 : }
1036 : }
1037 : }
1038 :
1039 594863 : for (Handle<Map> map : retained_maps) {
1040 134312 : isolate->heap()->AddRetainedMap(map);
1041 : }
1042 :
1043 : // Additionally, gather embedded maps if we have any.
1044 460551 : for (Handle<Map> map : *data_.embedded_maps()) {
1045 0 : if (code->IsWeakObjectInOptimizedCode(*map)) {
1046 0 : isolate->heap()->AddRetainedMap(map);
1047 : }
1048 : }
1049 :
1050 460551 : code->set_can_have_weak_objects(true);
1051 460550 : }
1052 :
1053 : template <typename Phase, typename... Args>
1054 65914850 : void PipelineImpl::Run(Args&&... args) {
1055 131842563 : PipelineRunScope scope(this->data_, Phase::phase_name());
1056 : Phase phase;
1057 71746957 : phase.Run(this->data_, scope.zone(), std::forward<Args>(args)...);
1058 65935299 : }
1059 :
1060 : struct GraphBuilderPhase {
1061 : static const char* phase_name() { return "V8.TFBytecodeGraphBuilder"; }
1062 :
1063 464201 : void Run(PipelineData* data, Zone* temp_zone) {
1064 : JSTypeHintLowering::Flags flags = JSTypeHintLowering::kNoFlags;
1065 464201 : if (data->info()->is_bailout_on_uninitialized()) {
1066 : flags |= JSTypeHintLowering::kBailoutOnUninitialized;
1067 : }
1068 : CallFrequency frequency = CallFrequency(1.0f);
1069 : BytecodeGraphBuilder graph_builder(
1070 : temp_zone, data->info()->bytecode_array(), data->info()->shared_info(),
1071 : handle(data->info()->closure()->feedback_vector(), data->isolate()),
1072 : data->info()->osr_offset(), data->jsgraph(), frequency,
1073 : data->source_positions(), data->native_context(),
1074 : SourcePosition::kNotInlined, flags, true,
1075 1392606 : data->info()->is_analyze_environment_liveness());
1076 464204 : graph_builder.CreateGraph();
1077 464202 : }
1078 : };
1079 :
1080 : namespace {
1081 :
1082 26551 : Maybe<OuterContext> GetModuleContext(Handle<JSFunction> closure) {
1083 : Context current = closure->context();
1084 : size_t distance = 0;
1085 78189 : while (!current->IsNativeContext()) {
1086 25824 : if (current->IsModuleContext()) {
1087 : return Just(
1088 : OuterContext(handle(current, current->GetIsolate()), distance));
1089 : }
1090 : current = current->previous();
1091 25819 : distance++;
1092 : }
1093 : return Nothing<OuterContext>();
1094 : }
1095 :
1096 464202 : Maybe<OuterContext> ChooseSpecializationContext(
1097 : Isolate* isolate, OptimizedCompilationInfo* info) {
1098 464202 : if (info->is_function_context_specializing()) {
1099 : DCHECK(info->has_context());
1100 437651 : return Just(OuterContext(handle(info->context(), isolate), 0));
1101 : }
1102 26551 : return GetModuleContext(info->closure());
1103 : }
1104 :
1105 : } // anonymous namespace
1106 :
// Phase: context specialization, native-context specialization, call
// reduction and inlining. Reducers are registered in a deliberate order;
// the GraphReducer applies them per node in registration order.
struct InliningPhase {
  static const char* phase_name() { return "V8.TFInlining"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    Isolate* isolate = data->isolate();
    OptimizedCompilationInfo* info = data->info();
    GraphReducer graph_reducer(temp_zone, data->graph(),
                               data->jsgraph()->Dead());
    DeadCodeElimination dead_code_elimination(&graph_reducer, data->graph(),
                                              data->common(), temp_zone);
    CheckpointElimination checkpoint_elimination(&graph_reducer);
    CommonOperatorReducer common_reducer(&graph_reducer, data->graph(),
                                         data->broker(), data->common(),
                                         data->machine(), temp_zone);
    JSCallReducer call_reducer(&graph_reducer, data->jsgraph(), data->broker(),
                               data->info()->is_bailout_on_uninitialized()
                                   ? JSCallReducer::kBailoutOnUninitialized
                                   : JSCallReducer::kNoFlags,
                               data->dependencies());
    JSContextSpecialization context_specialization(
        &graph_reducer, data->jsgraph(), data->broker(),
        ChooseSpecializationContext(isolate, data->info()),
        data->info()->is_function_context_specializing()
            ? data->info()->closure()
            : MaybeHandle<JSFunction>());
    JSNativeContextSpecialization::Flags flags =
        JSNativeContextSpecialization::kNoFlags;
    if (data->info()->is_accessor_inlining_enabled()) {
      flags |= JSNativeContextSpecialization::kAccessorInliningEnabled;
    }
    if (data->info()->is_bailout_on_uninitialized()) {
      flags |= JSNativeContextSpecialization::kBailoutOnUninitialized;
    }
    // Passing the OptimizedCompilationInfo's shared zone here as
    // JSNativeContextSpecialization allocates out-of-heap objects
    // that need to live until code generation.
    JSNativeContextSpecialization native_context_specialization(
        &graph_reducer, data->jsgraph(), data->broker(), flags,
        data->native_context(), data->dependencies(), temp_zone, info->zone());
    JSInliningHeuristic inlining(&graph_reducer,
                                 data->info()->is_inlining_enabled()
                                     ? JSInliningHeuristic::kGeneralInlining
                                     : JSInliningHeuristic::kRestrictedInlining,
                                 temp_zone, data->info(), data->jsgraph(),
                                 data->broker(), data->source_positions());
    JSIntrinsicLowering intrinsic_lowering(&graph_reducer, data->jsgraph());
    // Registration order matters: cleanup reducers first, specialization
    // before call reduction, inlining last.
    AddReducer(data, &graph_reducer, &dead_code_elimination);
    AddReducer(data, &graph_reducer, &checkpoint_elimination);
    AddReducer(data, &graph_reducer, &common_reducer);
    AddReducer(data, &graph_reducer, &native_context_specialization);
    AddReducer(data, &graph_reducer, &context_specialization);
    AddReducer(data, &graph_reducer, &intrinsic_lowering);
    AddReducer(data, &graph_reducer, &call_reducer);
    AddReducer(data, &graph_reducer, &inlining);
    graph_reducer.ReduceGraph();
  }
};
1164 :
1165 :
1166 : struct TyperPhase {
1167 : static const char* phase_name() { return "V8.TFTyper"; }
1168 :
1169 464166 : void Run(PipelineData* data, Zone* temp_zone, Typer* typer) {
1170 : NodeVector roots(temp_zone);
1171 464166 : data->jsgraph()->GetCachedNodes(&roots);
1172 :
1173 : // Make sure we always type True and False. Needed for escape analysis.
1174 928334 : roots.push_back(data->jsgraph()->TrueConstant());
1175 928334 : roots.push_back(data->jsgraph()->FalseConstant());
1176 :
1177 : LoopVariableOptimizer induction_vars(data->jsgraph()->graph(),
1178 464167 : data->common(), temp_zone);
1179 464166 : if (FLAG_turbo_loop_variable) induction_vars.Run();
1180 464165 : typer->Run(roots, &induction_vars);
1181 464167 : }
1182 : };
1183 :
1184 : struct UntyperPhase {
1185 : static const char* phase_name() { return "V8.TFUntyper"; }
1186 :
1187 : void Run(PipelineData* data, Zone* temp_zone) {
1188 : class RemoveTypeReducer final : public Reducer {
1189 : public:
1190 0 : const char* reducer_name() const override { return "RemoveTypeReducer"; }
1191 0 : Reduction Reduce(Node* node) final {
1192 0 : if (NodeProperties::IsTyped(node)) {
1193 : NodeProperties::RemoveType(node);
1194 : return Changed(node);
1195 : }
1196 : return NoChange();
1197 : }
1198 : };
1199 :
1200 : NodeVector roots(temp_zone);
1201 : data->jsgraph()->GetCachedNodes(&roots);
1202 : for (Node* node : roots) {
1203 : NodeProperties::RemoveType(node);
1204 : }
1205 :
1206 : GraphReducer graph_reducer(temp_zone, data->graph(),
1207 : data->jsgraph()->Dead());
1208 : RemoveTypeReducer remove_type_reducer;
1209 : AddReducer(data, &graph_reducer, &remove_type_reducer);
1210 : graph_reducer.ReduceGraph();
1211 : }
1212 : };
1213 :
1214 : struct SerializeStandardObjectsPhase {
1215 : static const char* phase_name() { return "V8.TFSerializeStandardObjects"; }
1216 :
1217 : void Run(PipelineData* data, Zone* temp_zone) {
1218 464204 : data->broker()->SerializeStandardObjects();
1219 : }
1220 : };
1221 :
1222 : struct CopyMetadataForConcurrentCompilePhase {
1223 : static const char* phase_name() { return "V8.TFSerializeMetadata"; }
1224 :
1225 464287 : void Run(PipelineData* data, Zone* temp_zone) {
1226 : GraphReducer graph_reducer(temp_zone, data->graph(),
1227 928575 : data->jsgraph()->Dead());
1228 464287 : JSHeapCopyReducer heap_copy_reducer(data->broker());
1229 464287 : AddReducer(data, &graph_reducer, &heap_copy_reducer);
1230 464287 : graph_reducer.ReduceGraph();
1231 :
1232 : // Some nodes that are no longer in the graph might still be in the cache.
1233 : NodeVector cached_nodes(temp_zone);
1234 464287 : data->jsgraph()->GetCachedNodes(&cached_nodes);
1235 5855920 : for (Node* const node : cached_nodes) graph_reducer.ReduceNode(node);
1236 464288 : }
1237 : };
1238 :
1239 : // TODO(turbofan): Move all calls from CopyMetaDataForConcurrentCompilePhase
1240 : // here. Also all the calls to Serialize* methods that are currently sprinkled
1241 : // over inlining will move here as well.
1242 : struct SerializationPhase {
1243 : static const char* phase_name() { return "V8.TFSerializeBytecode"; }
1244 :
1245 84 : void Run(PipelineData* data, Zone* temp_zone) {
1246 : SerializerForBackgroundCompilation serializer(data->broker(), temp_zone,
1247 84 : data->info()->closure());
1248 84 : serializer.Run();
1249 84 : }
1250 : };
1251 :
1252 : struct TypedLoweringPhase {
1253 : static const char* phase_name() { return "V8.TFTypedLowering"; }
1254 :
1255 464166 : void Run(PipelineData* data, Zone* temp_zone) {
1256 : GraphReducer graph_reducer(temp_zone, data->graph(),
1257 928329 : data->jsgraph()->Dead());
1258 : DeadCodeElimination dead_code_elimination(&graph_reducer, data->graph(),
1259 464167 : data->common(), temp_zone);
1260 : JSCreateLowering create_lowering(&graph_reducer, data->dependencies(),
1261 : data->jsgraph(), data->broker(),
1262 : temp_zone);
1263 : JSTypedLowering typed_lowering(&graph_reducer, data->jsgraph(),
1264 464167 : data->broker(), temp_zone);
1265 : ConstantFoldingReducer constant_folding_reducer(
1266 928331 : &graph_reducer, data->jsgraph(), data->broker());
1267 : TypedOptimization typed_optimization(&graph_reducer, data->dependencies(),
1268 928334 : data->jsgraph(), data->broker());
1269 : SimplifiedOperatorReducer simple_reducer(&graph_reducer, data->jsgraph(),
1270 928333 : data->broker());
1271 464165 : CheckpointElimination checkpoint_elimination(&graph_reducer);
1272 : CommonOperatorReducer common_reducer(&graph_reducer, data->graph(),
1273 : data->broker(), data->common(),
1274 464167 : data->machine(), temp_zone);
1275 464167 : AddReducer(data, &graph_reducer, &dead_code_elimination);
1276 464167 : AddReducer(data, &graph_reducer, &create_lowering);
1277 464167 : AddReducer(data, &graph_reducer, &constant_folding_reducer);
1278 464168 : AddReducer(data, &graph_reducer, &typed_lowering);
1279 464168 : AddReducer(data, &graph_reducer, &typed_optimization);
1280 464168 : AddReducer(data, &graph_reducer, &simple_reducer);
1281 464168 : AddReducer(data, &graph_reducer, &checkpoint_elimination);
1282 464168 : AddReducer(data, &graph_reducer, &common_reducer);
1283 464168 : graph_reducer.ReduceGraph();
1284 464165 : }
1285 : };
1286 :
1287 :
1288 : struct EscapeAnalysisPhase {
1289 : static const char* phase_name() { return "V8.TFEscapeAnalysis"; }
1290 :
1291 464162 : void Run(PipelineData* data, Zone* temp_zone) {
1292 464162 : EscapeAnalysis escape_analysis(data->jsgraph(), temp_zone);
1293 : escape_analysis.ReduceGraph();
1294 928330 : GraphReducer reducer(temp_zone, data->graph(), data->jsgraph()->Dead());
1295 : EscapeAnalysisReducer escape_reducer(&reducer, data->jsgraph(),
1296 : escape_analysis.analysis_result(),
1297 928331 : temp_zone);
1298 464165 : AddReducer(data, &reducer, &escape_reducer);
1299 464168 : reducer.ReduceGraph();
1300 : // TODO(tebbi): Turn this into a debug mode check once we have confidence.
1301 464167 : escape_reducer.VerifyReplacement();
1302 464166 : }
1303 : };
1304 :
1305 : struct SimplifiedLoweringPhase {
1306 : static const char* phase_name() { return "V8.TFSimplifiedLowering"; }
1307 :
1308 464166 : void Run(PipelineData* data, Zone* temp_zone) {
1309 : SimplifiedLowering lowering(data->jsgraph(), data->broker(), temp_zone,
1310 : data->source_positions(), data->node_origins(),
1311 464166 : data->info()->GetPoisoningMitigationLevel());
1312 464167 : lowering.LowerAllNodes();
1313 464164 : }
1314 : };
1315 :
1316 : struct LoopPeelingPhase {
1317 : static const char* phase_name() { return "V8.TFLoopPeeling"; }
1318 :
1319 460763 : void Run(PipelineData* data, Zone* temp_zone) {
1320 921525 : GraphTrimmer trimmer(temp_zone, data->graph());
1321 : NodeVector roots(temp_zone);
1322 460764 : data->jsgraph()->GetCachedNodes(&roots);
1323 460764 : trimmer.TrimGraph(roots.begin(), roots.end());
1324 :
1325 : LoopTree* loop_tree =
1326 460763 : LoopFinder::BuildLoopTree(data->jsgraph()->graph(), temp_zone);
1327 : LoopPeeler(data->graph(), data->common(), loop_tree, temp_zone,
1328 : data->source_positions(), data->node_origins())
1329 460763 : .PeelInnerLoopsOfTree();
1330 460763 : }
1331 : };
1332 :
1333 : struct LoopExitEliminationPhase {
1334 : static const char* phase_name() { return "V8.TFLoopExitElimination"; }
1335 :
1336 : void Run(PipelineData* data, Zone* temp_zone) {
1337 3404 : LoopPeeler::EliminateLoopExits(data->graph(), temp_zone);
1338 : }
1339 : };
1340 :
1341 : struct GenericLoweringPhase {
1342 : static const char* phase_name() { return "V8.TFGenericLowering"; }
1343 :
1344 464167 : void Run(PipelineData* data, Zone* temp_zone) {
1345 : GraphReducer graph_reducer(temp_zone, data->graph(),
1346 928334 : data->jsgraph()->Dead());
1347 928331 : JSGenericLowering generic_lowering(data->jsgraph(), &graph_reducer);
1348 464164 : AddReducer(data, &graph_reducer, &generic_lowering);
1349 464165 : graph_reducer.ReduceGraph();
1350 464168 : }
1351 : };
1352 :
1353 : struct EarlyOptimizationPhase {
1354 : static const char* phase_name() { return "V8.TFEarlyOptimization"; }
1355 :
1356 464161 : void Run(PipelineData* data, Zone* temp_zone) {
1357 : GraphReducer graph_reducer(temp_zone, data->graph(),
1358 928325 : data->jsgraph()->Dead());
1359 : DeadCodeElimination dead_code_elimination(&graph_reducer, data->graph(),
1360 464167 : data->common(), temp_zone);
1361 : SimplifiedOperatorReducer simple_reducer(&graph_reducer, data->jsgraph(),
1362 928329 : data->broker());
1363 928325 : RedundancyElimination redundancy_elimination(&graph_reducer, temp_zone);
1364 928324 : ValueNumberingReducer value_numbering(temp_zone, data->graph()->zone());
1365 928326 : MachineOperatorReducer machine_reducer(&graph_reducer, data->jsgraph());
1366 : CommonOperatorReducer common_reducer(&graph_reducer, data->graph(),
1367 : data->broker(), data->common(),
1368 464161 : data->machine(), temp_zone);
1369 464165 : AddReducer(data, &graph_reducer, &dead_code_elimination);
1370 464164 : AddReducer(data, &graph_reducer, &simple_reducer);
1371 464166 : AddReducer(data, &graph_reducer, &redundancy_elimination);
1372 464167 : AddReducer(data, &graph_reducer, &machine_reducer);
1373 464166 : AddReducer(data, &graph_reducer, &common_reducer);
1374 464167 : AddReducer(data, &graph_reducer, &value_numbering);
1375 464167 : graph_reducer.ReduceGraph();
1376 464166 : }
1377 : };
1378 :
1379 : struct ControlFlowOptimizationPhase {
1380 : static const char* phase_name() { return "V8.TFControlFlowOptimization"; }
1381 :
1382 464165 : void Run(PipelineData* data, Zone* temp_zone) {
1383 : ControlFlowOptimizer optimizer(data->graph(), data->common(),
1384 464165 : data->machine(), temp_zone);
1385 464165 : optimizer.Optimize();
1386 464168 : }
1387 : };
1388 :
1389 : struct EffectControlLinearizationPhase {
1390 : static const char* phase_name() { return "V8.TFEffectLinearization"; }
1391 :
1392 464163 : void Run(PipelineData* data, Zone* temp_zone) {
1393 : {
1394 : // The scheduler requires the graphs to be trimmed, so trim now.
1395 : // TODO(jarin) Remove the trimming once the scheduler can handle untrimmed
1396 : // graphs.
1397 928327 : GraphTrimmer trimmer(temp_zone, data->graph());
1398 : NodeVector roots(temp_zone);
1399 464167 : data->jsgraph()->GetCachedNodes(&roots);
1400 464167 : trimmer.TrimGraph(roots.begin(), roots.end());
1401 :
1402 : // Schedule the graph without node splitting so that we can
1403 : // fix the effect and control flow for nodes with low-level side
1404 : // effects (such as changing representation to tagged or
1405 : // 'floating' allocation regions.)
1406 464167 : Schedule* schedule = Scheduler::ComputeSchedule(temp_zone, data->graph(),
1407 464167 : Scheduler::kTempSchedule);
1408 464167 : if (FLAG_turbo_verify) ScheduleVerifier::Run(schedule);
1409 : TraceSchedule(data->info(), data, schedule,
1410 464167 : "effect linearization schedule");
1411 :
1412 : EffectControlLinearizer::MaskArrayIndexEnable mask_array_index =
1413 : (data->info()->GetPoisoningMitigationLevel() !=
1414 : PoisoningMitigationLevel::kDontPoison)
1415 : ? EffectControlLinearizer::kMaskArrayIndex
1416 464163 : : EffectControlLinearizer::kDoNotMaskArrayIndex;
1417 : // Post-pass for wiring the control/effects
1418 : // - connect allocating representation changes into the control&effect
1419 : // chains and lower them,
1420 : // - get rid of the region markers,
1421 : // - introduce effect phis and rewire effects to get SSA again.
1422 : EffectControlLinearizer linearizer(
1423 : data->jsgraph(), schedule, temp_zone, data->source_positions(),
1424 464163 : data->node_origins(), mask_array_index, data->embedded_maps());
1425 464159 : linearizer.Run();
1426 : }
1427 : {
1428 : // The {EffectControlLinearizer} might leave {Dead} nodes behind, so we
1429 : // run {DeadCodeElimination} to prune these parts of the graph.
1430 : // Also, the following store-store elimination phase greatly benefits from
1431 : // doing a common operator reducer and dead code elimination just before
1432 : // it, to eliminate conditional deopts with a constant condition.
1433 : GraphReducer graph_reducer(temp_zone, data->graph(),
1434 928329 : data->jsgraph()->Dead());
1435 : DeadCodeElimination dead_code_elimination(&graph_reducer, data->graph(),
1436 464163 : data->common(), temp_zone);
1437 : CommonOperatorReducer common_reducer(&graph_reducer, data->graph(),
1438 : data->broker(), data->common(),
1439 464166 : data->machine(), temp_zone);
1440 464168 : AddReducer(data, &graph_reducer, &dead_code_elimination);
1441 464158 : AddReducer(data, &graph_reducer, &common_reducer);
1442 464167 : graph_reducer.ReduceGraph();
1443 : }
1444 464165 : }
1445 : };
1446 :
1447 : struct StoreStoreEliminationPhase {
1448 : static const char* phase_name() { return "V8.TFStoreStoreElimination"; }
1449 :
1450 464161 : void Run(PipelineData* data, Zone* temp_zone) {
1451 928321 : GraphTrimmer trimmer(temp_zone, data->graph());
1452 : NodeVector roots(temp_zone);
1453 464168 : data->jsgraph()->GetCachedNodes(&roots);
1454 464167 : trimmer.TrimGraph(roots.begin(), roots.end());
1455 :
1456 464168 : StoreStoreElimination::Run(data->jsgraph(), temp_zone);
1457 464164 : }
1458 : };
1459 :
1460 : struct LoadEliminationPhase {
1461 : static const char* phase_name() { return "V8.TFLoadElimination"; }
1462 :
1463 464154 : void Run(PipelineData* data, Zone* temp_zone) {
1464 : GraphReducer graph_reducer(temp_zone, data->graph(),
1465 928306 : data->jsgraph()->Dead());
1466 : BranchElimination branch_condition_elimination(&graph_reducer,
1467 928305 : data->jsgraph(), temp_zone);
1468 : DeadCodeElimination dead_code_elimination(&graph_reducer, data->graph(),
1469 464156 : data->common(), temp_zone);
1470 928309 : RedundancyElimination redundancy_elimination(&graph_reducer, temp_zone);
1471 : LoadElimination load_elimination(&graph_reducer, data->jsgraph(),
1472 : temp_zone);
1473 464153 : CheckpointElimination checkpoint_elimination(&graph_reducer);
1474 928308 : ValueNumberingReducer value_numbering(temp_zone, data->graph()->zone());
1475 : CommonOperatorReducer common_reducer(&graph_reducer, data->graph(),
1476 : data->broker(), data->common(),
1477 464151 : data->machine(), temp_zone);
1478 : TypedOptimization typed_optimization(&graph_reducer, data->dependencies(),
1479 928311 : data->jsgraph(), data->broker());
1480 : ConstantFoldingReducer constant_folding_reducer(
1481 928309 : &graph_reducer, data->jsgraph(), data->broker());
1482 : TypeNarrowingReducer type_narrowing_reducer(&graph_reducer, data->jsgraph(),
1483 928309 : data->broker());
1484 464156 : AddReducer(data, &graph_reducer, &branch_condition_elimination);
1485 464155 : AddReducer(data, &graph_reducer, &dead_code_elimination);
1486 464156 : AddReducer(data, &graph_reducer, &redundancy_elimination);
1487 464157 : AddReducer(data, &graph_reducer, &load_elimination);
1488 464156 : AddReducer(data, &graph_reducer, &type_narrowing_reducer);
1489 464157 : AddReducer(data, &graph_reducer, &constant_folding_reducer);
1490 464157 : AddReducer(data, &graph_reducer, &typed_optimization);
1491 464156 : AddReducer(data, &graph_reducer, &checkpoint_elimination);
1492 464156 : AddReducer(data, &graph_reducer, &common_reducer);
1493 464156 : AddReducer(data, &graph_reducer, &value_numbering);
1494 464156 : graph_reducer.ReduceGraph();
1495 464153 : }
1496 : };
1497 :
1498 : struct MemoryOptimizationPhase {
1499 : static const char* phase_name() { return "V8.TFMemoryOptimization"; }
1500 :
1501 531434 : void Run(PipelineData* data, Zone* temp_zone) {
1502 : // The memory optimizer requires the graphs to be trimmed, so trim now.
1503 1062869 : GraphTrimmer trimmer(temp_zone, data->graph());
1504 : NodeVector roots(temp_zone);
1505 531436 : data->jsgraph()->GetCachedNodes(&roots);
1506 531436 : trimmer.TrimGraph(roots.begin(), roots.end());
1507 :
1508 : // Optimize allocations and load/store operations.
1509 : MemoryOptimizer optimizer(
1510 : data->jsgraph(), temp_zone, data->info()->GetPoisoningMitigationLevel(),
1511 : data->info()->is_allocation_folding_enabled()
1512 : ? MemoryOptimizer::AllocationFolding::kDoAllocationFolding
1513 1062870 : : MemoryOptimizer::AllocationFolding::kDontAllocationFolding);
1514 531433 : optimizer.Optimize();
1515 531434 : }
1516 : };
1517 :
1518 : struct LateOptimizationPhase {
1519 : static const char* phase_name() { return "V8.TFLateOptimization"; }
1520 :
1521 464163 : void Run(PipelineData* data, Zone* temp_zone) {
1522 : GraphReducer graph_reducer(temp_zone, data->graph(),
1523 928331 : data->jsgraph()->Dead());
1524 : BranchElimination branch_condition_elimination(&graph_reducer,
1525 928330 : data->jsgraph(), temp_zone);
1526 : DeadCodeElimination dead_code_elimination(&graph_reducer, data->graph(),
1527 464166 : data->common(), temp_zone);
1528 928329 : ValueNumberingReducer value_numbering(temp_zone, data->graph()->zone());
1529 928332 : MachineOperatorReducer machine_reducer(&graph_reducer, data->jsgraph());
1530 : CommonOperatorReducer common_reducer(&graph_reducer, data->graph(),
1531 : data->broker(), data->common(),
1532 464165 : data->machine(), temp_zone);
1533 : SelectLowering select_lowering(data->jsgraph()->graph(),
1534 928333 : data->jsgraph()->common());
1535 464163 : AddReducer(data, &graph_reducer, &branch_condition_elimination);
1536 464165 : AddReducer(data, &graph_reducer, &dead_code_elimination);
1537 464166 : AddReducer(data, &graph_reducer, &machine_reducer);
1538 464168 : AddReducer(data, &graph_reducer, &common_reducer);
1539 464167 : AddReducer(data, &graph_reducer, &select_lowering);
1540 464168 : AddReducer(data, &graph_reducer, &value_numbering);
1541 464167 : graph_reducer.ReduceGraph();
1542 464166 : }
1543 : };
1544 :
1545 : struct MachineOperatorOptimizationPhase {
1546 : static const char* phase_name() { return "V8.TFMachineOperatorOptimization"; }
1547 :
1548 464168 : void Run(PipelineData* data, Zone* temp_zone) {
1549 : GraphReducer graph_reducer(temp_zone, data->graph(),
1550 928335 : data->jsgraph()->Dead());
1551 928335 : ValueNumberingReducer value_numbering(temp_zone, data->graph()->zone());
1552 928334 : MachineOperatorReducer machine_reducer(&graph_reducer, data->jsgraph());
1553 :
1554 464166 : AddReducer(data, &graph_reducer, &machine_reducer);
1555 464166 : AddReducer(data, &graph_reducer, &value_numbering);
1556 464168 : graph_reducer.ReduceGraph();
1557 464168 : }
1558 : };
1559 :
1560 : struct CsaOptimizationPhase {
1561 : static const char* phase_name() { return "V8.CSAOptimization"; }
1562 :
1563 67268 : void Run(PipelineData* data, Zone* temp_zone) {
1564 : GraphReducer graph_reducer(temp_zone, data->graph(),
1565 134536 : data->jsgraph()->Dead());
1566 : BranchElimination branch_condition_elimination(&graph_reducer,
1567 134536 : data->jsgraph(), temp_zone);
1568 : DeadCodeElimination dead_code_elimination(&graph_reducer, data->graph(),
1569 67268 : data->common(), temp_zone);
1570 134536 : MachineOperatorReducer machine_reducer(&graph_reducer, data->jsgraph());
1571 : CommonOperatorReducer common_reducer(&graph_reducer, data->graph(),
1572 : data->broker(), data->common(),
1573 67268 : data->machine(), temp_zone);
1574 67268 : AddReducer(data, &graph_reducer, &branch_condition_elimination);
1575 67268 : AddReducer(data, &graph_reducer, &dead_code_elimination);
1576 67268 : AddReducer(data, &graph_reducer, &machine_reducer);
1577 67268 : AddReducer(data, &graph_reducer, &common_reducer);
1578 67268 : graph_reducer.ReduceGraph();
1579 67268 : }
1580 : };
1581 :
1582 : struct EarlyGraphTrimmingPhase {
1583 : static const char* phase_name() { return "V8.TFEarlyTrimming"; }
1584 464201 : void Run(PipelineData* data, Zone* temp_zone) {
1585 928405 : GraphTrimmer trimmer(temp_zone, data->graph());
1586 : NodeVector roots(temp_zone);
1587 464204 : data->jsgraph()->GetCachedNodes(&roots);
1588 464202 : trimmer.TrimGraph(roots.begin(), roots.end());
1589 464204 : }
1590 : };
1591 :
1592 :
1593 : struct LateGraphTrimmingPhase {
1594 : static const char* phase_name() { return "V8.TFLateGraphTrimming"; }
1595 2402502 : void Run(PipelineData* data, Zone* temp_zone) {
1596 4806232 : GraphTrimmer trimmer(temp_zone, data->graph());
1597 : NodeVector roots(temp_zone);
1598 2404895 : if (data->jsgraph()) {
1599 1408700 : data->jsgraph()->GetCachedNodes(&roots);
1600 : }
1601 2404896 : trimmer.TrimGraph(roots.begin(), roots.end());
1602 2403806 : }
1603 : };
1604 :
1605 :
1606 : struct ComputeSchedulePhase {
1607 : static const char* phase_name() { return "V8.TFScheduling"; }
1608 :
1609 2402797 : void Run(PipelineData* data, Zone* temp_zone) {
1610 4807366 : Schedule* schedule = Scheduler::ComputeSchedule(
1611 : temp_zone, data->graph(), data->info()->is_splitting_enabled()
1612 : ? Scheduler::kSplitNodes
1613 2402797 : : Scheduler::kNoFlags);
1614 2404569 : if (FLAG_turbo_verify) ScheduleVerifier::Run(schedule);
1615 : data->set_schedule(schedule);
1616 2404569 : }
1617 : };
1618 :
1619 : struct InstructionRangesAsJSON {
1620 : const InstructionSequence* sequence;
1621 : const ZoneVector<std::pair<int, int>>* instr_origins;
1622 : };
1623 :
1624 9 : std::ostream& operator<<(std::ostream& out, const InstructionRangesAsJSON& s) {
1625 9 : const int max = static_cast<int>(s.sequence->LastInstructionIndex());
1626 :
1627 9 : out << ", \"nodeIdToInstructionRange\": {";
1628 : bool need_comma = false;
1629 540 : for (size_t i = 0; i < s.instr_origins->size(); ++i) {
1630 174 : std::pair<int, int> offset = (*s.instr_origins)[i];
1631 174 : if (offset.first == -1) continue;
1632 141 : const int first = max - offset.first + 1;
1633 141 : const int second = max - offset.second + 1;
1634 141 : if (need_comma) out << ", ";
1635 282 : out << "\"" << i << "\": [" << first << ", " << second << "]";
1636 : need_comma = true;
1637 : }
1638 9 : out << "}";
1639 9 : out << ", \"blockIdtoInstructionRange\": {";
1640 : need_comma = false;
1641 9 : for (auto block : s.sequence->instruction_blocks()) {
1642 21 : if (need_comma) out << ", ";
1643 84 : out << "\"" << block->rpo_number() << "\": [" << block->code_start() << ", "
1644 21 : << block->code_end() << "]";
1645 : need_comma = true;
1646 : }
1647 9 : out << "}";
1648 9 : return out;
1649 : }
1650 :
1651 : struct InstructionSelectionPhase {
1652 : static const char* phase_name() { return "V8.TFSelectInstructions"; }
1653 :
1654 2641178 : void Run(PipelineData* data, Zone* temp_zone, Linkage* linkage) {
1655 : InstructionSelector selector(
1656 : temp_zone, data->graph()->NodeCount(), linkage, data->sequence(),
1657 : data->schedule(), data->source_positions(), data->frame(),
1658 : data->info()->switch_jump_table_enabled()
1659 : ? InstructionSelector::kEnableSwitchJumpTable
1660 : : InstructionSelector::kDisableSwitchJumpTable,
1661 : data->info()->is_source_positions_enabled()
1662 : ? InstructionSelector::kAllSourcePositions
1663 : : InstructionSelector::kCallSourcePositions,
1664 : InstructionSelector::SupportedFeatures(),
1665 : FLAG_turbo_instruction_scheduling
1666 : ? InstructionSelector::kEnableScheduling
1667 : : InstructionSelector::kDisableScheduling,
1668 1647275 : !data->isolate() || data->isolate()->serializer_enabled() ||
1669 : data->isolate()->IsGeneratingEmbeddedBuiltins()
1670 : ? InstructionSelector::kDisableRootsRelativeAddressing
1671 : : InstructionSelector::kEnableRootsRelativeAddressing,
1672 : data->info()->GetPoisoningMitigationLevel(),
1673 : data->info()->trace_turbo_json_enabled()
1674 : ? InstructionSelector::kEnableTraceTurboJson
1675 12091317 : : InstructionSelector::kDisableTraceTurboJson);
1676 2643164 : if (!selector.SelectInstructions()) {
1677 : data->set_compilation_failed();
1678 : }
1679 2643165 : if (data->info()->trace_turbo_json_enabled()) {
1680 18 : TurboJsonFile json_of(data->info(), std::ios_base::app);
1681 9 : json_of << "{\"name\":\"" << phase_name()
1682 : << "\",\"type\":\"instructions\""
1683 : << InstructionRangesAsJSON{data->sequence(),
1684 27 : &selector.instr_origins()}
1685 9 : << "},\n";
1686 : }
1687 2643165 : }
1688 : };
1689 :
1690 :
1691 : struct MeetRegisterConstraintsPhase {
1692 : static const char* phase_name() { return "V8.TFMeetRegisterConstraints"; }
1693 :
1694 : void Run(PipelineData* data, Zone* temp_zone) {
1695 2642018 : ConstraintBuilder builder(data->register_allocation_data());
1696 2640967 : builder.MeetRegisterConstraints();
1697 : }
1698 : };
1699 :
1700 :
1701 : struct ResolvePhisPhase {
1702 : static const char* phase_name() { return "V8.TFResolvePhis"; }
1703 :
1704 : void Run(PipelineData* data, Zone* temp_zone) {
1705 2643028 : ConstraintBuilder builder(data->register_allocation_data());
1706 2643010 : builder.ResolvePhis();
1707 : }
1708 : };
1709 :
1710 :
1711 : struct BuildLiveRangesPhase {
1712 : static const char* phase_name() { return "V8.TFBuildLiveRanges"; }
1713 :
1714 2640808 : void Run(PipelineData* data, Zone* temp_zone) {
1715 2640808 : LiveRangeBuilder builder(data->register_allocation_data(), temp_zone);
1716 2640830 : builder.BuildLiveRanges();
1717 2643112 : }
1718 : };
1719 :
1720 : struct BuildBundlesPhase {
1721 : static const char* phase_name() { return "V8.TFBuildLiveRangeBundles"; }
1722 :
1723 : void Run(PipelineData* data, Zone* temp_zone) {
1724 : BundleBuilder builder(data->register_allocation_data());
1725 2643176 : builder.BuildBundles();
1726 : }
1727 : };
1728 :
1729 : struct SplinterLiveRangesPhase {
1730 : static const char* phase_name() { return "V8.TFSplinterLiveRanges"; }
1731 :
1732 : void Run(PipelineData* data, Zone* temp_zone) {
1733 : LiveRangeSeparator live_range_splinterer(data->register_allocation_data(),
1734 : temp_zone);
1735 2643046 : live_range_splinterer.Splinter();
1736 : }
1737 : };
1738 :
1739 :
1740 : template <typename RegAllocator>
1741 : struct AllocateGeneralRegistersPhase {
1742 : static const char* phase_name() { return "V8.TFAllocateGeneralRegisters"; }
1743 :
1744 2642969 : void Run(PipelineData* data, Zone* temp_zone) {
1745 : RegAllocator allocator(data->register_allocation_data(), GENERAL_REGISTERS,
1746 2642969 : temp_zone);
1747 2643465 : allocator.AllocateRegisters();
1748 2643074 : }
1749 : };
1750 :
1751 : template <typename RegAllocator>
1752 : struct AllocateFPRegistersPhase {
1753 : static const char* phase_name() { return "V8.TFAllocateFPRegisters"; }
1754 :
1755 295682 : void Run(PipelineData* data, Zone* temp_zone) {
1756 : RegAllocator allocator(data->register_allocation_data(), FP_REGISTERS,
1757 295682 : temp_zone);
1758 295724 : allocator.AllocateRegisters();
1759 295712 : }
1760 : };
1761 :
1762 :
1763 : struct MergeSplintersPhase {
1764 : static const char* phase_name() { return "V8.TFMergeSplinteredRanges"; }
1765 : void Run(PipelineData* pipeline_data, Zone* temp_zone) {
1766 : RegisterAllocationData* data = pipeline_data->register_allocation_data();
1767 : LiveRangeMerger live_range_merger(data, temp_zone);
1768 2642914 : live_range_merger.Merge();
1769 : }
1770 : };
1771 :
1772 :
1773 : struct LocateSpillSlotsPhase {
1774 : static const char* phase_name() { return "V8.TFLocateSpillSlots"; }
1775 :
1776 : void Run(PipelineData* data, Zone* temp_zone) {
1777 2643161 : SpillSlotLocator locator(data->register_allocation_data());
1778 2642775 : locator.LocateSpillSlots();
1779 : }
1780 : };
1781 :
1782 : struct DecideSpillingModePhase {
1783 : static const char* phase_name() { return "V8.TFDecideSpillingMode"; }
1784 :
1785 : void Run(PipelineData* data, Zone* temp_zone) {
1786 2643159 : OperandAssigner assigner(data->register_allocation_data());
1787 2641175 : assigner.DecideSpillingMode();
1788 : }
1789 : };
1790 :
1791 : struct AssignSpillSlotsPhase {
1792 : static const char* phase_name() { return "V8.TFAssignSpillSlots"; }
1793 :
1794 : void Run(PipelineData* data, Zone* temp_zone) {
1795 2643192 : OperandAssigner assigner(data->register_allocation_data());
1796 2643217 : assigner.AssignSpillSlots();
1797 : }
1798 : };
1799 :
1800 :
1801 : struct CommitAssignmentPhase {
1802 : static const char* phase_name() { return "V8.TFCommitAssignment"; }
1803 :
1804 : void Run(PipelineData* data, Zone* temp_zone) {
1805 2643153 : OperandAssigner assigner(data->register_allocation_data());
1806 2643244 : assigner.CommitAssignment();
1807 : }
1808 : };
1809 :
1810 :
1811 : struct PopulateReferenceMapsPhase {
1812 : static const char* phase_name() { return "V8.TFPopulatePointerMaps"; }
1813 :
1814 : void Run(PipelineData* data, Zone* temp_zone) {
1815 2643223 : ReferenceMapPopulator populator(data->register_allocation_data());
1816 2643276 : populator.PopulateReferenceMaps();
1817 : }
1818 : };
1819 :
1820 :
1821 : struct ConnectRangesPhase {
1822 : static const char* phase_name() { return "V8.TFConnectRanges"; }
1823 :
1824 : void Run(PipelineData* data, Zone* temp_zone) {
1825 2643205 : LiveRangeConnector connector(data->register_allocation_data());
1826 2643256 : connector.ConnectRanges(temp_zone);
1827 : }
1828 : };
1829 :
1830 :
1831 : struct ResolveControlFlowPhase {
1832 : static const char* phase_name() { return "V8.TFResolveControlFlow"; }
1833 :
1834 : void Run(PipelineData* data, Zone* temp_zone) {
1835 2643239 : LiveRangeConnector connector(data->register_allocation_data());
1836 2643275 : connector.ResolveControlFlow(temp_zone);
1837 : }
1838 : };
1839 :
1840 :
1841 : struct OptimizeMovesPhase {
1842 : static const char* phase_name() { return "V8.TFOptimizeMoves"; }
1843 :
1844 : void Run(PipelineData* data, Zone* temp_zone) {
1845 2643117 : MoveOptimizer move_optimizer(temp_zone, data->sequence());
1846 2642577 : move_optimizer.Run();
1847 : }
1848 : };
1849 :
1850 :
1851 : struct FrameElisionPhase {
1852 : static const char* phase_name() { return "V8.TFFrameElision"; }
1853 :
1854 : void Run(PipelineData* data, Zone* temp_zone) {
1855 2643012 : FrameElider(data->sequence()).Run();
1856 : }
1857 : };
1858 :
1859 :
1860 : struct JumpThreadingPhase {
1861 : static const char* phase_name() { return "V8.TFJumpThreading"; }
1862 :
1863 2641196 : void Run(PipelineData* data, Zone* temp_zone, bool frame_at_start) {
1864 : ZoneVector<RpoNumber> result(temp_zone);
1865 2641196 : if (JumpThreading::ComputeForwarding(temp_zone, result, data->sequence(),
1866 : frame_at_start)) {
1867 732057 : JumpThreading::ApplyForwarding(temp_zone, result, data->sequence());
1868 : }
1869 2643380 : }
1870 : };
1871 :
1872 : struct AssembleCodePhase {
1873 : static const char* phase_name() { return "V8.TFAssembleCode"; }
1874 :
1875 : void Run(PipelineData* data, Zone* temp_zone) {
1876 2642037 : data->code_generator()->AssembleCode();
1877 : }
1878 : };
1879 :
1880 : struct FinalizeCodePhase {
1881 : static const char* phase_name() { return "V8.TFFinalizeCode"; }
1882 :
1883 : void Run(PipelineData* data, Zone* temp_zone) {
1884 1589816 : data->set_code(data->code_generator()->FinalizeCode());
1885 : }
1886 : };
1887 :
1888 :
1889 : struct PrintGraphPhase {
1890 : static const char* phase_name() { return nullptr; }
1891 :
1892 42 : void Run(PipelineData* data, Zone* temp_zone, const char* phase) {
1893 : OptimizedCompilationInfo* info = data->info();
1894 : Graph* graph = data->graph();
1895 :
1896 42 : if (info->trace_turbo_json_enabled()) { // Print JSON.
1897 : AllowHandleDereference allow_deref;
1898 :
1899 84 : TurboJsonFile json_of(info, std::ios_base::app);
1900 42 : json_of << "{\"name\":\"" << phase << "\",\"type\":\"graph\",\"data\":"
1901 84 : << AsJSON(*graph, data->source_positions(), data->node_origins())
1902 42 : << "},\n";
1903 : }
1904 :
1905 42 : if (info->trace_turbo_scheduled_enabled()) {
1906 0 : AccountingAllocator allocator;
1907 : Schedule* schedule = data->schedule();
1908 0 : if (schedule == nullptr) {
1909 0 : schedule = Scheduler::ComputeSchedule(temp_zone, data->graph(),
1910 0 : Scheduler::kNoFlags);
1911 : }
1912 :
1913 : AllowHandleDereference allow_deref;
1914 0 : CodeTracer::Scope tracing_scope(data->GetCodeTracer());
1915 0 : OFStream os(tracing_scope.file());
1916 0 : os << "-- Graph after " << phase << " -- " << std::endl;
1917 0 : os << AsScheduledGraph(schedule);
1918 42 : } else if (info->trace_turbo_graph_enabled()) { // Simple textual RPO.
1919 : AllowHandleDereference allow_deref;
1920 42 : CodeTracer::Scope tracing_scope(data->GetCodeTracer());
1921 84 : OFStream os(tracing_scope.file());
1922 42 : os << "-- Graph after " << phase << " -- " << std::endl;
1923 42 : os << AsRPO(*graph);
1924 : }
1925 42 : }
1926 : };
1927 :
1928 :
1929 : struct VerifyGraphPhase {
1930 : static const char* phase_name() { return nullptr; }
1931 :
1932 : void Run(PipelineData* data, Zone* temp_zone, const bool untyped,
1933 : bool values_only = false) {
1934 : Verifier::CodeType code_type;
1935 67556 : switch (data->info()->code_kind()) {
1936 : case Code::WASM_FUNCTION:
1937 : case Code::WASM_TO_JS_FUNCTION:
1938 : case Code::JS_TO_WASM_FUNCTION:
1939 : case Code::WASM_INTERPRETER_ENTRY:
1940 : case Code::C_WASM_ENTRY:
1941 : code_type = Verifier::kWasm;
1942 : break;
1943 : default:
1944 : code_type = Verifier::kDefault;
1945 : }
1946 67556 : Verifier::Run(data->graph(), !untyped ? Verifier::TYPED : Verifier::UNTYPED,
1947 : values_only ? Verifier::kValuesOnly : Verifier::kAll,
1948 67556 : code_type);
1949 : }
1950 : };
1951 :
1952 13101367 : void PipelineImpl::RunPrintAndVerify(const char* phase, bool untyped) {
1953 26203278 : if (info()->trace_turbo_json_enabled() ||
1954 : info()->trace_turbo_graph_enabled()) {
1955 42 : Run<PrintGraphPhase>(phase);
1956 : }
1957 13102482 : if (FLAG_turbo_verify) {
1958 288 : Run<VerifyGraphPhase>(untyped);
1959 : }
1960 13102482 : }
1961 :
1962 464200 : bool PipelineImpl::CreateGraph() {
1963 464200 : PipelineData* data = this->data_;
1964 :
1965 : data->BeginPhaseKind("V8.TFGraphCreation");
1966 :
1967 928398 : if (info()->trace_turbo_json_enabled() ||
1968 : info()->trace_turbo_graph_enabled()) {
1969 1 : CodeTracer::Scope tracing_scope(data->GetCodeTracer());
1970 2 : OFStream os(tracing_scope.file());
1971 : os << "---------------------------------------------------\n"
1972 3 : << "Begin compiling method " << info()->GetDebugName().get()
1973 : << " using TurboFan" << std::endl;
1974 : }
1975 464198 : if (info()->trace_turbo_json_enabled()) {
1976 2 : TurboCfgFile tcf(isolate());
1977 1 : tcf << AsC1VCompilation(info());
1978 : }
1979 :
1980 464198 : data->source_positions()->AddDecorator();
1981 464200 : if (data->info()->trace_turbo_json_enabled()) {
1982 1 : data->node_origins()->AddDecorator();
1983 : }
1984 :
1985 464201 : if (FLAG_concurrent_inlining) {
1986 84 : data->broker()->StartSerializing();
1987 84 : Run<SerializeStandardObjectsPhase>();
1988 84 : Run<SerializationPhase>();
1989 : } else {
1990 464117 : data->broker()->SetNativeContextRef();
1991 : }
1992 :
1993 464202 : Run<GraphBuilderPhase>();
1994 464203 : RunPrintAndVerify(GraphBuilderPhase::phase_name(), true);
1995 :
1996 464202 : if (FLAG_concurrent_inlining) {
1997 84 : Run<CopyMetadataForConcurrentCompilePhase>();
1998 : }
1999 :
2000 : // Perform function context specialization and inlining (if enabled).
2001 464202 : Run<InliningPhase>();
2002 464203 : RunPrintAndVerify(InliningPhase::phase_name(), true);
2003 :
2004 : // Remove dead->live edges from the graph.
2005 464204 : Run<EarlyGraphTrimmingPhase>();
2006 464203 : RunPrintAndVerify(EarlyGraphTrimmingPhase::phase_name(), true);
2007 :
2008 : // Determine the Typer operation flags.
2009 : {
2010 1301126 : if (is_sloppy(info()->shared_info()->language_mode()) &&
2011 836926 : info()->shared_info()->IsUserJavaScript()) {
2012 : // Sloppy mode functions always have an Object for this.
2013 : data->AddTyperFlag(Typer::kThisIsReceiver);
2014 : }
2015 464200 : if (IsClassConstructor(info()->shared_info()->kind())) {
2016 : // Class constructors cannot be [[Call]]ed.
2017 : data->AddTyperFlag(Typer::kNewTargetIsReceiver);
2018 : }
2019 : }
2020 :
2021 : // Run the type-sensitive lowerings and optimizations on the graph.
2022 : {
2023 464200 : if (FLAG_concurrent_inlining) {
2024 : // TODO(neis): Remove CopyMetadataForConcurrentCompilePhase call once
2025 : // brokerization of JSNativeContextSpecialization is complete.
2026 84 : Run<CopyMetadataForConcurrentCompilePhase>();
2027 84 : data->broker()->StopSerializing();
2028 : } else {
2029 464116 : data->broker()->StartSerializing();
2030 464117 : Run<SerializeStandardObjectsPhase>();
2031 464120 : Run<CopyMetadataForConcurrentCompilePhase>();
2032 464119 : data->broker()->StopSerializing();
2033 : }
2034 : }
2035 :
2036 : data->EndPhaseKind();
2037 :
2038 464203 : return true;
2039 : }
2040 :
2041 464166 : bool PipelineImpl::OptimizeGraph(Linkage* linkage) {
2042 464166 : PipelineData* data = this->data_;
2043 :
2044 : data->BeginPhaseKind("V8.TFLowering");
2045 :
2046 : // Type the graph and keep the Typer running such that new nodes get
2047 : // automatically typed when they are created.
2048 464166 : Run<TyperPhase>(data->CreateTyper());
2049 464166 : RunPrintAndVerify(TyperPhase::phase_name());
2050 464166 : Run<TypedLoweringPhase>();
2051 464168 : RunPrintAndVerify(TypedLoweringPhase::phase_name());
2052 :
2053 464168 : if (data->info()->is_loop_peeling_enabled()) {
2054 460764 : Run<LoopPeelingPhase>();
2055 460762 : RunPrintAndVerify(LoopPeelingPhase::phase_name(), true);
2056 : } else {
2057 3404 : Run<LoopExitEliminationPhase>();
2058 3404 : RunPrintAndVerify(LoopExitEliminationPhase::phase_name(), true);
2059 : }
2060 :
2061 464166 : if (FLAG_turbo_load_elimination) {
2062 464155 : Run<LoadEliminationPhase>();
2063 464157 : RunPrintAndVerify(LoadEliminationPhase::phase_name());
2064 : }
2065 464168 : data->DeleteTyper();
2066 :
2067 464168 : if (FLAG_turbo_escape) {
2068 464168 : Run<EscapeAnalysisPhase>();
2069 464167 : if (data->compilation_failed()) {
2070 : info()->AbortOptimization(
2071 0 : BailoutReason::kCyclicObjectStateDetectedInEscapeAnalysis);
2072 : data->EndPhaseKind();
2073 : return false;
2074 : }
2075 464167 : RunPrintAndVerify(EscapeAnalysisPhase::phase_name());
2076 : }
2077 :
2078 : // Perform simplified lowering. This has to run w/o the Typer decorator,
2079 : // because we cannot compute meaningful types anyways, and the computed types
2080 : // might even conflict with the representation/truncation logic.
2081 464165 : Run<SimplifiedLoweringPhase>();
2082 464167 : RunPrintAndVerify(SimplifiedLoweringPhase::phase_name(), true);
2083 :
2084 : // From now on it is invalid to look at types on the nodes, because the types
2085 : // on the nodes might not make sense after representation selection due to the
2086 : // way we handle truncations; if we'd want to look at types afterwards we'd
2087 : // essentially need to re-type (large portions of) the graph.
2088 :
2089 : // In order to catch bugs related to type access after this point, we now
2090 : // remove the types from the nodes (currently only in Debug builds).
2091 : #ifdef DEBUG
2092 : Run<UntyperPhase>();
2093 : RunPrintAndVerify(UntyperPhase::phase_name(), true);
2094 : #endif
2095 :
2096 : // Run generic lowering pass.
2097 464166 : Run<GenericLoweringPhase>();
2098 464167 : RunPrintAndVerify(GenericLoweringPhase::phase_name(), true);
2099 :
2100 : data->BeginPhaseKind("V8.TFBlockBuilding");
2101 :
2102 : // Run early optimization pass.
2103 464167 : Run<EarlyOptimizationPhase>();
2104 464165 : RunPrintAndVerify(EarlyOptimizationPhase::phase_name(), true);
2105 :
2106 464165 : Run<EffectControlLinearizationPhase>();
2107 464167 : RunPrintAndVerify(EffectControlLinearizationPhase::phase_name(), true);
2108 :
2109 464167 : if (FLAG_turbo_store_elimination) {
2110 464167 : Run<StoreStoreEliminationPhase>();
2111 464167 : RunPrintAndVerify(StoreStoreEliminationPhase::phase_name(), true);
2112 : }
2113 :
2114 : // Optimize control flow.
2115 464163 : if (FLAG_turbo_cf_optimization) {
2116 464163 : Run<ControlFlowOptimizationPhase>();
2117 464167 : RunPrintAndVerify(ControlFlowOptimizationPhase::phase_name(), true);
2118 : }
2119 :
2120 464166 : Run<LateOptimizationPhase>();
2121 464167 : RunPrintAndVerify(LateOptimizationPhase::phase_name(), true);
2122 :
2123 : // Optimize memory access and allocation operations.
2124 464167 : Run<MemoryOptimizationPhase>();
2125 464168 : RunPrintAndVerify(MemoryOptimizationPhase::phase_name(), true);
2126 :
2127 : // Run value numbering and machine operator reducer to optimize load/store
2128 : // address computation (in particular, reuse the address computation whenever
2129 : // possible).
2130 464168 : Run<MachineOperatorOptimizationPhase>();
2131 464166 : RunPrintAndVerify(MachineOperatorOptimizationPhase::phase_name(), true);
2132 :
2133 464166 : data->source_positions()->RemoveDecorator();
2134 464166 : if (data->info()->trace_turbo_json_enabled()) {
2135 1 : data->node_origins()->RemoveDecorator();
2136 : }
2137 :
2138 464166 : ComputeScheduledGraph();
2139 :
2140 464165 : return SelectInstructions(linkage);
2141 : }
2142 :
2143 67268 : MaybeHandle<Code> Pipeline::GenerateCodeForCodeStub(
2144 : Isolate* isolate, CallDescriptor* call_descriptor, Graph* graph,
2145 : SourcePositionTable* source_positions, Code::Kind kind,
2146 : const char* debug_name, int32_t builtin_index,
2147 : PoisoningMitigationLevel poisoning_level, const AssemblerOptions& options) {
2148 134536 : OptimizedCompilationInfo info(CStrVector(debug_name), graph->zone(), kind);
2149 : info.set_builtin_index(builtin_index);
2150 :
2151 67268 : if (poisoning_level != PoisoningMitigationLevel::kDontPoison) {
2152 : info.SetPoisoningMitigationLevel(poisoning_level);
2153 : }
2154 :
2155 : // Construct a pipeline for scheduling and code generation.
2156 134536 : ZoneStats zone_stats(isolate->allocator());
2157 67268 : NodeOriginTable node_origins(graph);
2158 : JumpOptimizationInfo jump_opt;
2159 : bool should_optimize_jumps =
2160 67268 : isolate->serializer_enabled() && FLAG_turbo_rewrite_far_jumps;
2161 : PipelineData data(&zone_stats, &info, isolate, graph, nullptr,
2162 : source_positions, &node_origins,
2163 134536 : should_optimize_jumps ? &jump_opt : nullptr, options);
2164 : data.set_verify_graph(FLAG_verify_csa);
2165 134536 : std::unique_ptr<PipelineStatistics> pipeline_statistics;
2166 67268 : if (FLAG_turbo_stats || FLAG_turbo_stats_nvp) {
2167 0 : pipeline_statistics.reset(new PipelineStatistics(
2168 0 : &info, isolate->GetTurboStatistics(), &zone_stats));
2169 0 : pipeline_statistics->BeginPhaseKind("V8.TFStubCodegen");
2170 : }
2171 :
2172 : PipelineImpl pipeline(&data);
2173 :
2174 134536 : if (info.trace_turbo_json_enabled() || info.trace_turbo_graph_enabled()) {
2175 0 : CodeTracer::Scope tracing_scope(data.GetCodeTracer());
2176 0 : OFStream os(tracing_scope.file());
2177 : os << "---------------------------------------------------\n"
2178 0 : << "Begin compiling " << debug_name << " using TurboFan" << std::endl;
2179 0 : if (info.trace_turbo_json_enabled()) {
2180 0 : TurboJsonFile json_of(&info, std::ios_base::trunc);
2181 0 : json_of << "{\"function\" : ";
2182 0 : JsonPrintFunctionSource(json_of, -1, info.GetDebugName(),
2183 : Handle<Script>(), isolate,
2184 0 : Handle<SharedFunctionInfo>());
2185 0 : json_of << ",\n\"phases\":[";
2186 : }
2187 0 : pipeline.Run<PrintGraphPhase>("V8.TFMachineCode");
2188 : }
2189 :
2190 : // Optimize memory access and allocation operations.
2191 67268 : pipeline.Run<MemoryOptimizationPhase>();
2192 67268 : pipeline.RunPrintAndVerify(MemoryOptimizationPhase::phase_name(), true);
2193 :
2194 67268 : pipeline.Run<CsaOptimizationPhase>();
2195 67268 : pipeline.RunPrintAndVerify(CsaOptimizationPhase::phase_name(), true);
2196 :
2197 67268 : pipeline.Run<VerifyGraphPhase>(true);
2198 67268 : pipeline.ComputeScheduledGraph();
2199 : DCHECK_NOT_NULL(data.schedule());
2200 :
2201 : // First run code generation on a copy of the pipeline, in order to be able to
2202 : // repeat it for jump optimization. The first run has to happen on a temporary
2203 : // pipeline to avoid deletion of zones on the main pipeline.
2204 : PipelineData second_data(&zone_stats, &info, isolate, data.graph(),
2205 : data.schedule(), data.source_positions(),
2206 : data.node_origins(), data.jump_optimization_info(),
2207 134536 : options);
2208 : second_data.set_verify_graph(FLAG_verify_csa);
2209 : PipelineImpl second_pipeline(&second_data);
2210 67268 : second_pipeline.SelectInstructionsAndAssemble(call_descriptor);
2211 :
2212 : Handle<Code> code;
2213 67268 : if (jump_opt.is_optimizable()) {
2214 : jump_opt.set_optimizing();
2215 114576 : code = pipeline.GenerateCode(call_descriptor).ToHandleChecked();
2216 : } else {
2217 19960 : code = second_pipeline.FinalizeCode().ToHandleChecked();
2218 : }
2219 :
2220 134536 : return code;
2221 : }
2222 :
2223 : // static
2224 376290 : wasm::WasmCompilationResult Pipeline::GenerateCodeForWasmNativeStub(
2225 : wasm::WasmEngine* wasm_engine, CallDescriptor* call_descriptor,
2226 : MachineGraph* mcgraph, Code::Kind kind, int wasm_kind,
2227 : const char* debug_name, const AssemblerOptions& options,
2228 : SourcePositionTable* source_positions) {
2229 : Graph* graph = mcgraph->graph();
2230 752594 : OptimizedCompilationInfo info(CStrVector(debug_name), graph->zone(), kind);
2231 : // Construct a pipeline for scheduling and code generation.
2232 752599 : ZoneStats zone_stats(wasm_engine->allocator());
2233 376287 : NodeOriginTable* node_positions = new (graph->zone()) NodeOriginTable(graph);
2234 : // {instruction_buffer} must live longer than {PipelineData}, since
2235 : // {PipelineData} will reference the {instruction_buffer} via the
2236 : // {AssemblerBuffer} of the {Assembler} contained in the {CodeGenerator}.
2237 : std::unique_ptr<wasm::WasmInstructionBuffer> instruction_buffer =
2238 752592 : wasm::WasmInstructionBuffer::New();
2239 : PipelineData data(&zone_stats, wasm_engine, &info, mcgraph, nullptr,
2240 752587 : source_positions, node_positions, options);
2241 752596 : std::unique_ptr<PipelineStatistics> pipeline_statistics;
2242 376305 : if (FLAG_turbo_stats || FLAG_turbo_stats_nvp) {
2243 1 : pipeline_statistics.reset(new PipelineStatistics(
2244 0 : &info, wasm_engine->GetOrCreateTurboStatistics(), &zone_stats));
2245 0 : pipeline_statistics->BeginPhaseKind("V8.WasmStubCodegen");
2246 : }
2247 :
2248 : PipelineImpl pipeline(&data);
2249 :
2250 752609 : if (info.trace_turbo_json_enabled() || info.trace_turbo_graph_enabled()) {
2251 0 : CodeTracer::Scope tracing_scope(data.GetCodeTracer());
2252 0 : OFStream os(tracing_scope.file());
2253 : os << "---------------------------------------------------\n"
2254 0 : << "Begin compiling method " << info.GetDebugName().get()
2255 : << " using TurboFan" << std::endl;
2256 : }
2257 :
2258 376304 : if (info.trace_turbo_graph_enabled()) { // Simple textual RPO.
2259 0 : StdoutStream{} << "-- wasm stub " << Code::Kind2String(kind) << " graph -- "
2260 : << std::endl
2261 0 : << AsRPO(*graph);
2262 : }
2263 :
2264 376304 : if (info.trace_turbo_json_enabled()) {
2265 0 : TurboJsonFile json_of(&info, std::ios_base::trunc);
2266 0 : json_of << "{\"function\":\"" << info.GetDebugName().get()
2267 0 : << "\", \"source\":\"\",\n\"phases\":[";
2268 : }
2269 :
2270 376304 : pipeline.RunPrintAndVerify("V8.WasmNativeStubMachineCode", true);
2271 376301 : pipeline.ComputeScheduledGraph();
2272 :
2273 : Linkage linkage(call_descriptor);
2274 376288 : CHECK(pipeline.SelectInstructions(&linkage));
2275 752596 : pipeline.AssembleCode(&linkage, instruction_buffer->CreateView());
2276 :
2277 : CodeGenerator* code_generator = pipeline.code_generator();
2278 : wasm::WasmCompilationResult result;
2279 376296 : code_generator->tasm()->GetCode(
2280 : nullptr, &result.code_desc, code_generator->safepoint_table_builder(),
2281 376296 : static_cast<int>(code_generator->GetHandlerTableOffset()));
2282 752580 : result.instr_buffer = instruction_buffer->ReleaseBuffer();
2283 752578 : result.source_positions = code_generator->GetSourcePositionTable();
2284 752583 : result.protected_instructions = code_generator->GetProtectedInstructions();
2285 376295 : result.frame_slot_count = code_generator->frame()->GetTotalFrameSlotCount();
2286 376295 : result.tagged_parameter_slots = call_descriptor->GetTaggedParameterSlots();
2287 376291 : result.result_tier = wasm::ExecutionTier::kTurbofan;
2288 :
2289 : DCHECK(result.succeeded());
2290 :
2291 376291 : if (info.trace_turbo_json_enabled()) {
2292 0 : TurboJsonFile json_of(&info, std::ios_base::app);
2293 0 : json_of << "{\"name\":\"disassembly\",\"type\":\"disassembly\",\"data\":\"";
2294 : #ifdef ENABLE_DISASSEMBLER
2295 : std::stringstream disassembler_stream;
2296 : Disassembler::Decode(
2297 : nullptr, &disassembler_stream, result.code_desc.buffer,
2298 : result.code_desc.buffer + result.code_desc.safepoint_table_offset,
2299 : CodeReference(&result.code_desc));
2300 : for (auto const c : disassembler_stream.str()) {
2301 : json_of << AsEscapedUC16ForJSON(c);
2302 : }
2303 : #endif // ENABLE_DISASSEMBLER
2304 0 : json_of << "\"}\n]";
2305 0 : json_of << "\n}";
2306 : }
2307 :
2308 752583 : if (info.trace_turbo_json_enabled() || info.trace_turbo_graph_enabled()) {
2309 0 : CodeTracer::Scope tracing_scope(data.GetCodeTracer());
2310 0 : OFStream os(tracing_scope.file());
2311 : os << "---------------------------------------------------\n"
2312 0 : << "Finished compiling method " << info.GetDebugName().get()
2313 : << " using TurboFan" << std::endl;
2314 : }
2315 :
2316 376294 : return result;
2317 : }
2318 :
// static
// Compiles a wasm heap stub (kind {kind}) from an already-built {graph}.
// Schedules the graph, selects instructions, allocates registers and
// generates code on the given {isolate}. Returns the empty MaybeHandle if
// code generation or committing dependencies fails.
MaybeHandle<Code> Pipeline::GenerateCodeForWasmHeapStub(
    Isolate* isolate, CallDescriptor* call_descriptor, Graph* graph,
    Code::Kind kind, const char* debug_name, const AssemblerOptions& options,
    SourcePositionTable* source_positions) {
  OptimizedCompilationInfo info(CStrVector(debug_name), graph->zone(), kind);
  // Construct a pipeline for scheduling and code generation.
  ZoneStats zone_stats(isolate->allocator());
  // Node origins are placement-allocated in the graph zone so they share the
  // graph's lifetime.
  NodeOriginTable* node_positions = new (graph->zone()) NodeOriginTable(graph);
  PipelineData data(&zone_stats, &info, isolate, graph, nullptr,
                    source_positions, node_positions, nullptr, options);
  // Statistics collection is optional and only enabled via flags.
  std::unique_ptr<PipelineStatistics> pipeline_statistics;
  if (FLAG_turbo_stats || FLAG_turbo_stats_nvp) {
    pipeline_statistics.reset(new PipelineStatistics(
        &info, isolate->GetTurboStatistics(), &zone_stats));
    pipeline_statistics->BeginPhaseKind("V8.WasmStubCodegen");
  }

  PipelineImpl pipeline(&data);

  // Tracing preamble (JSON and/or textual), controlled by the info flags.
  if (info.trace_turbo_json_enabled() ||
      info.trace_turbo_graph_enabled()) {
    CodeTracer::Scope tracing_scope(data.GetCodeTracer());
    OFStream os(tracing_scope.file());
    os << "---------------------------------------------------\n"
       << "Begin compiling method " << info.GetDebugName().get()
       << " using TurboFan" << std::endl;
  }

  if (info.trace_turbo_graph_enabled()) {  // Simple textual RPO.
    StdoutStream{} << "-- wasm stub " << Code::Kind2String(kind) << " graph -- "
                   << std::endl
                   << AsRPO(*graph);
  }

  if (info.trace_turbo_json_enabled()) {
    // Truncate: this opens the JSON trace file fresh for this compilation.
    TurboJsonFile json_of(&info, std::ios_base::trunc);
    json_of << "{\"function\":\"" << info.GetDebugName().get()
            << "\", \"source\":\"\",\n\"phases\":[";
  }

  pipeline.RunPrintAndVerify("V8.WasmMachineCode", true);
  pipeline.ComputeScheduledGraph();

  Handle<Code> code;
  // Only return the code object if both code generation and dependency
  // committing succeed.
  if (pipeline.GenerateCode(call_descriptor).ToHandle(&code) &&
      pipeline.CommitDependencies(code)) {
    return code;
  }
  return MaybeHandle<Code>();
}
2370 :
// static
// Testing entry point: runs the full TurboFan pipeline (graph creation,
// optimization, code generation) for {info}. If {out_broker} is non-null,
// the caller receives the pipeline's JSHeapBroker and the broker is NOT
// retired during finalization (see the FinalizeCode argument below).
MaybeHandle<Code> Pipeline::GenerateCodeForTesting(
    OptimizedCompilationInfo* info, Isolate* isolate,
    JSHeapBroker** out_broker) {
  ZoneStats zone_stats(isolate->allocator());
  std::unique_ptr<PipelineStatistics> pipeline_statistics(
      CreatePipelineStatistics(Handle<Script>::null(), info, isolate,
                               &zone_stats));
  PipelineData data(&zone_stats, isolate, info, pipeline_statistics.get());
  if (out_broker != nullptr) {
    *out_broker = data.broker();
  }

  PipelineImpl pipeline(&data);

  Linkage linkage(Linkage::ComputeIncoming(data.instruction_zone(), info));
  Deoptimizer::EnsureCodeForDeoptimizationEntries(isolate);

  if (!pipeline.CreateGraph()) return MaybeHandle<Code>();
  if (!pipeline.OptimizeGraph(&linkage)) return MaybeHandle<Code>();
  pipeline.AssembleCode(&linkage);
  Handle<Code> code;
  // Retire the broker only when the caller did not ask to keep it.
  if (pipeline.FinalizeCode(out_broker == nullptr).ToHandle(&code) &&
      pipeline.CommitDependencies(code)) {
    return code;
  }
  return MaybeHandle<Code>();
}
2399 :
// static
// Testing entry point that starts from a pre-built {graph} (and optionally a
// pre-computed {schedule}): schedules if needed, then selects instructions
// and generates code. Returns the empty MaybeHandle on failure.
MaybeHandle<Code> Pipeline::GenerateCodeForTesting(
    OptimizedCompilationInfo* info, Isolate* isolate,
    CallDescriptor* call_descriptor, Graph* graph,
    const AssemblerOptions& options, Schedule* schedule) {
  // Construct a pipeline for scheduling and code generation.
  ZoneStats zone_stats(isolate->allocator());
  // Note: unlike the wasm stub path, origins are allocated in info->zone().
  NodeOriginTable* node_positions = new (info->zone()) NodeOriginTable(graph);
  PipelineData data(&zone_stats, info, isolate, graph, schedule, nullptr,
                    node_positions, nullptr, options);
  std::unique_ptr<PipelineStatistics> pipeline_statistics;
  if (FLAG_turbo_stats || FLAG_turbo_stats_nvp) {
    pipeline_statistics.reset(new PipelineStatistics(
        info, isolate->GetTurboStatistics(), &zone_stats));
    pipeline_statistics->BeginPhaseKind("V8.TFTestCodegen");
  }

  PipelineImpl pipeline(&data);

  if (info->trace_turbo_json_enabled()) {
    TurboJsonFile json_of(info, std::ios_base::trunc);
    json_of << "{\"function\":\"" << info->GetDebugName().get()
            << "\", \"source\":\"\",\n\"phases\":[";
  }
  // TODO(rossberg): Should this really be untyped?
  pipeline.RunPrintAndVerify("V8.TFMachineCode", true);

  // Ensure we have a schedule. A caller-supplied {schedule} is used as-is.
  if (data.schedule() == nullptr) {
    pipeline.ComputeScheduledGraph();
  }

  Handle<Code> code;
  if (pipeline.GenerateCode(call_descriptor).ToHandle(&code) &&
      pipeline.CommitDependencies(code)) {
    return code;
  }
  return MaybeHandle<Code>();
}
2439 :
2440 : // static
2441 479915 : OptimizedCompilationJob* Pipeline::NewCompilationJob(
2442 : Isolate* isolate, Handle<JSFunction> function, bool has_script) {
2443 : Handle<SharedFunctionInfo> shared =
2444 : handle(function->shared(), function->GetIsolate());
2445 479915 : return new PipelineCompilationJob(isolate, shared, function);
2446 : }
2447 :
// static
// Compiles a single wasm function end-to-end: optimizes the machine graph in
// {mcgraph}, schedules it, selects instructions, allocates registers and
// assembles code. On success the WasmCompilationResult is stored on {info};
// on instruction-selection failure the function returns without setting it.
void Pipeline::GenerateCodeForWasmFunction(
    OptimizedCompilationInfo* info, wasm::WasmEngine* wasm_engine,
    MachineGraph* mcgraph, CallDescriptor* call_descriptor,
    SourcePositionTable* source_positions, NodeOriginTable* node_origins,
    wasm::FunctionBody function_body, const wasm::WasmModule* module,
    int function_index) {
  ZoneStats zone_stats(wasm_engine->allocator());
  std::unique_ptr<PipelineStatistics> pipeline_statistics(
      CreatePipelineStatistics(wasm_engine, function_body, module, info,
                               &zone_stats));
  // {instruction_buffer} must live longer than {PipelineData}, since
  // {PipelineData} will reference the {instruction_buffer} via the
  // {AssemblerBuffer} of the {Assembler} contained in the {CodeGenerator}.
  std::unique_ptr<wasm::WasmInstructionBuffer> instruction_buffer =
      wasm::WasmInstructionBuffer::New();
  PipelineData data(&zone_stats, wasm_engine, info, mcgraph,
                    pipeline_statistics.get(), source_positions, node_origins,
                    WasmAssemblerOptions());

  PipelineImpl pipeline(&data);

  if (data.info()->trace_turbo_json_enabled() ||
      data.info()->trace_turbo_graph_enabled()) {
    CodeTracer::Scope tracing_scope(data.GetCodeTracer());
    OFStream os(tracing_scope.file());
    os << "---------------------------------------------------\n"
       << "Begin compiling method " << data.info()->GetDebugName().get()
       << " using TurboFan" << std::endl;
  }

  pipeline.RunPrintAndVerify("V8.WasmMachineCode", true);

  data.BeginPhaseKind("V8.WasmOptimization");
  const bool is_asm_js = module->origin == wasm::kAsmJsOrigin;
  if (FLAG_turbo_splitting && !is_asm_js) {
    data.info()->MarkAsSplittingEnabled();
  }
  if (FLAG_wasm_opt || is_asm_js) {
    // Full optimization: dead-code elimination, machine-operator and
    // common-operator reduction plus value numbering. All reducers live in
    // the scope's zone and must not outlive it.
    PipelineRunScope scope(&data, "V8.WasmFullOptimization");
    GraphReducer graph_reducer(scope.zone(), data.graph(),
                               data.mcgraph()->Dead());
    DeadCodeElimination dead_code_elimination(&graph_reducer, data.graph(),
                                              data.common(), scope.zone());
    ValueNumberingReducer value_numbering(scope.zone(), data.graph()->zone());
    // asm.js requires signalling NaN semantics to be preserved.
    const bool allow_signalling_nan = is_asm_js;
    MachineOperatorReducer machine_reducer(&graph_reducer, data.mcgraph(),
                                           allow_signalling_nan);
    CommonOperatorReducer common_reducer(&graph_reducer, data.graph(),
                                         data.broker(), data.common(),
                                         data.machine(), scope.zone());
    AddReducer(&data, &graph_reducer, &dead_code_elimination);
    AddReducer(&data, &graph_reducer, &machine_reducer);
    AddReducer(&data, &graph_reducer, &common_reducer);
    AddReducer(&data, &graph_reducer, &value_numbering);
    graph_reducer.ReduceGraph();
  } else {
    // Baseline optimization: value numbering only.
    PipelineRunScope scope(&data, "V8.WasmBaseOptimization");
    GraphReducer graph_reducer(scope.zone(), data.graph(),
                               data.mcgraph()->Dead());
    ValueNumberingReducer value_numbering(scope.zone(), data.graph()->zone());
    AddReducer(&data, &graph_reducer, &value_numbering);
    graph_reducer.ReduceGraph();
  }
  pipeline.RunPrintAndVerify("V8.WasmOptimization", true);

  if (data.node_origins()) {
    data.node_origins()->RemoveDecorator();
  }

  pipeline.ComputeScheduledGraph();

  Linkage linkage(call_descriptor);
  // Bail out silently on selection failure; {info} keeps no result then.
  if (!pipeline.SelectInstructions(&linkage)) return;
  pipeline.AssembleCode(&linkage, instruction_buffer->CreateView());

  auto result = base::make_unique<wasm::WasmCompilationResult>();
  CodeGenerator* code_generator = pipeline.code_generator();
  code_generator->tasm()->GetCode(
      nullptr, &result->code_desc, code_generator->safepoint_table_builder(),
      static_cast<int>(code_generator->GetHandlerTableOffset()));

  result->instr_buffer = instruction_buffer->ReleaseBuffer();
  result->frame_slot_count = code_generator->frame()->GetTotalFrameSlotCount();
  result->tagged_parameter_slots = call_descriptor->GetTaggedParameterSlots();
  result->source_positions = code_generator->GetSourcePositionTable();
  result->protected_instructions = code_generator->GetProtectedInstructions();
  result->result_tier = wasm::ExecutionTier::kTurbofan;

  if (data.info()->trace_turbo_json_enabled()) {
    TurboJsonFile json_of(data.info(), std::ios_base::app);
    json_of << "{\"name\":\"disassembly\",\"type\":\"disassembly\",\"data\":\"";
#ifdef ENABLE_DISASSEMBLER
    std::stringstream disassembler_stream;
    Disassembler::Decode(
        nullptr, &disassembler_stream, result->code_desc.buffer,
        result->code_desc.buffer + result->code_desc.safepoint_table_offset,
        CodeReference(&result->code_desc));
    for (auto const c : disassembler_stream.str()) {
      json_of << AsEscapedUC16ForJSON(c);
    }
#endif  // ENABLE_DISASSEMBLER
    json_of << "\"}\n]";
    json_of << "\n}";
  }

  if (data.info()->trace_turbo_json_enabled() ||
      data.info()->trace_turbo_graph_enabled()) {
    CodeTracer::Scope tracing_scope(data.GetCodeTracer());
    OFStream os(tracing_scope.file());
    os << "---------------------------------------------------\n"
       << "Finished compiling method " << data.info()->GetDebugName().get()
       << " using TurboFan" << std::endl;
  }

  DCHECK(result->succeeded());
  info->SetWasmCompilationResult(std::move(result));
}
2566 :
// Testing entry point: runs only the register-allocation stage over an
// existing {sequence} using the given register {config}. Returns true if
// allocation succeeded (i.e. compilation did not fail).
bool Pipeline::AllocateRegistersForTesting(const RegisterConfiguration* config,
                                           InstructionSequence* sequence,
                                           bool run_verifier) {
  OptimizedCompilationInfo info(ArrayVector("testing"), sequence->zone(),
                                Code::STUB);
  ZoneStats zone_stats(sequence->isolate()->allocator());
  PipelineData data(&zone_stats, &info, sequence->isolate(), sequence);
  data.InitializeFrameData(nullptr);
  PipelineImpl pipeline(&data);
  pipeline.AllocateRegisters(config, nullptr, run_verifier);
  return !data.compilation_failed();
}
2579 :
// Trims the graph of dead nodes and computes a schedule for it, storing the
// result in the pipeline data. Must not be called when a schedule already
// exists.
void PipelineImpl::ComputeScheduledGraph() {
  PipelineData* data = this->data_;

  // We should only schedule the graph if it is not scheduled yet.
  DCHECK_NULL(data->schedule());

  Run<LateGraphTrimmingPhase>();
  RunPrintAndVerify(LateGraphTrimmingPhase::phase_name(), true);

  Run<ComputeSchedulePhase>();
  TraceSchedule(data->info(), data, data->schedule(), "schedule");
}
2592 :
// Lowers the scheduled graph to an instruction sequence and allocates
// registers. Returns false (after recording a bailout reason) if instruction
// selection or register allocation runs out of resources. As a side effect
// the graph zone is deleted once instructions have been selected.
bool PipelineImpl::SelectInstructions(Linkage* linkage) {
  auto call_descriptor = linkage->GetIncomingDescriptor();
  PipelineData* data = this->data_;

  // We should have a scheduled graph.
  DCHECK_NOT_NULL(data->graph());
  DCHECK_NOT_NULL(data->schedule());

  if (FLAG_turbo_profiling) {
    data->set_profiler_data(BasicBlockInstrumentor::Instrument(
        info(), data->graph(), data->schedule(), data->isolate()));
  }

  bool verify_stub_graph = data->verify_graph();
  // Jump optimization runs instruction selection twice, but the instruction
  // selector mutates nodes like swapping the inputs of a load, which can
  // violate the machine graph verification rules. So we skip the second
  // verification on a graph that already verified before.
  auto jump_opt = data->jump_optimization_info();
  if (jump_opt && jump_opt->is_optimizing()) {
    verify_stub_graph = false;
  }
  // Verify either when requested by the pipeline data or when the
  // --turbo-verify-machine-graph flag matches this compilation's debug name.
  if (verify_stub_graph ||
      (FLAG_turbo_verify_machine_graph != nullptr &&
       (!strcmp(FLAG_turbo_verify_machine_graph, "*") ||
        !strcmp(FLAG_turbo_verify_machine_graph, data->debug_name())))) {
    if (FLAG_trace_verify_csa) {
      AllowHandleDereference allow_deref;
      CodeTracer::Scope tracing_scope(data->GetCodeTracer());
      OFStream os(tracing_scope.file());
      os << "--------------------------------------------------\n"
         << "--- Verifying " << data->debug_name() << " generated by TurboFan\n"
         << "--------------------------------------------------\n"
         << *data->schedule()
         << "--------------------------------------------------\n"
         << "--- End of " << data->debug_name() << " generated by TurboFan\n"
         << "--------------------------------------------------\n";
    }
    Zone temp_zone(data->allocator(), ZONE_NAME);
    MachineGraphVerifier::Run(
        data->graph(), data->schedule(), linkage,
        data->info()->IsNotOptimizedFunctionOrWasmFunction(),
        data->debug_name(), &temp_zone);
  }

  data->InitializeInstructionSequence(call_descriptor);

  data->InitializeFrameData(call_descriptor);
  // Select and schedule instructions covering the scheduled graph.
  Run<InstructionSelectionPhase>(linkage);
  if (data->compilation_failed()) {
    info()->AbortOptimization(BailoutReason::kCodeGenerationFailed);
    data->EndPhaseKind();
    return false;
  }

  if (info()->trace_turbo_json_enabled() && !data->MayHaveUnverifiableGraph()) {
    AllowHandleDereference allow_deref;
    TurboCfgFile tcf(isolate());
    tcf << AsC1V("CodeGen", data->schedule(), data->source_positions(),
                 data->sequence());
  }

  if (info()->trace_turbo_json_enabled()) {
    std::ostringstream source_position_output;
    // Output source position information before the graph is deleted.
    if (data_->source_positions() != nullptr) {
      data_->source_positions()->PrintJson(source_position_output);
    } else {
      source_position_output << "{}";
    }
    source_position_output << ",\n\"NodeOrigins\" : ";
    data_->node_origins()->PrintJson(source_position_output);
    data_->set_source_position_output(source_position_output.str());
  }

  // The graph is no longer needed once instructions have been selected.
  data->DeleteGraphZone();

  data->BeginPhaseKind("V8.TFRegisterAllocation");

  bool run_verifier = FLAG_turbo_verify_allocation;

  // Allocate registers.
  if (call_descriptor->HasRestrictedAllocatableRegisters()) {
    RegList registers = call_descriptor->AllocatableRegisters();
    DCHECK_LT(0, NumRegs(registers));
    std::unique_ptr<const RegisterConfiguration> config;
    config.reset(RegisterConfiguration::RestrictGeneralRegisters(registers));
    AllocateRegisters(config.get(), call_descriptor, run_verifier);
  } else if (data->info()->GetPoisoningMitigationLevel() !=
             PoisoningMitigationLevel::kDontPoison) {
#ifdef V8_TARGET_ARCH_IA32
    FATAL("Poisoning is not supported on ia32.");
#else
    AllocateRegisters(RegisterConfiguration::Poisoning(), call_descriptor,
                      run_verifier);
#endif  // V8_TARGET_ARCH_IA32
  } else {
    AllocateRegisters(RegisterConfiguration::Default(), call_descriptor,
                      run_verifier);
  }

  // Verify the instruction sequence has the same hash in two stages.
  VerifyGeneratedCodeIsIdempotent();

  Run<FrameElisionPhase>();
  if (data->compilation_failed()) {
    info()->AbortOptimization(
        BailoutReason::kNotEnoughVirtualRegistersRegalloc);
    data->EndPhaseKind();
    return false;
  }

  // TODO(mtrofin): move this off to the register allocator.
  bool generate_frame_at_start =
      data_->sequence()->instruction_blocks().front()->must_construct_frame();
  // Optimize jumps.
  if (FLAG_turbo_jt) {
    Run<JumpThreadingPhase>(generate_frame_at_start);
  }

  data->EndPhaseKind();

  return true;
}
2718 :
// When jump optimization is active, instruction selection runs twice; this
// hashes the instruction sequence (block count, register count, per-
// instruction opcode/arity, and per-register representation) and checks that
// both runs produced the same hash. On the first (collecting) pass the hash
// is recorded; on the second pass it is compared via CHECK_EQ.
void PipelineImpl::VerifyGeneratedCodeIsIdempotent() {
  PipelineData* data = this->data_;
  JumpOptimizationInfo* jump_opt = data->jump_optimization_info();
  // No jump optimization => nothing to verify.
  if (jump_opt == nullptr) return;

  InstructionSequence* code = data->sequence();
  int instruction_blocks = code->InstructionBlockCount();
  int virtual_registers = code->VirtualRegisterCount();
  size_t hash_code = base::hash_combine(instruction_blocks, virtual_registers);
  for (auto instr : *code) {
    hash_code = base::hash_combine(hash_code, instr->opcode(),
                                   instr->InputCount(), instr->OutputCount());
  }
  for (int i = 0; i < virtual_registers; i++) {
    hash_code = base::hash_combine(hash_code, code->GetRepresentation(i));
  }
  if (jump_opt->is_collecting()) {
    jump_opt->set_hash_code(hash_code);
  } else {
    CHECK_EQ(hash_code, jump_opt->hash_code());
  }
}
2741 :
// Wrapper that prints a code generator's instruction-start offsets as a JSON
// object (see the operator<< overload below). Non-owning view.
struct InstructionStartsAsJSON {
  const ZoneVector<int>* instr_starts;
};
2745 :
2746 9 : std::ostream& operator<<(std::ostream& out, const InstructionStartsAsJSON& s) {
2747 9 : out << ", \"instructionOffsetToPCOffset\": {";
2748 : bool need_comma = false;
2749 198 : for (size_t i = 0; i < s.instr_starts->size(); ++i) {
2750 60 : if (need_comma) out << ", ";
2751 120 : int offset = (*s.instr_starts)[i];
2752 60 : out << "\"" << i << "\":" << offset;
2753 : need_comma = true;
2754 : }
2755 9 : out << "}";
2756 9 : return out;
2757 : }
2758 :
// Runs the code generator over the selected instruction sequence, optionally
// writing into a caller-supplied assembler {buffer} (used by wasm). Deletes
// the instruction zone afterwards, so the sequence is gone when this returns.
void PipelineImpl::AssembleCode(Linkage* linkage,
                                std::unique_ptr<AssemblerBuffer> buffer) {
  PipelineData* data = this->data_;
  data->BeginPhaseKind("V8.TFCodeGeneration");
  data->InitializeCodeGenerator(linkage, std::move(buffer));

  Run<AssembleCodePhase>();
  if (data->info()->trace_turbo_json_enabled()) {
    TurboJsonFile json_of(data->info(), std::ios_base::app);
    json_of << "{\"name\":\"code generation\""
            << ", \"type\":\"instructions\""
            << InstructionStartsAsJSON{&data->code_generator()->instr_starts()};
    json_of << "},\n";
  }
  data->DeleteInstructionZone();
  data->EndPhaseKind();
}
2776 :
// Wrapper that prints a code generator's block-start offsets as a JSON
// object (see the operator<< overload below). Non-owning view.
struct BlockStartsAsJSON {
  const ZoneVector<int>* block_starts;
};
2780 :
2781 1 : std::ostream& operator<<(std::ostream& out, const BlockStartsAsJSON& s) {
2782 1 : out << ", \"blockIdToOffset\": {";
2783 : bool need_comma = false;
2784 17 : for (size_t i = 0; i < s.block_starts->size(); ++i) {
2785 5 : if (need_comma) out << ", ";
2786 10 : int offset = (*s.block_starts)[i];
2787 5 : out << "\"" << i << "\":" << offset;
2788 : need_comma = true;
2789 : }
2790 1 : out << "},";
2791 1 : return out;
2792 : }
2793 :
// Produces the final Code object from the generated machine code. When
// {retire_broker} is true the JSHeapBroker is retired first (callers that
// keep the broker pass false). Returns the empty MaybeHandle if the finalize
// phase did not produce code.
MaybeHandle<Code> PipelineImpl::FinalizeCode(bool retire_broker) {
  PipelineData* data = this->data_;
  if (data->broker() && retire_broker) {
    data->broker()->Retire();
  }
  Run<FinalizeCodePhase>();

  MaybeHandle<Code> maybe_code = data->code();
  Handle<Code> code;
  if (!maybe_code.ToHandle(&code)) {
    return maybe_code;
  }

  // Attach disassembly to the profiler data when basic-block profiling is on.
  if (data->profiler_data()) {
#ifdef ENABLE_DISASSEMBLER
    std::ostringstream os;
    code->Disassemble(nullptr, os);
    data->profiler_data()->SetCode(&os);
#endif  // ENABLE_DISASSEMBLER
  }

  info()->SetCode(code);
  PrintCode(isolate(), code, info());

  if (info()->trace_turbo_json_enabled()) {
    TurboJsonFile json_of(info(), std::ios_base::app);

    json_of << "{\"name\":\"disassembly\",\"type\":\"disassembly\""
            << BlockStartsAsJSON{&data->code_generator()->block_starts()}
            << "\"data\":\"";
#ifdef ENABLE_DISASSEMBLER
    std::stringstream disassembly_stream;
    code->Disassemble(nullptr, disassembly_stream);
    std::string disassembly_string(disassembly_stream.str());
    for (const auto& c : disassembly_string) {
      json_of << AsEscapedUC16ForJSON(c);
    }
#endif  // ENABLE_DISASSEMBLER
    json_of << "\"}\n],\n";
    json_of << "\"nodePositions\":";
    json_of << data->source_position_output() << ",\n";
    JsonPrintAllSourceWithPositions(json_of, data->info(), isolate());
    json_of << "\n}";
  }
  if (info()->trace_turbo_json_enabled() ||
      info()->trace_turbo_graph_enabled()) {
    CodeTracer::Scope tracing_scope(data->GetCodeTracer());
    OFStream os(tracing_scope.file());
    os << "---------------------------------------------------\n"
       << "Finished compiling method " << info()->GetDebugName().get()
       << " using TurboFan" << std::endl;
  }
  return code;
}
2848 :
2849 1183107 : bool PipelineImpl::SelectInstructionsAndAssemble(
2850 : CallDescriptor* call_descriptor) {
2851 : Linkage linkage(call_descriptor);
2852 :
2853 : // Perform instruction selection and register allocation.
2854 1183107 : if (!SelectInstructions(&linkage)) return false;
2855 :
2856 : // Generate the final machine code.
2857 2366212 : AssembleCode(&linkage);
2858 1183106 : return true;
2859 : }
2860 :
2861 1115839 : MaybeHandle<Code> PipelineImpl::GenerateCode(CallDescriptor* call_descriptor) {
2862 1115839 : if (!SelectInstructionsAndAssemble(call_descriptor))
2863 0 : return MaybeHandle<Code>();
2864 1115837 : return FinalizeCode();
2865 : }
2866 :
2867 0 : bool PipelineImpl::CommitDependencies(Handle<Code> code) {
2868 1986544 : return data_->dependencies() == nullptr ||
2869 463997 : data_->dependencies()->Commit(code);
2870 : }
2871 :
2872 : namespace {
2873 :
// Dumps the current instruction sequence under {phase_name}: as JSON when
// --trace-turbo-json is enabled, and as human-readable text when
// --trace-turbo-graph is enabled. No-op otherwise.
void TraceSequence(OptimizedCompilationInfo* info, PipelineData* data,
                   const char* phase_name) {
  if (info->trace_turbo_json_enabled()) {
    AllowHandleDereference allow_deref;
    TurboJsonFile json_of(info, std::ios_base::app);
    json_of << "{\"name\":\"" << phase_name << "\",\"type\":\"sequence\",";
    json_of << InstructionSequenceAsJSON{data->sequence()};
    json_of << "},\n";
  }
  if (info->trace_turbo_graph_enabled()) {
    AllowHandleDereference allow_deref;
    CodeTracer::Scope tracing_scope(data->GetCodeTracer());
    OFStream os(tracing_scope.file());
    os << "----- Instruction sequence " << phase_name << " -----\n"
       << *data->sequence();
  }
}
2892 : } // namespace
2893 :
// Runs the full register-allocation pipeline over the instruction sequence:
// constraint meeting, live-range construction, (optional) splintering,
// linear-scan allocation for general and FP registers, spill-slot
// assignment, reference-map population and move optimization. When
// {run_verifier} is set, a RegisterAllocatorVerifier checks the assignment
// both after CommitAssignmentPhase and at the end.
void PipelineImpl::AllocateRegisters(const RegisterConfiguration* config,
                                     CallDescriptor* call_descriptor,
                                     bool run_verifier) {
  PipelineData* data = this->data_;
  // Don't track usage for this zone in compiler stats.
  std::unique_ptr<Zone> verifier_zone;
  RegisterAllocatorVerifier* verifier = nullptr;
  if (run_verifier) {
    verifier_zone.reset(new Zone(data->allocator(), ZONE_NAME));
    // Placement-new into the verifier zone; the zone owns the storage.
    verifier = new (verifier_zone.get()) RegisterAllocatorVerifier(
        verifier_zone.get(), config, data->sequence());
  }

#ifdef DEBUG
  data_->sequence()->ValidateEdgeSplitForm();
  data_->sequence()->ValidateDeferredBlockEntryPaths();
  data_->sequence()->ValidateDeferredBlockExitPaths();
#endif

  RegisterAllocationFlags flags;
  if (data->info()->is_turbo_control_flow_aware_allocation()) {
    flags |= RegisterAllocationFlag::kTurboControlFlowAwareAllocation;
  }
  if (data->info()->is_turbo_preprocess_ranges()) {
    flags |= RegisterAllocationFlag::kTurboPreprocessRanges;
  }
  data->InitializeRegisterAllocationData(config, call_descriptor, flags);
  if (info()->is_osr()) data->osr_helper()->SetupFrame(data->frame());

  Run<MeetRegisterConstraintsPhase>();
  Run<ResolvePhisPhase>();
  Run<BuildLiveRangesPhase>();
  Run<BuildBundlesPhase>();

  TraceSequence(info(), data, "before register allocation");
  if (verifier != nullptr) {
    CHECK(!data->register_allocation_data()->ExistsUseWithoutDefinition());
    CHECK(data->register_allocation_data()
              ->RangesDefinedInDeferredStayInDeferred());
  }

  if (info()->trace_turbo_json_enabled() && !data->MayHaveUnverifiableGraph()) {
    TurboCfgFile tcf(isolate());
    tcf << AsC1VRegisterAllocationData("PreAllocation",
                                       data->register_allocation_data());
  }

  // Splinter live ranges before allocation when preprocessing is enabled;
  // the splinters are merged back after allocation (see below).
  if (info()->is_turbo_preprocess_ranges()) {
    Run<SplinterLiveRangesPhase>();
    if (info()->trace_turbo_json_enabled() &&
        !data->MayHaveUnverifiableGraph()) {
      TurboCfgFile tcf(isolate());
      tcf << AsC1VRegisterAllocationData("PostSplinter",
                                         data->register_allocation_data());
    }
  }

  Run<AllocateGeneralRegistersPhase<LinearScanAllocator>>();

  // FP allocation only runs when the sequence actually uses FP registers.
  if (data->sequence()->HasFPVirtualRegisters()) {
    Run<AllocateFPRegistersPhase<LinearScanAllocator>>();
  }

  if (info()->is_turbo_preprocess_ranges()) {
    Run<MergeSplintersPhase>();
  }

  Run<DecideSpillingModePhase>();
  Run<AssignSpillSlotsPhase>();
  Run<CommitAssignmentPhase>();

  // TODO(chromium:725559): remove this check once
  // we understand the cause of the bug. We keep just the
  // check at the end of the allocation.
  if (verifier != nullptr) {
    verifier->VerifyAssignment("Immediately after CommitAssignmentPhase.");
  }

  Run<PopulateReferenceMapsPhase>();

  Run<ConnectRangesPhase>();

  Run<ResolveControlFlowPhase>();
  if (FLAG_turbo_move_optimization) {
    Run<OptimizeMovesPhase>();
  }
  Run<LocateSpillSlotsPhase>();

  TraceSequence(info(), data, "after register allocation");

  if (verifier != nullptr) {
    verifier->VerifyAssignment("End of regalloc pipeline.");
    verifier->VerifyGapMoves();
  }

  if (info()->trace_turbo_json_enabled() && !data->MayHaveUnverifiableGraph()) {
    TurboCfgFile tcf(isolate());
    tcf << AsC1VRegisterAllocationData("CodeGen",
                                       data->register_allocation_data());
  }

  data->DeleteRegisterAllocationZone();
}
2997 :
// Accessor for the compilation info held by the pipeline data.
OptimizedCompilationInfo* PipelineImpl::info() const { return data_->info(); }
2999 :
// Accessor for the isolate held by the pipeline data.
Isolate* PipelineImpl::isolate() const { return data_->isolate(); }
3001 :
// Accessor for the code generator created during AssembleCode.
CodeGenerator* PipelineImpl::code_generator() const {
  return data_->code_generator();
}
3005 :
3006 : } // namespace compiler
3007 : } // namespace internal
3008 122004 : } // namespace v8
|