Line data Source code
1 : // Copyright 2014 the V8 project authors. All rights reserved.
2 : // Use of this source code is governed by a BSD-style license that can be
3 : // found in the LICENSE file.
4 :
5 : #include "src/compiler/pipeline.h"
6 :
7 : #include <fstream> // NOLINT(readability/streams)
8 : #include <memory>
9 : #include <sstream>
10 :
11 : #include "src/assembler-inl.h"
12 : #include "src/base/adapters.h"
13 : #include "src/base/optional.h"
14 : #include "src/base/platform/elapsed-timer.h"
15 : #include "src/bootstrapper.h"
16 : #include "src/compilation-info.h"
17 : #include "src/compiler.h"
18 : #include "src/compiler/basic-block-instrumentor.h"
19 : #include "src/compiler/branch-elimination.h"
20 : #include "src/compiler/bytecode-graph-builder.h"
21 : #include "src/compiler/checkpoint-elimination.h"
22 : #include "src/compiler/code-generator.h"
23 : #include "src/compiler/common-operator-reducer.h"
24 : #include "src/compiler/compiler-source-position-table.h"
25 : #include "src/compiler/control-flow-optimizer.h"
26 : #include "src/compiler/dead-code-elimination.h"
27 : #include "src/compiler/effect-control-linearizer.h"
28 : #include "src/compiler/escape-analysis-reducer.h"
29 : #include "src/compiler/escape-analysis.h"
30 : #include "src/compiler/frame-elider.h"
31 : #include "src/compiler/graph-trimmer.h"
32 : #include "src/compiler/graph-visualizer.h"
33 : #include "src/compiler/instruction-selector.h"
34 : #include "src/compiler/instruction.h"
35 : #include "src/compiler/js-builtin-reducer.h"
36 : #include "src/compiler/js-call-reducer.h"
37 : #include "src/compiler/js-context-specialization.h"
38 : #include "src/compiler/js-create-lowering.h"
39 : #include "src/compiler/js-generic-lowering.h"
40 : #include "src/compiler/js-inlining-heuristic.h"
41 : #include "src/compiler/js-intrinsic-lowering.h"
42 : #include "src/compiler/js-native-context-specialization.h"
43 : #include "src/compiler/js-typed-lowering.h"
44 : #include "src/compiler/jump-threading.h"
45 : #include "src/compiler/live-range-separator.h"
46 : #include "src/compiler/load-elimination.h"
47 : #include "src/compiler/loop-analysis.h"
48 : #include "src/compiler/loop-peeling.h"
49 : #include "src/compiler/loop-variable-optimizer.h"
50 : #include "src/compiler/machine-graph-verifier.h"
51 : #include "src/compiler/machine-operator-reducer.h"
52 : #include "src/compiler/memory-optimizer.h"
53 : #include "src/compiler/move-optimizer.h"
54 : #include "src/compiler/osr.h"
55 : #include "src/compiler/pipeline-statistics.h"
56 : #include "src/compiler/redundancy-elimination.h"
57 : #include "src/compiler/register-allocator-verifier.h"
58 : #include "src/compiler/register-allocator.h"
59 : #include "src/compiler/schedule.h"
60 : #include "src/compiler/scheduler.h"
61 : #include "src/compiler/select-lowering.h"
62 : #include "src/compiler/simplified-lowering.h"
63 : #include "src/compiler/simplified-operator-reducer.h"
64 : #include "src/compiler/simplified-operator.h"
65 : #include "src/compiler/store-store-elimination.h"
66 : #include "src/compiler/typed-optimization.h"
67 : #include "src/compiler/typer.h"
68 : #include "src/compiler/value-numbering-reducer.h"
69 : #include "src/compiler/verifier.h"
70 : #include "src/compiler/zone-stats.h"
71 : #include "src/isolate-inl.h"
72 : #include "src/ostreams.h"
73 : #include "src/parsing/parse-info.h"
74 : #include "src/register-configuration.h"
75 : #include "src/trap-handler/trap-handler.h"
76 : #include "src/utils.h"
77 :
78 : namespace v8 {
79 : namespace internal {
80 : namespace compiler {
81 :
82 : class PipelineData {
83 : public:
84 : // For main entry point.
85 461338 : PipelineData(ZoneStats* zone_stats, CompilationInfo* info,
86 : PipelineStatistics* pipeline_statistics)
87 : : isolate_(info->isolate()),
88 : info_(info),
89 : debug_name_(info_->GetDebugName()),
90 : may_have_unverifiable_graph_(false),
91 : zone_stats_(zone_stats),
92 : pipeline_statistics_(pipeline_statistics),
93 : graph_zone_scope_(zone_stats_, ZONE_NAME),
94 461339 : graph_zone_(graph_zone_scope_.zone()),
95 : instruction_zone_scope_(zone_stats_, ZONE_NAME),
96 461338 : instruction_zone_(instruction_zone_scope_.zone()),
97 : codegen_zone_scope_(zone_stats_, ZONE_NAME),
98 461339 : codegen_zone_(codegen_zone_scope_.zone()),
99 : register_allocation_zone_scope_(zone_stats_, ZONE_NAME),
100 2768030 : register_allocation_zone_(register_allocation_zone_scope_.zone()) {
101 : PhaseScope scope(pipeline_statistics, "init pipeline data");
102 922678 : graph_ = new (graph_zone_) Graph(graph_zone_);
103 922678 : source_positions_ = new (graph_zone_) SourcePositionTable(graph_);
104 922674 : simplified_ = new (graph_zone_) SimplifiedOperatorBuilder(graph_zone_);
105 : machine_ = new (graph_zone_) MachineOperatorBuilder(
106 : graph_zone_, MachineType::PointerRepresentation(),
107 : InstructionSelector::SupportedMachineOperatorFlags(),
108 922676 : InstructionSelector::AlignmentRequirements());
109 922674 : common_ = new (graph_zone_) CommonOperatorBuilder(graph_zone_);
110 922676 : javascript_ = new (graph_zone_) JSOperatorBuilder(graph_zone_);
111 : jsgraph_ = new (graph_zone_)
112 1384014 : JSGraph(isolate_, graph_, common_, javascript_, simplified_, machine_);
113 461338 : }
114 :
115 : // For WebAssembly compile entry point.
116 1139643 : PipelineData(ZoneStats* zone_stats, CompilationInfo* info, JSGraph* jsgraph,
117 : PipelineStatistics* pipeline_statistics,
118 : SourcePositionTable* source_positions,
119 : ZoneVector<trap_handler::ProtectedInstructionData>*
120 : protected_instructions)
121 : : isolate_(info->isolate()),
122 : info_(info),
123 : debug_name_(info_->GetDebugName()),
124 : zone_stats_(zone_stats),
125 : pipeline_statistics_(pipeline_statistics),
126 : graph_zone_scope_(zone_stats_, ZONE_NAME),
127 : graph_(jsgraph->graph()),
128 : source_positions_(source_positions),
129 : machine_(jsgraph->machine()),
130 : common_(jsgraph->common()),
131 : javascript_(jsgraph->javascript()),
132 : jsgraph_(jsgraph),
133 : instruction_zone_scope_(zone_stats_, ZONE_NAME),
134 227929 : instruction_zone_(instruction_zone_scope_.zone()),
135 : codegen_zone_scope_(zone_stats_, ZONE_NAME),
136 227925 : codegen_zone_(codegen_zone_scope_.zone()),
137 : register_allocation_zone_scope_(zone_stats_, ZONE_NAME),
138 227967 : register_allocation_zone_(register_allocation_zone_scope_.zone()),
139 2279406 : protected_instructions_(protected_instructions) {
140 227971 : }
141 :
142 : // For machine graph testing entry point.
143 630227 : PipelineData(ZoneStats* zone_stats, CompilationInfo* info, Graph* graph,
144 : Schedule* schedule, SourcePositionTable* source_positions,
145 : JumpOptimizationInfo* jump_opt)
146 : : isolate_(info->isolate()),
147 : info_(info),
148 : debug_name_(info_->GetDebugName()),
149 : zone_stats_(zone_stats),
150 : graph_zone_scope_(zone_stats_, ZONE_NAME),
151 : graph_(graph),
152 : source_positions_(source_positions),
153 : schedule_(schedule),
154 : instruction_zone_scope_(zone_stats_, ZONE_NAME),
155 630227 : instruction_zone_(instruction_zone_scope_.zone()),
156 : codegen_zone_scope_(zone_stats_, ZONE_NAME),
157 630227 : codegen_zone_(codegen_zone_scope_.zone()),
158 : register_allocation_zone_scope_(zone_stats_, ZONE_NAME),
159 630227 : register_allocation_zone_(register_allocation_zone_scope_.zone()),
160 3781362 : jump_optimization_info_(jump_opt) {}
161 : // For register allocation testing entry point.
162 42 : PipelineData(ZoneStats* zone_stats, CompilationInfo* info,
163 42 : InstructionSequence* sequence)
164 : : isolate_(info->isolate()),
165 : info_(info),
166 : debug_name_(info_->GetDebugName()),
167 : zone_stats_(zone_stats),
168 : graph_zone_scope_(zone_stats_, ZONE_NAME),
169 : instruction_zone_scope_(zone_stats_, ZONE_NAME),
170 : instruction_zone_(sequence->zone()),
171 : sequence_(sequence),
172 : codegen_zone_scope_(zone_stats_, ZONE_NAME),
173 42 : codegen_zone_(codegen_zone_scope_.zone()),
174 : register_allocation_zone_scope_(zone_stats_, ZONE_NAME),
175 252 : register_allocation_zone_(register_allocation_zone_scope_.zone()) {
176 42 : }
177 :
178 1319587 : ~PipelineData() {
179 1319587 : delete code_generator_; // Must happen before zones are destroyed.
180 1319589 : code_generator_ = nullptr;
181 1319589 : DeleteRegisterAllocationZone();
182 1319589 : DeleteInstructionZone();
183 1319589 : DeleteCodegenZone();
184 1319589 : DeleteGraphZone();
185 1319588 : }
186 :
187 : Isolate* isolate() const { return isolate_; }
188 : CompilationInfo* info() const { return info_; }
189 : ZoneStats* zone_stats() const { return zone_stats_; }
190 : PipelineStatistics* pipeline_statistics() { return pipeline_statistics_; }
191 : OsrHelper* osr_helper() { return &(*osr_helper_); }
192 : bool compilation_failed() const { return compilation_failed_; }
193 10 : void set_compilation_failed() { compilation_failed_ = true; }
194 :
195 : bool verify_graph() const { return verify_graph_; }
196 74492 : void set_verify_graph(bool value) { verify_graph_ = value; }
197 :
198 : Handle<Code> code() { return code_; }
199 : void set_code(Handle<Code> code) {
200 : DCHECK(code_.is_null());
201 1300509 : code_ = code;
202 : }
203 :
204 : CodeGenerator* code_generator() const { return code_generator_; }
205 :
206 : // RawMachineAssembler generally produces graphs which cannot be verified.
207 : bool MayHaveUnverifiableGraph() const { return may_have_unverifiable_graph_; }
208 :
209 : Zone* graph_zone() const { return graph_zone_; }
210 : Graph* graph() const { return graph_; }
211 : SourcePositionTable* source_positions() const { return source_positions_; }
212 : MachineOperatorBuilder* machine() const { return machine_; }
213 : CommonOperatorBuilder* common() const { return common_; }
214 : JSOperatorBuilder* javascript() const { return javascript_; }
215 : JSGraph* jsgraph() const { return jsgraph_; }
216 2216903 : Handle<Context> native_context() const {
217 4433812 : return handle(info()->native_context(), isolate());
218 : }
219 : Handle<JSGlobalObject> global_object() const {
220 : return handle(info()->global_object(), isolate());
221 : }
222 :
223 : Schedule* schedule() const { return schedule_; }
224 : void set_schedule(Schedule* schedule) {
225 : DCHECK(!schedule_);
226 1012887 : schedule_ = schedule;
227 : }
228 : void reset_schedule() { schedule_ = nullptr; }
229 :
230 : Zone* instruction_zone() const { return instruction_zone_; }
231 : Zone* codegen_zone() const { return codegen_zone_; }
232 : InstructionSequence* sequence() const { return sequence_; }
233 : Frame* frame() const { return frame_; }
234 :
235 : Zone* register_allocation_zone() const { return register_allocation_zone_; }
236 : RegisterAllocationData* register_allocation_data() const {
237 : return register_allocation_data_;
238 : }
239 :
240 : BasicBlockProfiler::Data* profiler_data() const { return profiler_data_; }
241 : void set_profiler_data(BasicBlockProfiler::Data* profiler_data) {
242 12 : profiler_data_ = profiler_data;
243 : }
244 :
245 : std::string const& source_position_output() const {
246 : return source_position_output_;
247 : }
248 : void set_source_position_output(std::string const& source_position_output) {
249 0 : source_position_output_ = source_position_output;
250 : }
251 :
252 : ZoneVector<trap_handler::ProtectedInstructionData>* protected_instructions()
253 : const {
254 : return protected_instructions_;
255 : }
256 :
257 : JumpOptimizationInfo* jump_optimization_info() const {
258 : return jump_optimization_info_;
259 : }
260 :
261 2621143 : void DeleteGraphZone() {
262 5242287 : if (graph_zone_ == nullptr) return;
263 461338 : graph_zone_scope_.Destroy();
264 461339 : graph_zone_ = nullptr;
265 461339 : graph_ = nullptr;
266 461339 : source_positions_ = nullptr;
267 461339 : simplified_ = nullptr;
268 461339 : machine_ = nullptr;
269 461339 : common_ = nullptr;
270 461339 : javascript_ = nullptr;
271 461339 : jsgraph_ = nullptr;
272 461339 : schedule_ = nullptr;
273 : }
274 :
275 2621080 : void DeleteInstructionZone() {
276 5242196 : if (instruction_zone_ == nullptr) return;
277 1319551 : instruction_zone_scope_.Destroy();
278 1319587 : instruction_zone_ = nullptr;
279 1319587 : sequence_ = nullptr;
280 : }
281 :
282 1319589 : void DeleteCodegenZone() {
283 2639178 : if (codegen_zone_ == nullptr) return;
284 1319589 : codegen_zone_scope_.Destroy();
285 1319589 : codegen_zone_ = nullptr;
286 1319589 : frame_ = nullptr;
287 : }
288 :
289 2621173 : void DeleteRegisterAllocationZone() {
290 5242351 : if (register_allocation_zone_ == nullptr) return;
291 1319583 : register_allocation_zone_scope_.Destroy();
292 1319588 : register_allocation_zone_ = nullptr;
293 1319588 : register_allocation_data_ = nullptr;
294 : }
295 :
296 3904669 : void InitializeInstructionSequence(const CallDescriptor* descriptor) {
297 : DCHECK_NULL(sequence_);
298 : InstructionBlocks* instruction_blocks =
299 : InstructionSequence::InstructionBlocksFor(instruction_zone(),
300 1301533 : schedule());
301 : sequence_ = new (instruction_zone()) InstructionSequence(
302 2603136 : info()->isolate(), instruction_zone(), instruction_blocks);
303 2603051 : if (descriptor && descriptor->RequiresFrameAsIncoming()) {
304 848640 : sequence_->instruction_blocks()[0]->mark_needs_frame();
305 : } else {
306 : DCHECK_EQ(0u, descriptor->CalleeSavedFPRegisters());
307 : DCHECK_EQ(0u, descriptor->CalleeSavedRegisters());
308 : }
309 1301524 : }
310 :
311 2603141 : void InitializeFrameData(CallDescriptor* descriptor) {
312 : DCHECK_NULL(frame_);
313 : int fixed_frame_size = 0;
314 1301574 : if (descriptor != nullptr) {
315 1301539 : fixed_frame_size = descriptor->CalculateFixedFrameSize();
316 : }
317 1301610 : frame_ = new (codegen_zone()) Frame(fixed_frame_size);
318 1301580 : }
319 :
320 1301554 : void InitializeRegisterAllocationData(const RegisterConfiguration* config,
321 1301554 : CallDescriptor* descriptor) {
322 : DCHECK_NULL(register_allocation_data_);
323 : register_allocation_data_ = new (register_allocation_zone())
324 : RegisterAllocationData(config, register_allocation_zone(), frame(),
325 1301600 : sequence(), debug_name());
326 1301570 : }
327 :
328 5809 : void InitializeOsrHelper() {
329 : DCHECK(!osr_helper_.has_value());
330 5809 : osr_helper_.emplace(info());
331 : }
332 :
333 : void set_start_source_position(int position) {
334 : DCHECK_EQ(start_source_position_, kNoSourcePosition);
335 439073 : start_source_position_ = position;
336 : }
337 :
338 1301534 : void InitializeCodeGenerator(Linkage* linkage) {
339 : DCHECK_NULL(code_generator_);
340 : code_generator_ = new CodeGenerator(
341 : codegen_zone(), frame(), linkage, sequence(), info(), osr_helper_,
342 1301534 : start_source_position_, jump_optimization_info_);
343 1301481 : }
344 :
345 3933194 : void BeginPhaseKind(const char* phase_kind_name) {
346 3933194 : if (pipeline_statistics() != nullptr) {
347 0 : pipeline_statistics()->BeginPhaseKind(phase_kind_name);
348 : }
349 : }
350 :
351 1744951 : void EndPhaseKind() {
352 1744951 : if (pipeline_statistics() != nullptr) {
353 0 : pipeline_statistics()->EndPhaseKind();
354 : }
355 : }
356 :
357 : const char* debug_name() const { return debug_name_.get(); }
358 :
359 : private:
360 : Isolate* const isolate_;
361 : CompilationInfo* const info_;
362 : std::unique_ptr<char[]> debug_name_;
363 : bool may_have_unverifiable_graph_ = true;
364 : ZoneStats* const zone_stats_;
365 : PipelineStatistics* pipeline_statistics_ = nullptr;
366 : bool compilation_failed_ = false;
367 : bool verify_graph_ = false;
368 : int start_source_position_ = kNoSourcePosition;
369 : base::Optional<OsrHelper> osr_helper_;
370 : Handle<Code> code_ = Handle<Code>::null();
371 : CodeGenerator* code_generator_ = nullptr;
372 :
373 : // All objects in the following group of fields are allocated in graph_zone_.
374 : // They are all set to nullptr when the graph_zone_ is destroyed.
375 : ZoneStats::Scope graph_zone_scope_;
376 : Zone* graph_zone_ = nullptr;
377 : Graph* graph_ = nullptr;
378 : SourcePositionTable* source_positions_ = nullptr;
379 : SimplifiedOperatorBuilder* simplified_ = nullptr;
380 : MachineOperatorBuilder* machine_ = nullptr;
381 : CommonOperatorBuilder* common_ = nullptr;
382 : JSOperatorBuilder* javascript_ = nullptr;
383 : JSGraph* jsgraph_ = nullptr;
384 : Schedule* schedule_ = nullptr;
385 :
386 : // All objects in the following group of fields are allocated in
387 : // instruction_zone_. They are all set to nullptr when the instruction_zone_
388 : // is destroyed.
389 : ZoneStats::Scope instruction_zone_scope_;
390 : Zone* instruction_zone_;
391 : InstructionSequence* sequence_ = nullptr;
392 :
393 : // All objects in the following group of fields are allocated in
394 : // codegen_zone_. They are all set to nullptr when the codegen_zone_
395 : // is destroyed.
396 : ZoneStats::Scope codegen_zone_scope_;
397 : Zone* codegen_zone_;
398 : Frame* frame_ = nullptr;
399 :
400 : // All objects in the following group of fields are allocated in
401 : // register_allocation_zone_. They are all set to nullptr when the zone is
402 : // destroyed.
403 : ZoneStats::Scope register_allocation_zone_scope_;
404 : Zone* register_allocation_zone_;
405 : RegisterAllocationData* register_allocation_data_ = nullptr;
406 :
407 : // Basic block profiling support.
408 : BasicBlockProfiler::Data* profiler_data_ = nullptr;
409 :
410 : // Source position output for --trace-turbo.
411 : std::string source_position_output_;
412 :
413 : ZoneVector<trap_handler::ProtectedInstructionData>* protected_instructions_ =
414 : nullptr;
415 :
416 : JumpOptimizationInfo* jump_optimization_info_ = nullptr;
417 :
418 : DISALLOW_COPY_AND_ASSIGN(PipelineData);
419 : };
420 :
421 : class PipelineImpl final {
422 : public:
423 1319577 : explicit PipelineImpl(PipelineData* data) : data_(data) {}
424 :
425 : // Helpers for executing pipeline phases.
426 : template <typename Phase>
427 : void Run();
428 : template <typename Phase, typename Arg0>
429 : void Run(Arg0 arg_0);
430 : template <typename Phase, typename Arg0, typename Arg1>
431 : void Run(Arg0 arg_0, Arg1 arg_1);
432 :
433 : // Run the graph creation and initial optimization passes.
434 : bool CreateGraph();
435 :
436 : // Run the concurrent optimization passes.
437 : bool OptimizeGraph(Linkage* linkage);
438 :
439 : // Run the code assembly pass.
440 : void AssembleCode(Linkage* linkage);
441 :
442 : // Run the code finalization pass.
443 : Handle<Code> FinalizeCode();
444 :
445 : bool ScheduleAndSelectInstructions(Linkage* linkage, bool trim_graph);
446 : void RunPrintAndVerify(const char* phase, bool untyped = false);
447 : Handle<Code> ScheduleAndGenerateCode(CallDescriptor* call_descriptor);
448 : void AllocateRegisters(const RegisterConfiguration* config,
449 : CallDescriptor* descriptor, bool run_verifier);
450 :
451 : CompilationInfo* info() const;
452 : Isolate* isolate() const;
453 :
454 : PipelineData* const data_;
455 : };
456 :
457 : namespace {
458 :
459 : // Print function's source if it was not printed before.
460 : // Return a sequential id under which this function was printed.
461 0 : int PrintFunctionSource(CompilationInfo* info,
462 0 : std::vector<Handle<SharedFunctionInfo>>* printed,
463 : int inlining_id, Handle<SharedFunctionInfo> shared) {
464 : // Outermost function has source id -1 and inlined functions take
465 : // source ids starting from 0.
466 : int source_id = -1;
467 0 : if (inlining_id != SourcePosition::kNotInlined) {
468 0 : for (unsigned i = 0; i < printed->size(); i++) {
469 0 : if (printed->at(i).is_identical_to(shared)) {
470 0 : return i;
471 : }
472 : }
473 0 : source_id = static_cast<int>(printed->size());
474 0 : printed->push_back(shared);
475 : }
476 :
477 : Isolate* isolate = info->isolate();
478 0 : if (!shared->script()->IsUndefined(isolate)) {
479 : Handle<Script> script(Script::cast(shared->script()), isolate);
480 :
481 0 : if (!script->source()->IsUndefined(isolate)) {
482 0 : CodeTracer::Scope tracing_scope(isolate->GetCodeTracer());
483 : Object* source_name = script->name();
484 0 : OFStream os(tracing_scope.file());
485 0 : os << "--- FUNCTION SOURCE (";
486 0 : if (source_name->IsString()) {
487 0 : os << String::cast(source_name)->ToCString().get() << ":";
488 : }
489 0 : os << shared->DebugName()->ToCString().get() << ") id{";
490 0 : os << info->optimization_id() << "," << source_id << "} start{";
491 0 : os << shared->start_position() << "} ---\n";
492 : {
493 : DisallowHeapAllocation no_allocation;
494 : int start = shared->start_position();
495 0 : int len = shared->end_position() - start;
496 : String::SubStringRange source(String::cast(script->source()), start,
497 : len);
498 0 : for (const auto& c : source) {
499 0 : os << AsReversiblyEscapedUC16(c);
500 : }
501 : }
502 :
503 0 : os << "\n--- END ---\n";
504 : }
505 : }
506 :
507 0 : return source_id;
508 : }
509 :
510 : // Print information for the given inlining: which function was inlined and
511 : // where the inlining occurred.
512 0 : void PrintInlinedFunctionInfo(CompilationInfo* info, int source_id,
513 : int inlining_id,
514 : const CompilationInfo::InlinedFunctionHolder& h) {
515 0 : CodeTracer::Scope tracing_scope(info->isolate()->GetCodeTracer());
516 0 : OFStream os(tracing_scope.file());
517 0 : os << "INLINE (" << h.shared_info->DebugName()->ToCString().get() << ") id{"
518 0 : << info->optimization_id() << "," << source_id << "} AS " << inlining_id
519 0 : << " AT ";
520 0 : const SourcePosition position = h.position.position;
521 0 : if (position.IsKnown()) {
522 0 : os << "<" << position.InliningId() << ":" << position.ScriptOffset() << ">";
523 : } else {
524 0 : os << "<?>";
525 : }
526 : os << std::endl;
527 0 : }
528 :
529 : // Print the source of all functions that participated in this optimizing
530 : // compilation. For inlined functions print source position of their inlining.
531 0 : void DumpParticipatingSource(CompilationInfo* info) {
532 : AllowDeferredHandleDereference allow_deference_for_print_code;
533 :
534 : std::vector<Handle<SharedFunctionInfo>> printed;
535 0 : printed.reserve(info->inlined_functions().size());
536 :
537 : PrintFunctionSource(info, &printed, SourcePosition::kNotInlined,
538 0 : info->shared_info());
539 0 : const auto& inlined = info->inlined_functions();
540 0 : for (unsigned id = 0; id < inlined.size(); id++) {
541 : const int source_id =
542 0 : PrintFunctionSource(info, &printed, id, inlined[id].shared_info);
543 0 : PrintInlinedFunctionInfo(info, source_id, id, inlined[id]);
544 : }
545 0 : }
546 :
547 : // Print the code after compiling it.
548 1300508 : void PrintCode(Handle<Code> code, CompilationInfo* info) {
549 1300508 : if (FLAG_print_opt_source && info->IsOptimizing()) {
550 0 : DumpParticipatingSource(info);
551 : }
552 :
553 : #ifdef ENABLE_DISASSEMBLER
554 : AllowDeferredHandleDereference allow_deference_for_print_code;
555 : Isolate* isolate = info->isolate();
556 : bool print_code =
557 : isolate->bootstrapper()->IsActive()
558 : ? FLAG_print_builtin_code
559 : : (FLAG_print_code || (info->IsStub() && FLAG_print_code_stubs) ||
560 : (info->IsOptimizing() && FLAG_print_opt_code &&
561 : info->shared_info()->PassesFilter(FLAG_print_opt_code_filter)) ||
562 : (info->IsWasm() && FLAG_print_wasm_code));
563 : if (print_code) {
564 : std::unique_ptr<char[]> debug_name = info->GetDebugName();
565 : CodeTracer::Scope tracing_scope(info->isolate()->GetCodeTracer());
566 : OFStream os(tracing_scope.file());
567 :
568 : // Print the source code if available.
569 : bool print_source = code->kind() == Code::OPTIMIZED_FUNCTION;
570 : if (print_source) {
571 : Handle<SharedFunctionInfo> shared = info->shared_info();
572 : if (shared->script()->IsScript() &&
573 : !Script::cast(shared->script())->source()->IsUndefined(isolate)) {
574 : os << "--- Raw source ---\n";
575 : StringCharacterStream stream(
576 : String::cast(Script::cast(shared->script())->source()),
577 : shared->start_position());
578 : // fun->end_position() points to the last character in the stream. We
579 : // need to compensate by adding one to calculate the length.
580 : int source_len = shared->end_position() - shared->start_position() + 1;
581 : for (int i = 0; i < source_len; i++) {
582 : if (stream.HasMore()) {
583 : os << AsReversiblyEscapedUC16(stream.GetNext());
584 : }
585 : }
586 : os << "\n\n";
587 : }
588 : }
589 : if (info->IsOptimizing()) {
590 : os << "--- Optimized code ---\n"
591 : << "optimization_id = " << info->optimization_id() << "\n";
592 : } else {
593 : os << "--- Code ---\n";
594 : }
595 : if (print_source) {
596 : Handle<SharedFunctionInfo> shared = info->shared_info();
597 : os << "source_position = " << shared->start_position() << "\n";
598 : }
599 : code->Disassemble(debug_name.get(), os);
600 : os << "--- End code ---\n";
601 : }
602 : #endif // ENABLE_DISASSEMBLER
603 1300508 : }
604 :
605 0 : struct TurboCfgFile : public std::ofstream {
606 0 : explicit TurboCfgFile(Isolate* isolate)
607 : : std::ofstream(isolate->GetTurboCfgFileName().c_str(),
608 0 : std::ios_base::app) {}
609 : };
610 :
611 0 : struct TurboJsonFile : public std::ofstream {
612 0 : TurboJsonFile(CompilationInfo* info, std::ios_base::openmode mode)
613 0 : : std::ofstream(GetVisualizerLogFileName(info, nullptr, "json").get(),
614 0 : mode) {}
615 : };
616 :
617 1744922 : void TraceSchedule(CompilationInfo* info, Schedule* schedule) {
618 1744912 : if (FLAG_trace_turbo) {
619 : AllowHandleDereference allow_deref;
620 0 : TurboJsonFile json_of(info, std::ios_base::app);
621 0 : json_of << "{\"name\":\"Schedule\",\"type\":\"schedule\",\"data\":\"";
622 0 : std::stringstream schedule_stream;
623 0 : schedule_stream << *schedule;
624 : std::string schedule_string(schedule_stream.str());
625 0 : for (const auto& c : schedule_string) {
626 0 : json_of << AsEscapedUC16ForJSON(c);
627 : }
628 0 : json_of << "\"},\n";
629 : }
630 1744912 : if (FLAG_trace_turbo_graph || FLAG_trace_turbo_scheduler) {
631 : AllowHandleDereference allow_deref;
632 10 : CodeTracer::Scope tracing_scope(info->isolate()->GetCodeTracer());
633 0 : OFStream os(tracing_scope.file());
634 0 : os << "-- Schedule --------------------------------------\n" << *schedule;
635 : }
636 1744902 : }
637 :
638 :
639 : class SourcePositionWrapper final : public Reducer {
640 : public:
641 : SourcePositionWrapper(Reducer* reducer, SourcePositionTable* table)
642 832155 : : reducer_(reducer), table_(table) {}
643 0 : ~SourcePositionWrapper() final {}
644 :
645 0 : const char* reducer_name() const override { return reducer_->reducer_name(); }
646 :
647 99395854 : Reduction Reduce(Node* node) final {
648 99395854 : SourcePosition const pos = table_->GetSourcePosition(node);
649 99396551 : SourcePositionTable::Scope position(table_, pos);
650 198792122 : return reducer_->Reduce(node);
651 : }
652 :
653 834297 : void Finalize() final { reducer_->Finalize(); }
654 :
655 : private:
656 : Reducer* const reducer_;
657 : SourcePositionTable* const table_;
658 :
659 : DISALLOW_COPY_AND_ASSIGN(SourcePositionWrapper);
660 : };
661 :
662 :
663 : class JSGraphReducer final : public GraphReducer {
664 : public:
665 7117045 : JSGraphReducer(JSGraph* jsgraph, Zone* zone)
666 7117045 : : GraphReducer(zone, jsgraph->graph(), jsgraph->Dead()) {}
667 3558527 : ~JSGraphReducer() final {}
668 : };
669 :
670 :
671 19002082 : void AddReducer(PipelineData* data, GraphReducer* graph_reducer,
672 : Reducer* reducer) {
673 17337757 : if (data->info()->is_source_positions_enabled()) {
674 832163 : void* const buffer = data->graph_zone()->New(sizeof(SourcePositionWrapper));
675 : SourcePositionWrapper* const wrapper =
676 832162 : new (buffer) SourcePositionWrapper(reducer, data->source_positions());
677 832162 : graph_reducer->AddReducer(wrapper);
678 : } else {
679 16505594 : graph_reducer->AddReducer(reducer);
680 : }
681 17337750 : }
682 :
683 :
684 68957472 : class PipelineRunScope {
685 : public:
686 103359416 : PipelineRunScope(PipelineData* data, const char* phase_name)
687 : : phase_scope_(
688 : phase_name == nullptr ? nullptr : data->pipeline_statistics(),
689 : phase_name),
690 68955846 : zone_scope_(data->zone_stats(), ZONE_NAME) {}
691 :
692 34491235 : Zone* zone() { return zone_scope_.zone(); }
693 :
694 : private:
695 : PhaseScope phase_scope_;
696 : ZoneStats::Scope zone_scope_;
697 : };
698 :
699 689252 : PipelineStatistics* CreatePipelineStatistics(Handle<Script> script,
700 0 : CompilationInfo* info,
701 : ZoneStats* zone_stats) {
702 : PipelineStatistics* pipeline_statistics = nullptr;
703 :
704 689252 : if (FLAG_turbo_stats || FLAG_turbo_stats_nvp) {
705 0 : pipeline_statistics = new PipelineStatistics(info, zone_stats);
706 0 : pipeline_statistics->BeginPhaseKind("initializing");
707 : }
708 :
709 689257 : if (FLAG_trace_turbo) {
710 0 : TurboJsonFile json_of(info, std::ios_base::trunc);
711 0 : std::unique_ptr<char[]> function_name = info->GetDebugName();
712 0 : int pos = info->IsStub() ? 0 : info->shared_info()->start_position();
713 0 : json_of << "{\"function\":\"" << function_name.get()
714 0 : << "\", \"sourcePosition\":" << pos << ", \"source\":\"";
715 : Isolate* isolate = info->isolate();
716 0 : if (!script.is_null() && !script->source()->IsUndefined(isolate)) {
717 : DisallowHeapAllocation no_allocation;
718 : int start = info->shared_info()->start_position();
719 0 : int len = info->shared_info()->end_position() - start;
720 : String::SubStringRange source(String::cast(script->source()), start, len);
721 0 : for (const auto& c : source) {
722 0 : json_of << AsEscapedUC16ForJSON(c);
723 : }
724 : }
725 0 : json_of << "\",\n\"phases\":[";
726 : }
727 :
728 689257 : return pipeline_statistics;
729 : }
730 :
731 : } // namespace
732 :
733 1828107 : class PipelineCompilationJob final : public CompilationJob {
734 : public:
735 457030 : PipelineCompilationJob(ParseInfo* parse_info,
736 : Handle<SharedFunctionInfo> shared_info,
737 : Handle<JSFunction> function)
738 : // Note that the CompilationInfo is not initialized at the time we pass it
739 : // to the CompilationJob constructor, but it is not dereferenced there.
740 : : CompilationJob(function->GetIsolate(), parse_info, &compilation_info_,
741 : "TurboFan"),
742 : parse_info_(parse_info),
743 : zone_stats_(function->GetIsolate()->allocator()),
744 : compilation_info_(parse_info_.get()->zone(), function->GetIsolate(),
745 : shared_info, function),
746 : pipeline_statistics_(CreatePipelineStatistics(
747 : parse_info_->script(), compilation_info(), &zone_stats_)),
748 : data_(&zone_stats_, compilation_info(), pipeline_statistics_.get()),
749 : pipeline_(&data_),
750 3199211 : linkage_(nullptr) {}
751 :
752 : protected:
753 : Status PrepareJobImpl() final;
754 : Status ExecuteJobImpl() final;
755 : Status FinalizeJobImpl() final;
756 :
757 : // Registers weak object to optimized code dependencies.
758 : void RegisterWeakObjectsInOptimizedCode(Handle<Code> code);
759 :
760 : private:
761 : std::unique_ptr<ParseInfo> parse_info_;
762 : ZoneStats zone_stats_;
763 : CompilationInfo compilation_info_;
764 : std::unique_ptr<PipelineStatistics> pipeline_statistics_;
765 : PipelineData data_;
766 : PipelineImpl pipeline_;
767 : Linkage* linkage_;
768 :
769 : DISALLOW_COPY_AND_ASSIGN(PipelineCompilationJob);
770 : };
771 :
// Main-thread preparation step: transfers the relevant FLAG_* settings onto
// the CompilationInfo, creates the incoming Linkage and builds the TurboFan
// graph.  Returns FAILED on stack overflow, aborts on graph building errors.
PipelineCompilationJob::Status PipelineCompilationJob::PrepareJobImpl() {
  if (!FLAG_always_opt) {
    compilation_info()->MarkAsBailoutOnUninitialized();
  }
  if (FLAG_turbo_loop_peeling) {
    compilation_info()->MarkAsLoopPeelingEnabled();
  }
  if (FLAG_turbo_inlining) {
    compilation_info()->MarkAsInliningEnabled();
  }
  if (FLAG_inline_accessors) {
    compilation_info()->MarkAsAccessorInliningEnabled();
  }
  // The one-closure-cell map presumably indicates that only a single closure
  // exists for this feedback vector, making function context specialization
  // safe — TODO(review): confirm against the heap's map definitions.
  if (compilation_info()->closure()->feedback_vector_cell()->map() ==
      isolate()->heap()->one_closure_cell_map()) {
    compilation_info()->MarkAsFunctionContextSpecializing();
  }

  data_.set_start_source_position(
      compilation_info()->shared_info()->start_position());

  // The Linkage is zone-allocated; it lives as long as the compilation zone.
  linkage_ = new (compilation_info()->zone()) Linkage(
      Linkage::ComputeIncoming(compilation_info()->zone(), compilation_info()));

  if (!pipeline_.CreateGraph()) {
    if (isolate()->has_pending_exception()) return FAILED;  // Stack overflowed.
    return AbortOptimization(kGraphBuildingFailed);
  }

  if (compilation_info()->is_osr()) data_.InitializeOsrHelper();

  // Make sure that we have generated the maximal number of deopt entries.
  // This is in order to avoid triggering the generation of deopt entries later
  // during code assembly.
  Deoptimizer::EnsureCodeForMaxDeoptimizationEntries(isolate());

  return SUCCEEDED;
}
810 :
811 439051 : PipelineCompilationJob::Status PipelineCompilationJob::ExecuteJobImpl() {
812 439051 : if (!pipeline_.OptimizeGraph(linkage_)) return FAILED;
813 439035 : pipeline_.AssembleCode(linkage_);
814 439041 : return SUCCEEDED;
815 : }
816 :
// Main-thread finalization: materializes the Code object, commits code
// dependencies, publishes the code and registers embedded weak objects.
PipelineCompilationJob::Status PipelineCompilationJob::FinalizeJobImpl() {
  Handle<Code> code = pipeline_.FinalizeCode();
  if (code.is_null()) {
    // No bailout reason recorded means code generation itself failed.
    if (compilation_info()->bailout_reason() == kNoReason) {
      return AbortOptimization(kCodeGenerationFailed);
    }
    return FAILED;
  }
  // NOTE(review): dependencies are committed before the code is installed —
  // ordering assumed significant; confirm against Dependencies::Commit.
  compilation_info()->dependencies()->Commit(code);
  compilation_info()->SetCode(code);

  compilation_info()->context()->native_context()->AddOptimizedCode(*code);
  RegisterWeakObjectsInOptimizedCode(code);
  return SUCCEEDED;
}
832 :
833 : namespace {
834 :
835 848558 : void AddWeakObjectToCodeDependency(Isolate* isolate, Handle<HeapObject> object,
836 : Handle<Code> code) {
837 848558 : Handle<WeakCell> cell = Code::WeakCellFor(code);
838 848558 : Heap* heap = isolate->heap();
839 848558 : if (heap->InNewSpace(*object)) {
840 356886 : heap->AddWeakNewSpaceObjectToCodeDependency(object, cell);
841 : } else {
842 491672 : Handle<DependentCode> dep(heap->LookupWeakObjectToCodeDependency(object));
843 : dep =
844 491672 : DependentCode::InsertWeakCode(dep, DependentCode::kWeakCodeGroup, cell);
845 491672 : heap->AddWeakObjectToCodeDependency(object, dep);
846 : }
847 848558 : }
848 :
849 : } // namespace
850 :
// Scans the relocation info of {code} for embedded objects that must be
// treated weakly and registers the corresponding weak dependencies.
void PipelineCompilationJob::RegisterWeakObjectsInOptimizedCode(
    Handle<Code> code) {
  DCHECK(code->is_optimized_code());
  std::vector<Handle<Map>> maps;
  std::vector<Handle<HeapObject>> objects;
  {
    // Collect the embedded objects while the GC cannot move them under us.
    DisallowHeapAllocation no_gc;
    int const mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT);
    for (RelocIterator it(*code, mode_mask); !it.done(); it.next()) {
      RelocInfo::Mode mode = it.rinfo()->rmode();
      if (mode == RelocInfo::EMBEDDED_OBJECT &&
          code->IsWeakObjectInOptimizedCode(it.rinfo()->target_object())) {
        Handle<HeapObject> object(HeapObject::cast(it.rinfo()->target_object()),
                                  isolate());
        // Maps and other heap objects are registered via different
        // mechanisms, so separate them here.
        if (object->IsMap()) {
          maps.push_back(Handle<Map>::cast(object));
        } else {
          objects.push_back(object);
        }
      }
    }
  }
  // Maps get a dependent-code entry; maps that had no weak-code dependents
  // yet are additionally retained by the heap.
  for (Handle<Map> map : maps) {
    if (map->dependent_code()->IsEmpty(DependentCode::kWeakCodeGroup)) {
      isolate()->heap()->AddRetainedMap(map);
    }
    Map::AddDependentCode(map, DependentCode::kWeakCodeGroup, code);
  }
  // Non-map objects go through the heap's object-to-code dependency table.
  for (Handle<HeapObject> object : objects) {
    AddWeakObjectToCodeDependency(isolate(), object, code);
  }
  code->set_can_have_weak_objects(true);
}
884 :
// Compilation job for WebAssembly (and asm.js-origin) code.  The job is
// constructed directly in State::kReadyToExecute, so PrepareJobImpl is never
// run for this job; the graph is supplied via the {jsgraph} argument.
class PipelineWasmCompilationJob final : public CompilationJob {
 public:
  explicit PipelineWasmCompilationJob(
      CompilationInfo* info, JSGraph* jsgraph, CallDescriptor* descriptor,
      SourcePositionTable* source_positions,
      ZoneVector<trap_handler::ProtectedInstructionData>* protected_insts,
      bool asmjs_origin)
      : CompilationJob(info->isolate(), nullptr, info, "TurboFan",
                       State::kReadyToExecute),
        zone_stats_(info->isolate()->allocator()),
        pipeline_statistics_(CreatePipelineStatistics(Handle<Script>::null(),
                                                      info, &zone_stats_)),
        data_(&zone_stats_, info, jsgraph, pipeline_statistics_.get(),
              source_positions, protected_insts),
        pipeline_(&data_),
        linkage_(descriptor),
        asmjs_origin_(asmjs_origin) {}

 protected:
  Status PrepareJobImpl() final;
  Status ExecuteJobImpl() final;
  Status FinalizeJobImpl() final;

 private:
  size_t AllocatedMemory() const override;

  // Temporary regression check while we get the wasm code off the GC heap, and
  // until we decontextualize wasm code.
  // We expect the only embedded objects to be: CEntryStub, undefined, and
  // the various builtins for throwing exceptions like OOB.
  void ValidateImmovableEmbeddedObjects() const;

  // Note: members are declared in initialization order; data_ depends on
  // zone_stats_ and pipeline_statistics_, pipeline_ depends on data_.
  ZoneStats zone_stats_;
  std::unique_ptr<PipelineStatistics> pipeline_statistics_;
  PipelineData data_;
  PipelineImpl pipeline_;
  Linkage linkage_;
  bool asmjs_origin_;
};
924 :
PipelineWasmCompilationJob::Status
PipelineWasmCompilationJob::PrepareJobImpl() {
  UNREACHABLE();  // Prepare should always be skipped for WasmCompilationJob.
  // Unreachable; present only to satisfy compilers that do not know that
  // UNREACHABLE() never returns.
  return SUCCEEDED;
}
930 :
// Runs the machine-level part of the wasm pipeline: optional optimization
// fixpoint, then scheduling, instruction selection and code assembly.
PipelineWasmCompilationJob::Status
PipelineWasmCompilationJob::ExecuteJobImpl() {
  // Open the TurboFan JSON trace; subsequent phases append to this file.
  if (FLAG_trace_turbo) {
    TurboJsonFile json_of(compilation_info(), std::ios_base::trunc);
    json_of << "{\"function\":\"" << compilation_info()->GetDebugName().get()
            << "\", \"source\":\"\",\n\"phases\":[";
  }

  pipeline_.RunPrintAndVerify("Machine", true);
  // Machine-level optimization runs when --wasm-opt is set, and always for
  // asm.js-origin code.
  if (FLAG_wasm_opt || asmjs_origin_) {
    PipelineData* data = &data_;
    PipelineRunScope scope(data, "Wasm optimization");
    JSGraphReducer graph_reducer(data->jsgraph(), scope.zone());
    DeadCodeElimination dead_code_elimination(&graph_reducer, data->graph(),
                                              data->common());
    ValueNumberingReducer value_numbering(scope.zone(), data->graph()->zone());
    // The machine reducer is told whether this code is of asm.js origin.
    MachineOperatorReducer machine_reducer(data->jsgraph(), asmjs_origin_);
    CommonOperatorReducer common_reducer(&graph_reducer, data->graph(),
                                         data->common(), data->machine());
    // NOTE(review): reducers are registered in this specific order —
    // assumed significant, do not reorder without checking JSGraphReducer.
    AddReducer(data, &graph_reducer, &dead_code_elimination);
    AddReducer(data, &graph_reducer, &machine_reducer);
    AddReducer(data, &graph_reducer, &common_reducer);
    AddReducer(data, &graph_reducer, &value_numbering);
    graph_reducer.ReduceGraph();
    pipeline_.RunPrintAndVerify("Optimized Machine", true);
  }

  if (!pipeline_.ScheduleAndSelectInstructions(&linkage_, true)) return FAILED;
  pipeline_.AssembleCode(&linkage_);
  return SUCCEEDED;
}
962 :
963 227959 : size_t PipelineWasmCompilationJob::AllocatedMemory() const {
964 227959 : return pipeline_.data_->zone_stats()->GetCurrentAllocatedBytes();
965 : }
966 :
// Finalizes the generated wasm code and runs the (debug-only) embedded
// object validation declared on the class.
PipelineWasmCompilationJob::Status
PipelineWasmCompilationJob::FinalizeJobImpl() {
  pipeline_.FinalizeCode();
  ValidateImmovableEmbeddedObjects();
  return SUCCEEDED;
}
973 :
// Checks that every GC-referencing reloc entry in the generated wasm code
// targets an immovable object, wasm code, or the one allowed stub.
void PipelineWasmCompilationJob::ValidateImmovableEmbeddedObjects() const {
#if !DEBUG
  // Skip the check outside of debug builds; the code below still compiles
  // but is unreachable in release.
  return;
#endif
  // We expect the only embedded objects to be those originating from
  // a snapshot, which are immovable.
  DisallowHeapAllocation no_gc;
  Handle<Code> result = pipeline_.data_->code();
  if (result.is_null()) return;
  // TODO(aseemgarg): remove this restriction when
  // wasm-to-js is also internally immovable to include WASM_TO_JS
  if (result->kind() != Code::WASM_FUNCTION) return;
  // Bit mask covering all reloc modes up to and including LAST_GCED_ENUM,
  // i.e. every mode that can reference a GC-managed object.
  static const int kAllGCRefs = (1 << (RelocInfo::LAST_GCED_ENUM + 1)) - 1;
  for (RelocIterator it(*result, kAllGCRefs); !it.done(); it.next()) {
    RelocInfo::Mode mode = it.rinfo()->rmode();
    Object* target = nullptr;
    switch (mode) {
      case RelocInfo::CODE_TARGET:
        // this would be either one of the stubs or builtins, because
        // we didn't link yet.
        target = Code::GetCodeFromTargetAddress(it.rinfo()->target_address());
        break;
      case RelocInfo::EMBEDDED_OBJECT:
        target = it.rinfo()->target_object();
        break;
      default:
        UNREACHABLE();
    }
    CHECK_NOT_NULL(target);
    bool is_immovable =
        target->IsSmi() || Heap::IsImmovable(HeapObject::cast(target));
    bool is_wasm = target->IsCode() &&
                   (Code::cast(target)->kind() == Code::WASM_FUNCTION ||
                    Code::cast(target)->kind() == Code::WASM_TO_JS_FUNCTION);
    // The DoubleToI stub is explicitly whitelisted.
    bool is_allowed_stub = false;
    if (target->IsCode()) {
      Code* code = Code::cast(target);
      is_allowed_stub =
          code->kind() == Code::STUB &&
          CodeStub::MajorKeyFromKey(code->stub_key()) == CodeStub::DoubleToI;
    }
    CHECK(is_immovable || is_wasm || is_allowed_stub);
  }
}
1018 :
// Runs {Phase} inside a PipelineRunScope, which supplies a temporary zone
// that lives only for the duration of the phase.
template <typename Phase>
void PipelineImpl::Run() {
  PipelineRunScope scope(this->data_, Phase::phase_name());
  Phase phase;
  phase.Run(this->data_, scope.zone());
}
1025 :
// Same as Run<Phase>(), for phases whose Run takes one extra argument.
template <typename Phase, typename Arg0>
void PipelineImpl::Run(Arg0 arg_0) {
  PipelineRunScope scope(this->data_, Phase::phase_name());
  Phase phase;
  phase.Run(this->data_, scope.zone(), arg_0);
}
1032 :
// Same as Run<Phase>(), for phases whose Run takes two extra arguments.
template <typename Phase, typename Arg0, typename Arg1>
void PipelineImpl::Run(Arg0 arg_0, Arg1 arg_1) {
  PipelineRunScope scope(this->data_, Phase::phase_name());
  Phase phase;
  phase.Run(this->data_, scope.zone(), arg_0, arg_1);
}
1039 :
struct GraphBuilderPhase {
  static const char* phase_name() { return "graph builder"; }

  // Builds the initial TurboFan graph from the function's bytecode,
  // propagating the bailout-on-uninitialized setting into the type-hint
  // lowering flags.
  void Run(PipelineData* data, Zone* temp_zone) {
    JSTypeHintLowering::Flags flags = JSTypeHintLowering::kNoFlags;
    if (data->info()->is_bailout_on_uninitialized()) {
      flags |= JSTypeHintLowering::kBailoutOnUninitialized;
    }
    BytecodeGraphBuilder graph_builder(
        temp_zone, data->info()->shared_info(),
        handle(data->info()->closure()->feedback_vector()),
        data->info()->osr_offset(), data->jsgraph(), CallFrequency(1.0f),
        data->source_positions(), data->native_context(),
        SourcePosition::kNotInlined, flags);
    graph_builder.CreateGraph();
  }
};
1057 :
1058 : namespace {
1059 :
1060 23093 : Maybe<OuterContext> GetModuleContext(Handle<JSFunction> closure) {
1061 : Context* current = closure->context();
1062 : size_t distance = 0;
1063 62136 : while (!current->IsNativeContext()) {
1064 15955 : if (current->IsModuleContext()) {
1065 : return Just(OuterContext(handle(current), distance));
1066 : }
1067 : current = current->previous();
1068 15950 : distance++;
1069 : }
1070 : return Nothing<OuterContext>();
1071 : }
1072 :
1073 443382 : Maybe<OuterContext> ChooseSpecializationContext(CompilationInfo* info) {
1074 443382 : if (info->is_function_context_specializing()) {
1075 : DCHECK(info->has_context());
1076 420289 : return Just(OuterContext(handle(info->context()), 0));
1077 : }
1078 23093 : return GetModuleContext(info->closure());
1079 : }
1080 :
1081 : } // anonymous namespace
1082 :
struct InliningPhase {
  static const char* phase_name() { return "inlining"; }

  // Runs inlining together with context specialization, native-context
  // specialization, call reduction and intrinsic lowering in a single
  // reducer fixpoint.
  void Run(PipelineData* data, Zone* temp_zone) {
    JSGraphReducer graph_reducer(data->jsgraph(), temp_zone);
    DeadCodeElimination dead_code_elimination(&graph_reducer, data->graph(),
                                              data->common());
    CheckpointElimination checkpoint_elimination(&graph_reducer);
    CommonOperatorReducer common_reducer(&graph_reducer, data->graph(),
                                         data->common(), data->machine());
    JSCallReducer call_reducer(&graph_reducer, data->jsgraph(),
                               data->info()->is_bailout_on_uninitialized()
                                   ? JSCallReducer::kBailoutOnUninitialized
                                   : JSCallReducer::kNoFlags,
                               data->native_context(),
                               data->info()->dependencies());
    // The closure is only passed along when function context specialization
    // is enabled; otherwise specialization is at most to a module context.
    JSContextSpecialization context_specialization(
        &graph_reducer, data->jsgraph(),
        ChooseSpecializationContext(data->info()),
        data->info()->is_function_context_specializing()
            ? data->info()->closure()
            : MaybeHandle<JSFunction>());
    JSNativeContextSpecialization::Flags flags =
        JSNativeContextSpecialization::kNoFlags;
    if (data->info()->is_accessor_inlining_enabled()) {
      flags |= JSNativeContextSpecialization::kAccessorInliningEnabled;
    }
    if (data->info()->is_bailout_on_uninitialized()) {
      flags |= JSNativeContextSpecialization::kBailoutOnUninitialized;
    }
    JSNativeContextSpecialization native_context_specialization(
        &graph_reducer, data->jsgraph(), flags, data->native_context(),
        data->info()->dependencies(), temp_zone);
    // Even with inlining disabled the heuristic still runs, in restricted
    // mode.
    JSInliningHeuristic inlining(
        &graph_reducer, data->info()->is_inlining_enabled()
                            ? JSInliningHeuristic::kGeneralInlining
                            : JSInliningHeuristic::kRestrictedInlining,
        temp_zone, data->info(), data->jsgraph(), data->source_positions());
    JSIntrinsicLowering intrinsic_lowering(&graph_reducer, data->jsgraph());
    // NOTE(review): registration order assumed significant — do not reorder.
    AddReducer(data, &graph_reducer, &dead_code_elimination);
    AddReducer(data, &graph_reducer, &checkpoint_elimination);
    AddReducer(data, &graph_reducer, &common_reducer);
    AddReducer(data, &graph_reducer, &native_context_specialization);
    AddReducer(data, &graph_reducer, &context_specialization);
    AddReducer(data, &graph_reducer, &intrinsic_lowering);
    AddReducer(data, &graph_reducer, &call_reducer);
    AddReducer(data, &graph_reducer, &inlining);
    graph_reducer.ReduceGraph();
  }
};
1133 :
1134 :
1135 : struct TyperPhase {
1136 : static const char* phase_name() { return "typer"; }
1137 :
1138 1330146 : void Run(PipelineData* data, Zone* temp_zone, Typer* typer) {
1139 : NodeVector roots(temp_zone);
1140 443382 : data->jsgraph()->GetCachedNodes(&roots);
1141 : LoopVariableOptimizer induction_vars(data->jsgraph()->graph(),
1142 443382 : data->common(), temp_zone);
1143 443382 : if (FLAG_turbo_loop_variable) induction_vars.Run();
1144 443382 : typer->Run(roots, &induction_vars);
1145 443381 : }
1146 : };
1147 :
struct UntyperPhase {
  static const char* phase_name() { return "untyper"; }

  // Strips type information from the graph: the cached root nodes are
  // untyped directly, everything else via a reducer pass.
  void Run(PipelineData* data, Zone* temp_zone) {
    // Local reducer that removes the type from every typed node it visits.
    class RemoveTypeReducer final : public Reducer {
     public:
      const char* reducer_name() const override { return "RemoveTypeReducer"; }
      Reduction Reduce(Node* node) final {
        if (NodeProperties::IsTyped(node)) {
          NodeProperties::RemoveType(node);
          return Changed(node);
        }
        return NoChange();
      }
    };

    NodeVector roots(temp_zone);
    data->jsgraph()->GetCachedNodes(&roots);
    for (Node* node : roots) {
      NodeProperties::RemoveType(node);
    }

    JSGraphReducer graph_reducer(data->jsgraph(), temp_zone);
    RemoveTypeReducer remove_type_reducer;
    AddReducer(data, &graph_reducer, &remove_type_reducer);
    graph_reducer.ReduceGraph();
  }
};
1176 :
struct TypedLoweringPhase {
  static const char* phase_name() { return "typed lowering"; }

  // Runs the type-based lowerings (builtin reduction, create lowering,
  // typed optimization, typed lowering) together with supporting reducers
  // in one fixpoint.
  void Run(PipelineData* data, Zone* temp_zone) {
    JSGraphReducer graph_reducer(data->jsgraph(), temp_zone);
    DeadCodeElimination dead_code_elimination(&graph_reducer, data->graph(),
                                              data->common());
    JSBuiltinReducer builtin_reducer(
        &graph_reducer, data->jsgraph(),
        data->info()->dependencies(), data->native_context());
    JSCreateLowering create_lowering(
        &graph_reducer, data->info()->dependencies(), data->jsgraph(),
        data->native_context(), temp_zone);
    JSTypedLowering typed_lowering(&graph_reducer, data->jsgraph(), temp_zone);
    TypedOptimization typed_optimization(
        &graph_reducer, data->info()->dependencies(), data->jsgraph());
    SimplifiedOperatorReducer simple_reducer(&graph_reducer, data->jsgraph());
    CheckpointElimination checkpoint_elimination(&graph_reducer);
    CommonOperatorReducer common_reducer(&graph_reducer, data->graph(),
                                         data->common(), data->machine());
    // NOTE(review): registration order assumed significant — do not reorder.
    AddReducer(data, &graph_reducer, &dead_code_elimination);
    AddReducer(data, &graph_reducer, &builtin_reducer);
    AddReducer(data, &graph_reducer, &create_lowering);
    AddReducer(data, &graph_reducer, &typed_optimization);
    AddReducer(data, &graph_reducer, &typed_lowering);
    AddReducer(data, &graph_reducer, &simple_reducer);
    AddReducer(data, &graph_reducer, &checkpoint_elimination);
    AddReducer(data, &graph_reducer, &common_reducer);
    graph_reducer.ReduceGraph();
  }
};
1208 :
1209 :
struct EscapeAnalysisPhase {
  static const char* phase_name() { return "escape analysis"; }

  // Runs escape analysis over the graph, then applies its results via the
  // EscapeAnalysisReducer.
  void Run(PipelineData* data, Zone* temp_zone) {
    EscapeAnalysis escape_analysis(data->jsgraph(), temp_zone);
    escape_analysis.ReduceGraph();
    JSGraphReducer reducer(data->jsgraph(), temp_zone);
    EscapeAnalysisReducer escape_reducer(&reducer, data->jsgraph(),
                                         escape_analysis.analysis_result(),
                                         temp_zone);
    AddReducer(data, &reducer, &escape_reducer);
    reducer.ReduceGraph();
    // TODO(tebbi): Turn this into a debug mode check once we have confidence.
    escape_reducer.VerifyReplacement();
  }
};
1226 :
1227 : struct SimplifiedLoweringPhase {
1228 : static const char* phase_name() { return "simplified lowering"; }
1229 :
1230 443360 : void Run(PipelineData* data, Zone* temp_zone) {
1231 : SimplifiedLowering lowering(data->jsgraph(), temp_zone,
1232 443360 : data->source_positions());
1233 443359 : lowering.LowerAllNodes();
1234 443357 : }
1235 : };
1236 :
1237 : struct LoopPeelingPhase {
1238 : static const char* phase_name() { return "loop peeling"; }
1239 :
1240 1756108 : void Run(PipelineData* data, Zone* temp_zone) {
1241 439027 : GraphTrimmer trimmer(temp_zone, data->graph());
1242 : NodeVector roots(temp_zone);
1243 439027 : data->jsgraph()->GetCachedNodes(&roots);
1244 439028 : trimmer.TrimGraph(roots.begin(), roots.end());
1245 :
1246 : LoopTree* loop_tree =
1247 439028 : LoopFinder::BuildLoopTree(data->jsgraph()->graph(), temp_zone);
1248 : LoopPeeler::PeelInnerLoopsOfTree(data->graph(), data->common(), loop_tree,
1249 439026 : temp_zone);
1250 439027 : }
1251 : };
1252 :
struct LoopExitEliminationPhase {
  static const char* phase_name() { return "loop exit elimination"; }

  // Removes loop exit nodes from the graph.
  void Run(PipelineData* data, Zone* temp_zone) {
    LoopPeeler::EliminateLoopExits(data->graph(), temp_zone);
  }
};
1260 :
struct ConcurrentOptimizationPrepPhase {
  static const char* phase_name() { return "concurrency preparation"; }

  // Eagerly creates heap-derived constants, presumably so that later
  // (potentially background-thread) phases need not touch the heap —
  // NOTE(review): rationale inferred from the phase name; confirm.
  void Run(PipelineData* data, Zone* temp_zone) {
    // Make sure we cache these code stubs.
    data->jsgraph()->CEntryStubConstant(1);
    data->jsgraph()->CEntryStubConstant(2);

    // TODO(turbofan): Remove this line once the Array constructor code
    // is a proper builtin and no longer a CodeStub.
    data->jsgraph()->ArrayConstructorStubConstant();

    // This is needed for escape analysis.
    NodeProperties::SetType(data->jsgraph()->FalseConstant(), Type::Boolean());
    NodeProperties::SetType(data->jsgraph()->TrueConstant(), Type::Boolean());
  }
};
1278 :
1279 : struct GenericLoweringPhase {
1280 : static const char* phase_name() { return "generic lowering"; }
1281 :
1282 886719 : void Run(PipelineData* data, Zone* temp_zone) {
1283 443359 : JSGraphReducer graph_reducer(data->jsgraph(), temp_zone);
1284 886717 : JSGenericLowering generic_lowering(data->jsgraph());
1285 443360 : AddReducer(data, &graph_reducer, &generic_lowering);
1286 443360 : graph_reducer.ReduceGraph();
1287 443360 : }
1288 : };
1289 :
struct EarlyOptimizationPhase {
  static const char* phase_name() { return "early optimization"; }

  // Combined reducer fixpoint: dead-code elimination, simplified-operator
  // reduction, redundancy elimination, machine/common operator reduction
  // and value numbering.
  void Run(PipelineData* data, Zone* temp_zone) {
    JSGraphReducer graph_reducer(data->jsgraph(), temp_zone);
    DeadCodeElimination dead_code_elimination(&graph_reducer, data->graph(),
                                              data->common());
    SimplifiedOperatorReducer simple_reducer(&graph_reducer, data->jsgraph());
    RedundancyElimination redundancy_elimination(&graph_reducer, temp_zone);
    ValueNumberingReducer value_numbering(temp_zone, data->graph()->zone());
    MachineOperatorReducer machine_reducer(data->jsgraph());
    CommonOperatorReducer common_reducer(&graph_reducer, data->graph(),
                                         data->common(), data->machine());
    // NOTE(review): registration order assumed significant — do not reorder.
    AddReducer(data, &graph_reducer, &dead_code_elimination);
    AddReducer(data, &graph_reducer, &simple_reducer);
    AddReducer(data, &graph_reducer, &redundancy_elimination);
    AddReducer(data, &graph_reducer, &machine_reducer);
    AddReducer(data, &graph_reducer, &common_reducer);
    AddReducer(data, &graph_reducer, &value_numbering);
    graph_reducer.ReduceGraph();
  }
};
1312 :
1313 : struct ControlFlowOptimizationPhase {
1314 : static const char* phase_name() { return "control flow optimization"; }
1315 :
1316 443358 : void Run(PipelineData* data, Zone* temp_zone) {
1317 : ControlFlowOptimizer optimizer(data->graph(), data->common(),
1318 443358 : data->machine(), temp_zone);
1319 443358 : optimizer.Optimize();
1320 443358 : }
1321 : };
1322 :
struct EffectControlLinearizationPhase {
  static const char* phase_name() { return "effect linearization"; }

  // Computes a temporary schedule and uses it to wire low-level effect and
  // control dependencies into explicit chains (see inline comments).
  void Run(PipelineData* data, Zone* temp_zone) {
    // The scheduler requires the graphs to be trimmed, so trim now.
    // TODO(jarin) Remove the trimming once the scheduler can handle untrimmed
    // graphs.
    GraphTrimmer trimmer(temp_zone, data->graph());
    NodeVector roots(temp_zone);
    data->jsgraph()->GetCachedNodes(&roots);
    trimmer.TrimGraph(roots.begin(), roots.end());

    // Schedule the graph without node splitting so that we can
    // fix the effect and control flow for nodes with low-level side
    // effects (such as changing representation to tagged or
    // 'floating' allocation regions.)
    Schedule* schedule = Scheduler::ComputeSchedule(temp_zone, data->graph(),
                                                    Scheduler::kTempSchedule);
    if (FLAG_turbo_verify) ScheduleVerifier::Run(schedule);
    TraceSchedule(data->info(), schedule);

    // Post-pass for wiring the control/effects
    // - connect allocating representation changes into the control&effect
    //   chains and lower them,
    // - get rid of the region markers,
    // - introduce effect phis and rewire effects to get SSA again.
    EffectControlLinearizer linearizer(data->jsgraph(), schedule, temp_zone,
                                       data->source_positions());
    linearizer.Run();
  }
};
1354 :
1355 : // The store-store elimination greatly benefits from doing a common operator
1356 : // reducer and dead code elimination just before it, to eliminate conditional
1357 : // deopts with a constant condition.
1358 :
1359 : struct DeadCodeEliminationPhase {
1360 : static const char* phase_name() { return "dead code elimination"; }
1361 :
1362 1330074 : void Run(PipelineData* data, Zone* temp_zone) {
1363 443355 : JSGraphReducer graph_reducer(data->jsgraph(), temp_zone);
1364 : DeadCodeElimination dead_code_elimination(&graph_reducer, data->graph(),
1365 443360 : data->common());
1366 : CommonOperatorReducer common_reducer(&graph_reducer, data->graph(),
1367 443359 : data->common(), data->machine());
1368 443359 : AddReducer(data, &graph_reducer, &dead_code_elimination);
1369 443359 : AddReducer(data, &graph_reducer, &common_reducer);
1370 443360 : graph_reducer.ReduceGraph();
1371 443358 : }
1372 : };
1373 :
1374 : struct StoreStoreEliminationPhase {
1375 : static const char* phase_name() { return "store-store elimination"; }
1376 :
1377 1330080 : void Run(PipelineData* data, Zone* temp_zone) {
1378 443360 : GraphTrimmer trimmer(temp_zone, data->graph());
1379 : NodeVector roots(temp_zone);
1380 443360 : data->jsgraph()->GetCachedNodes(&roots);
1381 443360 : trimmer.TrimGraph(roots.begin(), roots.end());
1382 :
1383 443360 : StoreStoreElimination::Run(data->jsgraph(), temp_zone);
1384 443354 : }
1385 : };
1386 :
struct LoadEliminationPhase {
  static const char* phase_name() { return "load elimination"; }

  // Runs load elimination together with branch/dead-code/redundancy/
  // checkpoint elimination, common operator reduction and value numbering
  // in one fixpoint.
  void Run(PipelineData* data, Zone* temp_zone) {
    JSGraphReducer graph_reducer(data->jsgraph(), temp_zone);
    BranchElimination branch_condition_elimination(&graph_reducer,
                                                   data->jsgraph(), temp_zone);
    DeadCodeElimination dead_code_elimination(&graph_reducer, data->graph(),
                                              data->common());
    RedundancyElimination redundancy_elimination(&graph_reducer, temp_zone);
    LoadElimination load_elimination(&graph_reducer, data->jsgraph(),
                                     temp_zone);
    CheckpointElimination checkpoint_elimination(&graph_reducer);
    ValueNumberingReducer value_numbering(temp_zone, data->graph()->zone());
    CommonOperatorReducer common_reducer(&graph_reducer, data->graph(),
                                         data->common(), data->machine());
    // NOTE(review): registration order assumed significant — do not reorder.
    AddReducer(data, &graph_reducer, &branch_condition_elimination);
    AddReducer(data, &graph_reducer, &dead_code_elimination);
    AddReducer(data, &graph_reducer, &redundancy_elimination);
    AddReducer(data, &graph_reducer, &load_elimination);
    AddReducer(data, &graph_reducer, &checkpoint_elimination);
    AddReducer(data, &graph_reducer, &common_reducer);
    AddReducer(data, &graph_reducer, &value_numbering);
    graph_reducer.ReduceGraph();
  }
};
1413 :
1414 : struct MemoryOptimizationPhase {
1415 : static const char* phase_name() { return "memory optimization"; }
1416 :
1417 1330074 : void Run(PipelineData* data, Zone* temp_zone) {
1418 : // The memory optimizer requires the graphs to be trimmed, so trim now.
1419 443355 : GraphTrimmer trimmer(temp_zone, data->graph());
1420 : NodeVector roots(temp_zone);
1421 443359 : data->jsgraph()->GetCachedNodes(&roots);
1422 443360 : trimmer.TrimGraph(roots.begin(), roots.end());
1423 :
1424 : // Optimize allocations and load/store operations.
1425 443360 : MemoryOptimizer optimizer(data->jsgraph(), temp_zone);
1426 886714 : optimizer.Optimize();
1427 443357 : }
1428 : };
1429 :
struct LateOptimizationPhase {
  static const char* phase_name() { return "late optimization"; }

  // Final machine-level reducer fixpoint: branch and dead-code elimination,
  // machine/common operator reduction, select lowering and value numbering.
  void Run(PipelineData* data, Zone* temp_zone) {
    JSGraphReducer graph_reducer(data->jsgraph(), temp_zone);
    BranchElimination branch_condition_elimination(&graph_reducer,
                                                   data->jsgraph(), temp_zone);
    DeadCodeElimination dead_code_elimination(&graph_reducer, data->graph(),
                                              data->common());
    ValueNumberingReducer value_numbering(temp_zone, data->graph()->zone());
    MachineOperatorReducer machine_reducer(data->jsgraph());
    CommonOperatorReducer common_reducer(&graph_reducer, data->graph(),
                                         data->common(), data->machine());
    SelectLowering select_lowering(data->jsgraph()->graph(),
                                   data->jsgraph()->common());
    // NOTE(review): registration order assumed significant — do not reorder.
    AddReducer(data, &graph_reducer, &branch_condition_elimination);
    AddReducer(data, &graph_reducer, &dead_code_elimination);
    AddReducer(data, &graph_reducer, &machine_reducer);
    AddReducer(data, &graph_reducer, &common_reducer);
    AddReducer(data, &graph_reducer, &select_lowering);
    AddReducer(data, &graph_reducer, &value_numbering);
    graph_reducer.ReduceGraph();
  }
};
1454 :
1455 : struct EarlyGraphTrimmingPhase {
1456 : static const char* phase_name() { return "early graph trimming"; }
1457 886763 : void Run(PipelineData* data, Zone* temp_zone) {
1458 443382 : GraphTrimmer trimmer(temp_zone, data->graph());
1459 : NodeVector roots(temp_zone);
1460 443381 : data->jsgraph()->GetCachedNodes(&roots);
1461 443382 : trimmer.TrimGraph(roots.begin(), roots.end());
1462 443382 : }
1463 : };
1464 :
1465 :
1466 : struct LateGraphTrimmingPhase {
1467 : static const char* phase_name() { return "late graph trimming"; }
1468 1342621 : void Run(PipelineData* data, Zone* temp_zone) {
1469 671298 : GraphTrimmer trimmer(temp_zone, data->graph());
1470 : NodeVector roots(temp_zone);
1471 671323 : if (data->jsgraph()) {
1472 671323 : data->jsgraph()->GetCachedNodes(&roots);
1473 : }
1474 671339 : trimmer.TrimGraph(roots.begin(), roots.end());
1475 671290 : }
1476 : };
1477 :
1478 :
1479 : struct ComputeSchedulePhase {
1480 : static const char* phase_name() { return "scheduling"; }
1481 :
1482 2025834 : void Run(PipelineData* data, Zone* temp_zone) {
1483 : Schedule* schedule = Scheduler::ComputeSchedule(
1484 : temp_zone, data->graph(), data->info()->is_splitting_enabled()
1485 : ? Scheduler::kSplitNodes
1486 2025834 : : Scheduler::kNoFlags);
1487 1012887 : if (FLAG_turbo_verify) ScheduleVerifier::Run(schedule);
1488 : data->set_schedule(schedule);
1489 1012887 : }
1490 : };
1491 :
1492 :
struct InstructionSelectionPhase {
  static const char* phase_name() { return "select instructions"; }

  // Translates the scheduled graph into the instruction sequence.  On
  // selection failure the compilation is flagged as failed on the pipeline
  // data rather than returning an error.
  void Run(PipelineData* data, Zone* temp_zone, Linkage* linkage) {
    InstructionSelector selector(
        temp_zone, data->graph()->NodeCount(), linkage, data->sequence(),
        data->schedule(), data->source_positions(), data->frame(),
        data->info()->is_source_positions_enabled()
            ? InstructionSelector::kAllSourcePositions
            : InstructionSelector::kCallSourcePositions,
        InstructionSelector::SupportedFeatures(),
        FLAG_turbo_instruction_scheduling
            ? InstructionSelector::kEnableScheduling
            : InstructionSelector::kDisableScheduling,
        data->info()->will_serialize()
            ? InstructionSelector::kEnableSerialization
            : InstructionSelector::kDisableSerialization);
    if (!selector.SelectInstructions()) {
      data->set_compilation_failed();
    }
  }
};
1515 :
1516 :
1517 : struct MeetRegisterConstraintsPhase {
1518 : static const char* phase_name() { return "meet register constraints"; }
1519 :
1520 1301569 : void Run(PipelineData* data, Zone* temp_zone) {
1521 1301569 : ConstraintBuilder builder(data->register_allocation_data());
1522 1301555 : builder.MeetRegisterConstraints();
1523 1301593 : }
1524 : };
1525 :
1526 :
1527 : struct ResolvePhisPhase {
1528 : static const char* phase_name() { return "resolve phis"; }
1529 :
1530 1301582 : void Run(PipelineData* data, Zone* temp_zone) {
1531 1301582 : ConstraintBuilder builder(data->register_allocation_data());
1532 1301582 : builder.ResolvePhis();
1533 1301523 : }
1534 : };
1535 :
1536 :
1537 : struct BuildLiveRangesPhase {
1538 : static const char* phase_name() { return "build live ranges"; }
1539 :
1540 1301543 : void Run(PipelineData* data, Zone* temp_zone) {
1541 1301543 : LiveRangeBuilder builder(data->register_allocation_data(), temp_zone);
1542 1301547 : builder.BuildLiveRanges();
1543 1301520 : }
1544 : };
1545 :
1546 :
1547 : struct SplinterLiveRangesPhase {
1548 : static const char* phase_name() { return "splinter live ranges"; }
1549 :
1550 1301563 : void Run(PipelineData* data, Zone* temp_zone) {
1551 : LiveRangeSeparator live_range_splinterer(data->register_allocation_data(),
1552 : temp_zone);
1553 1301563 : live_range_splinterer.Splinter();
1554 : }
1555 : };
1556 :
1557 :
1558 : template <typename RegAllocator>
1559 : struct AllocateGeneralRegistersPhase {
1560 : static const char* phase_name() { return "allocate general registers"; }
1561 :
1562 1301534 : void Run(PipelineData* data, Zone* temp_zone) {
1563 : RegAllocator allocator(data->register_allocation_data(), GENERAL_REGISTERS,
1564 1301534 : temp_zone);
1565 1301597 : allocator.AllocateRegisters();
1566 1301573 : }
1567 : };
1568 :
1569 : template <typename RegAllocator>
1570 : struct AllocateFPRegistersPhase {
1571 : static const char* phase_name() { return "allocate f.p. registers"; }
1572 :
1573 1301591 : void Run(PipelineData* data, Zone* temp_zone) {
1574 : RegAllocator allocator(data->register_allocation_data(), FP_REGISTERS,
1575 1301591 : temp_zone);
1576 1301599 : allocator.AllocateRegisters();
1577 1301589 : }
1578 : };
1579 :
1580 :
1581 : struct MergeSplintersPhase {
1582 : static const char* phase_name() { return "merge splintered ranges"; }
1583 1301598 : void Run(PipelineData* pipeline_data, Zone* temp_zone) {
1584 : RegisterAllocationData* data = pipeline_data->register_allocation_data();
1585 : LiveRangeMerger live_range_merger(data, temp_zone);
1586 1301598 : live_range_merger.Merge();
1587 : }
1588 : };
1589 :
1590 :
1591 : struct LocateSpillSlotsPhase {
1592 : static const char* phase_name() { return "locate spill slots"; }
1593 :
1594 1301600 : void Run(PipelineData* data, Zone* temp_zone) {
1595 1301600 : SpillSlotLocator locator(data->register_allocation_data());
1596 1301596 : locator.LocateSpillSlots();
1597 1301597 : }
1598 : };
1599 :
1600 :
1601 : struct AssignSpillSlotsPhase {
1602 : static const char* phase_name() { return "assign spill slots"; }
1603 :
1604 1301591 : void Run(PipelineData* data, Zone* temp_zone) {
1605 1301591 : OperandAssigner assigner(data->register_allocation_data());
1606 1301578 : assigner.AssignSpillSlots();
1607 1301559 : }
1608 : };
1609 :
1610 :
1611 : struct CommitAssignmentPhase {
1612 : static const char* phase_name() { return "commit assignment"; }
1613 :
1614 1301592 : void Run(PipelineData* data, Zone* temp_zone) {
1615 1301592 : OperandAssigner assigner(data->register_allocation_data());
1616 1301592 : assigner.CommitAssignment();
1617 1301581 : }
1618 : };
1619 :
1620 :
1621 : struct PopulateReferenceMapsPhase {
1622 : static const char* phase_name() { return "populate pointer maps"; }
1623 :
1624 1301586 : void Run(PipelineData* data, Zone* temp_zone) {
1625 1301586 : ReferenceMapPopulator populator(data->register_allocation_data());
1626 1301585 : populator.PopulateReferenceMaps();
1627 1301447 : }
1628 : };
1629 :
1630 :
1631 : struct ConnectRangesPhase {
1632 : static const char* phase_name() { return "connect ranges"; }
1633 :
1634 1301591 : void Run(PipelineData* data, Zone* temp_zone) {
1635 1301591 : LiveRangeConnector connector(data->register_allocation_data());
1636 1301558 : connector.ConnectRanges(temp_zone);
1637 1301511 : }
1638 : };
1639 :
1640 :
1641 : struct ResolveControlFlowPhase {
1642 : static const char* phase_name() { return "resolve control flow"; }
1643 :
1644 1301589 : void Run(PipelineData* data, Zone* temp_zone) {
1645 1301589 : LiveRangeConnector connector(data->register_allocation_data());
1646 1301590 : connector.ResolveControlFlow(temp_zone);
1647 1301584 : }
1648 : };
1649 :
1650 :
1651 : struct OptimizeMovesPhase {
1652 : static const char* phase_name() { return "optimize moves"; }
1653 :
1654 1301591 : void Run(PipelineData* data, Zone* temp_zone) {
1655 1301591 : MoveOptimizer move_optimizer(temp_zone, data->sequence());
1656 1301576 : move_optimizer.Run();
1657 1301577 : }
1658 : };
1659 :
1660 :
1661 : struct FrameElisionPhase {
1662 : static const char* phase_name() { return "frame elision"; }
1663 :
1664 1301557 : void Run(PipelineData* data, Zone* temp_zone) {
1665 1301557 : FrameElider(data->sequence()).Run();
1666 1301533 : }
1667 : };
1668 :
1669 :
1670 : struct JumpThreadingPhase {
1671 : static const char* phase_name() { return "jump threading"; }
1672 :
1673 3279411 : void Run(PipelineData* data, Zone* temp_zone, bool frame_at_start) {
1674 : ZoneVector<RpoNumber> result(temp_zone);
1675 1301543 : if (JumpThreading::ComputeForwarding(temp_zone, result, data->sequence(),
1676 1301543 : frame_at_start)) {
1677 676325 : JumpThreading::ApplyForwarding(result, data->sequence());
1678 : }
1679 1301534 : }
1680 : };
1681 :
1682 : struct AssembleCodePhase {
1683 : static const char* phase_name() { return "assemble code"; }
1684 :
1685 1301538 : void Run(PipelineData* data, Zone* temp_zone) {
1686 1301538 : data->code_generator()->AssembleCode();
1687 : }
1688 : };
1689 :
1690 : struct FinalizeCodePhase {
1691 : static const char* phase_name() { return "finalize code"; }
1692 :
1693 1300508 : void Run(PipelineData* data, Zone* temp_zone) {
1694 1300508 : data->set_code(data->code_generator()->FinalizeCode());
1695 : }
1696 : };
1697 :
1698 :
1699 : struct PrintGraphPhase {
1700 : static const char* phase_name() { return nullptr; }
1701 :
1702 0 : void Run(PipelineData* data, Zone* temp_zone, const char* phase) {
1703 0 : CompilationInfo* info = data->info();
1704 : Graph* graph = data->graph();
1705 :
1706 0 : if (FLAG_trace_turbo) { // Print JSON.
1707 : AllowHandleDereference allow_deref;
1708 0 : TurboJsonFile json_of(info, std::ios_base::app);
1709 0 : json_of << "{\"name\":\"" << phase << "\",\"type\":\"graph\",\"data\":"
1710 0 : << AsJSON(*graph, data->source_positions()) << "},\n";
1711 : }
1712 :
1713 0 : if (FLAG_trace_turbo_scheduled) { // Scheduled textual output.
1714 0 : AccountingAllocator allocator;
1715 : Schedule* schedule = data->schedule();
1716 0 : if (schedule == nullptr) {
1717 : schedule = Scheduler::ComputeSchedule(temp_zone, data->graph(),
1718 0 : Scheduler::kNoFlags);
1719 : }
1720 :
1721 : AllowHandleDereference allow_deref;
1722 0 : CodeTracer::Scope tracing_scope(info->isolate()->GetCodeTracer());
1723 0 : OFStream os(tracing_scope.file());
1724 0 : os << "-- Graph after " << phase << " -- " << std::endl;
1725 0 : os << AsScheduledGraph(schedule);
1726 0 : } else if (FLAG_trace_turbo_graph) { // Simple textual RPO.
1727 : AllowHandleDereference allow_deref;
1728 0 : CodeTracer::Scope tracing_scope(info->isolate()->GetCodeTracer());
1729 0 : OFStream os(tracing_scope.file());
1730 0 : os << "-- Graph after " << phase << " -- " << std::endl;
1731 0 : os << AsRPO(*graph);
1732 : }
1733 0 : }
1734 : };
1735 :
1736 :
1737 : struct VerifyGraphPhase {
1738 : static const char* phase_name() { return nullptr; }
1739 :
1740 74492 : void Run(PipelineData* data, Zone* temp_zone, const bool untyped,
1741 : bool values_only = false) {
1742 : Verifier::Run(data->graph(), !untyped ? Verifier::TYPED : Verifier::UNTYPED,
1743 148984 : values_only ? Verifier::kValuesOnly : Verifier::kAll);
1744 : }
1745 : };
1746 :
1747 9003667 : void PipelineImpl::RunPrintAndVerify(const char* phase, bool untyped) {
1748 9003667 : if (FLAG_trace_turbo || FLAG_trace_turbo_graph) {
1749 0 : Run<PrintGraphPhase>(phase);
1750 : }
1751 9003834 : if (FLAG_turbo_verify) {
1752 0 : Run<VerifyGraphPhase>(untyped);
1753 : }
1754 9003834 : }
1755 :
// Builds the initial TurboFan graph from bytecode, then runs inlining,
// trimming, typing and typed lowering over it. Always returns true in this
// version (failures in later pipeline stages are reported elsewhere).
bool PipelineImpl::CreateGraph() {
  PipelineData* data = this->data_;

  data->BeginPhaseKind("graph creation");

  // Optional tracing header for --trace-turbo / --trace-turbo-graph.
  if (FLAG_trace_turbo || FLAG_trace_turbo_graph) {
    CodeTracer::Scope tracing_scope(isolate()->GetCodeTracer());
    OFStream os(tracing_scope.file());
    os << "---------------------------------------------------\n"
       << "Begin compiling method " << info()->GetDebugName().get()
       << " using Turbofan" << std::endl;
  }
  if (FLAG_trace_turbo) {
    TurboCfgFile tcf(isolate());
    tcf << AsC1VCompilation(info());
  }

  // Attach source positions to nodes created from here on.
  data->source_positions()->AddDecorator();

  Run<GraphBuilderPhase>();
  RunPrintAndVerify("Initial untyped", true);

  // Perform function context specialization and inlining (if enabled).
  Run<InliningPhase>();
  RunPrintAndVerify("Inlined", true);

  // Remove dead->live edges from the graph.
  Run<EarlyGraphTrimmingPhase>();
  RunPrintAndVerify("Early trimmed", true);

  // Run the type-sensitive lowerings and optimizations on the graph.
  {
    // Determine the Typer operation flags.
    Typer::Flags flags = Typer::kNoFlags;
    if (is_sloppy(info()->shared_info()->language_mode()) &&
        info()->shared_info()->IsUserJavaScript()) {
      // Sloppy mode functions always have an Object for this.
      flags |= Typer::kThisIsReceiver;
    }
    if (IsClassConstructor(info()->shared_info()->kind())) {
      // Class constructors cannot be [[Call]]ed.
      flags |= Typer::kNewTargetIsReceiver;
    }

    // Type the graph and keep the Typer running on newly created nodes within
    // this scope; the Typer is automatically unlinked from the Graph once we
    // leave this scope below.
    Typer typer(isolate(), flags, data->graph());
    Run<TyperPhase>(&typer);
    RunPrintAndVerify("Typed");

    // Lower JSOperators where we can determine types.
    Run<TypedLoweringPhase>();
    RunPrintAndVerify("Lowered typed");
  }

  // Do some hacky things to prepare for the optimization phase.
  // (caching handles, etc.).
  Run<ConcurrentOptimizationPrepPhase>();

  data->EndPhaseKind();

  return true;
}
1820 :
// Runs the lowering/optimization half of the pipeline on the already-typed
// graph, then tail-calls into scheduling and instruction selection.
// Returns false if escape analysis (or a later stage) aborted compilation.
bool PipelineImpl::OptimizeGraph(Linkage* linkage) {
  PipelineData* data = this->data_;

  data->BeginPhaseKind("lowering");

  // Loop peeling and loop-exit elimination are mutually exclusive here.
  if (data->info()->is_loop_peeling_enabled()) {
    Run<LoopPeelingPhase>();
    RunPrintAndVerify("Loops peeled", true);
  } else {
    Run<LoopExitEliminationPhase>();
    RunPrintAndVerify("Loop exits eliminated", true);
  }

  if (FLAG_turbo_load_elimination) {
    Run<LoadEliminationPhase>();
    RunPrintAndVerify("Load eliminated");
  }

  if (FLAG_turbo_escape) {
    Run<EscapeAnalysisPhase>();
    // Escape analysis can fail on cyclic object state; abort cleanly.
    if (data->compilation_failed()) {
      info()->AbortOptimization(kCyclicObjectStateDetectedInEscapeAnalysis);
      data->EndPhaseKind();
      return false;
    }
    RunPrintAndVerify("Escape Analysed");
  }

  // Perform simplified lowering. This has to run w/o the Typer decorator,
  // because we cannot compute meaningful types anyways, and the computed types
  // might even conflict with the representation/truncation logic.
  Run<SimplifiedLoweringPhase>();
  RunPrintAndVerify("Simplified lowering", true);

  // From now on it is invalid to look at types on the nodes, because the types
  // on the nodes might not make sense after representation selection due to the
  // way we handle truncations; if we'd want to look at types afterwards we'd
  // essentially need to re-type (large portions of) the graph.

  // In order to catch bugs related to type access after this point, we now
  // remove the types from the nodes (currently only in Debug builds).
#ifdef DEBUG
  Run<UntyperPhase>();
  RunPrintAndVerify("Untyped", true);
#endif

  // Run generic lowering pass.
  Run<GenericLoweringPhase>();
  RunPrintAndVerify("Generic lowering", true);

  data->BeginPhaseKind("block building");

  // Run early optimization pass.
  Run<EarlyOptimizationPhase>();
  RunPrintAndVerify("Early optimized", true);

  Run<EffectControlLinearizationPhase>();
  RunPrintAndVerify("Effect and control linearized", true);

  Run<DeadCodeEliminationPhase>();
  RunPrintAndVerify("Dead code elimination", true);

  if (FLAG_turbo_store_elimination) {
    Run<StoreStoreEliminationPhase>();
    RunPrintAndVerify("Store-store elimination", true);
  }

  // Optimize control flow.
  if (FLAG_turbo_cf_optimization) {
    Run<ControlFlowOptimizationPhase>();
    RunPrintAndVerify("Control flow optimized", true);
  }

  // Optimize memory access and allocation operations.
  Run<MemoryOptimizationPhase>();
  // TODO(jarin, rossberg): Remove UNTYPED once machine typing works.
  RunPrintAndVerify("Memory optimized", true);

  // Lower changes that have been inserted before.
  Run<LateOptimizationPhase>();
  // TODO(jarin, rossberg): Remove UNTYPED once machine typing works.
  RunPrintAndVerify("Late optimized", true);

  // Source positions are no longer tracked past this point.
  data->source_positions()->RemoveDecorator();

  return ScheduleAndSelectInstructions(linkage, true);
}
1908 :
// Compiles a pre-built machine graph (code stub / CSA) down to code.
// The caller supplies the graph and schedule; this only runs the back half
// of the pipeline. Note: stack-object declaration order below matters, since
// `data` borrows `info`, `zone_stats` and `source_positions`.
Handle<Code> Pipeline::GenerateCodeForCodeStub(Isolate* isolate,
                                               CallDescriptor* call_descriptor,
                                               Graph* graph, Schedule* schedule,
                                               Code::Kind kind,
                                               const char* debug_name,
                                               JumpOptimizationInfo* jump_opt) {
  CompilationInfo info(CStrVector(debug_name), isolate, graph->zone(), kind);
  if (isolate->serializer_enabled()) info.MarkAsSerializing();

  // Construct a pipeline for scheduling and code generation.
  ZoneStats zone_stats(isolate->allocator());
  SourcePositionTable source_positions(graph);
  PipelineData data(&zone_stats, &info, graph, schedule, &source_positions,
                    jump_opt);
  // Machine-graph verification for stubs is gated on --verify-csa.
  data.set_verify_graph(FLAG_verify_csa);
  std::unique_ptr<PipelineStatistics> pipeline_statistics;
  if (FLAG_turbo_stats || FLAG_turbo_stats_nvp) {
    pipeline_statistics.reset(new PipelineStatistics(&info, &zone_stats));
    pipeline_statistics->BeginPhaseKind("stub codegen");
  }

  PipelineImpl pipeline(&data);
  DCHECK_NOT_NULL(data.schedule());

  if (FLAG_trace_turbo || FLAG_trace_turbo_graph) {
    CodeTracer::Scope tracing_scope(isolate->GetCodeTracer());
    OFStream os(tracing_scope.file());
    os << "---------------------------------------------------\n"
       << "Begin compiling " << debug_name << " using Turbofan" << std::endl;
    if (FLAG_trace_turbo) {
      TurboJsonFile json_of(&info, std::ios_base::trunc);
      json_of << "{\"function\":\"" << info.GetDebugName().get()
              << "\", \"source\":\"\",\n\"phases\":[";
    }
    pipeline.Run<PrintGraphPhase>("Machine");
  }

  // Values-only, untyped verification of the incoming machine graph.
  pipeline.Run<VerifyGraphPhase>(false, true);
  return pipeline.ScheduleAndGenerateCode(call_descriptor);
}
1949 :
1950 : // static
1951 4308 : Handle<Code> Pipeline::GenerateCodeForTesting(CompilationInfo* info) {
1952 4308 : ZoneStats zone_stats(info->isolate()->allocator());
1953 : std::unique_ptr<PipelineStatistics> pipeline_statistics(
1954 4308 : CreatePipelineStatistics(Handle<Script>::null(), info, &zone_stats));
1955 8616 : PipelineData data(&zone_stats, info, pipeline_statistics.get());
1956 : PipelineImpl pipeline(&data);
1957 :
1958 4308 : Linkage linkage(Linkage::ComputeIncoming(data.instruction_zone(), info));
1959 :
1960 4308 : if (!pipeline.CreateGraph()) return Handle<Code>::null();
1961 4308 : if (!pipeline.OptimizeGraph(&linkage)) return Handle<Code>::null();
1962 4308 : pipeline.AssembleCode(&linkage);
1963 8616 : return pipeline.FinalizeCode();
1964 : }
1965 :
1966 : // static
1967 6 : Handle<Code> Pipeline::GenerateCodeForTesting(CompilationInfo* info,
1968 : Graph* graph,
1969 : Schedule* schedule) {
1970 : CallDescriptor* call_descriptor =
1971 6 : Linkage::ComputeIncoming(info->zone(), info);
1972 6 : return GenerateCodeForTesting(info, call_descriptor, graph, schedule);
1973 : }
1974 :
1975 : // static
// Test-only entry point for a pre-built (machine) graph: runs only the back
// half of the pipeline. A missing source-position table is allocated lazily
// in the info's zone.
Handle<Code> Pipeline::GenerateCodeForTesting(
    CompilationInfo* info, CallDescriptor* call_descriptor, Graph* graph,
    Schedule* schedule, SourcePositionTable* source_positions) {
  // Construct a pipeline for scheduling and code generation.
  ZoneStats zone_stats(info->isolate()->allocator());
  // TODO(wasm): Refactor code generation to check for non-existing source
  // table, then remove this conditional allocation.
  if (!source_positions)
    source_positions = new (info->zone()) SourcePositionTable(graph);
  PipelineData data(&zone_stats, info, graph, schedule, source_positions,
                    nullptr);
  std::unique_ptr<PipelineStatistics> pipeline_statistics;
  if (FLAG_turbo_stats || FLAG_turbo_stats_nvp) {
    pipeline_statistics.reset(new PipelineStatistics(info, &zone_stats));
    pipeline_statistics->BeginPhaseKind("test codegen");
  }

  PipelineImpl pipeline(&data);

  if (FLAG_trace_turbo) {
    TurboJsonFile json_of(info, std::ios_base::trunc);
    json_of << "{\"function\":\"" << info->GetDebugName().get()
            << "\", \"source\":\"\",\n\"phases\":[";
  }
  // TODO(rossberg): Should this really be untyped?
  pipeline.RunPrintAndVerify("Machine", true);

  return pipeline.ScheduleAndGenerateCode(call_descriptor);
}
2005 :
2006 : // static
2007 457030 : CompilationJob* Pipeline::NewCompilationJob(Handle<JSFunction> function,
2008 : bool has_script) {
2009 : Handle<SharedFunctionInfo> shared = handle(function->shared());
2010 : ParseInfo* parse_info;
2011 457030 : if (!has_script) {
2012 0 : parse_info = ParseInfo::AllocateWithoutScript(shared);
2013 : } else {
2014 457030 : parse_info = new ParseInfo(shared);
2015 : }
2016 457031 : return new PipelineCompilationJob(parse_info, shared, function);
2017 : }
2018 :
2019 : // static
2020 227935 : CompilationJob* Pipeline::NewWasmCompilationJob(
2021 : CompilationInfo* info, JSGraph* jsgraph, CallDescriptor* descriptor,
2022 : SourcePositionTable* source_positions,
2023 : ZoneVector<trap_handler::ProtectedInstructionData>* protected_instructions,
2024 : wasm::ModuleOrigin asmjs_origin) {
2025 : return new PipelineWasmCompilationJob(info, jsgraph, descriptor,
2026 : source_positions,
2027 227935 : protected_instructions, asmjs_origin);
2028 : }
2029 :
// Test-only entry point: runs just the register-allocation stage over an
// existing instruction sequence. Returns true on success. Declaration order
// matters: `data` borrows `info` and `zone_stats`.
bool Pipeline::AllocateRegistersForTesting(const RegisterConfiguration* config,
                                           InstructionSequence* sequence,
                                           bool run_verifier) {
  CompilationInfo info(ArrayVector("testing"), sequence->isolate(),
                       sequence->zone(), Code::STUB);
  ZoneStats zone_stats(sequence->isolate()->allocator());
  PipelineData data(&zone_stats, &info, sequence);
  PipelineImpl pipeline(&data);
  // No call descriptor here, so frame data is initialized without one.
  pipeline.data_->InitializeFrameData(nullptr);
  pipeline.AllocateRegisters(config, nullptr, run_verifier);
  return !data.compilation_failed();
}
2042 :
// Schedules the graph (if not already scheduled), runs instruction selection
// and register allocation, and optimizes jumps. Returns false when
// instruction selection or register allocation failed; the failure reason is
// recorded on the CompilationInfo via AbortOptimization.
bool PipelineImpl::ScheduleAndSelectInstructions(Linkage* linkage,
                                                 bool trim_graph) {
  CallDescriptor* call_descriptor = linkage->GetIncomingDescriptor();
  PipelineData* data = this->data_;

  DCHECK_NOT_NULL(data->graph());

  if (trim_graph) {
    Run<LateGraphTrimmingPhase>();
    RunPrintAndVerify("Late trimmed", true);
  }
  // Stub graphs may already come with a schedule; only compute one if needed.
  if (data->schedule() == nullptr) Run<ComputeSchedulePhase>();
  TraceSchedule(data->info(), data->schedule());

  if (FLAG_turbo_profiling) {
    data->set_profiler_data(BasicBlockInstrumentor::Instrument(
        info(), data->graph(), data->schedule()));
  }

  bool verify_stub_graph = data->verify_graph();
  // Jump optimization runs instruction selection twice, but the instruction
  // selector mutates nodes like swapping the inputs of a load, which can
  // violate the machine graph verification rules. So we skip the second
  // verification on a graph that already verified before.
  auto jump_opt = data->jump_optimization_info();
  if (jump_opt && jump_opt->is_optimizing()) {
    verify_stub_graph = false;
  }
  // Machine-graph verification runs when requested for this graph, or when
  // --turbo-verify-machine-graph matches "*" or this graph's debug name.
  if (verify_stub_graph ||
      (FLAG_turbo_verify_machine_graph != nullptr &&
       (!strcmp(FLAG_turbo_verify_machine_graph, "*") ||
        !strcmp(FLAG_turbo_verify_machine_graph, data->debug_name())))) {
    if (FLAG_trace_verify_csa) {
      AllowHandleDereference allow_deref;
      CompilationInfo* info = data->info();
      CodeTracer::Scope tracing_scope(info->isolate()->GetCodeTracer());
      OFStream os(tracing_scope.file());
      os << "--------------------------------------------------\n"
         << "--- Verifying " << data->debug_name() << " generated by TurboFan\n"
         << "--------------------------------------------------\n"
         << *data->schedule()
         << "--------------------------------------------------\n"
         << "--- End of " << data->debug_name() << " generated by TurboFan\n"
         << "--------------------------------------------------\n";
    }
    Zone temp_zone(data->isolate()->allocator(), ZONE_NAME);
    MachineGraphVerifier::Run(data->graph(), data->schedule(), linkage,
                              data->info()->IsStub(), data->debug_name(),
                              &temp_zone);
  }

  data->InitializeInstructionSequence(call_descriptor);

  data->InitializeFrameData(call_descriptor);
  // Select and schedule instructions covering the scheduled graph.
  Run<InstructionSelectionPhase>(linkage);
  if (data->compilation_failed()) {
    info()->AbortOptimization(kCodeGenerationFailed);
    data->EndPhaseKind();
    return false;
  }

  if (FLAG_trace_turbo && !data->MayHaveUnverifiableGraph()) {
    AllowHandleDereference allow_deref;
    TurboCfgFile tcf(isolate());
    tcf << AsC1V("CodeGen", data->schedule(), data->source_positions(),
                 data->sequence());
  }

  if (FLAG_trace_turbo) {
    std::ostringstream source_position_output;
    // Output source position information before the graph is deleted.
    data_->source_positions()->Print(source_position_output);
    data_->set_source_position_output(source_position_output.str());
  }

  // The graph is no longer needed past this point; free its zone.
  data->DeleteGraphZone();

  data->BeginPhaseKind("register allocation");

  bool run_verifier = FLAG_turbo_verify_allocation;

  // Allocate registers.
  if (call_descriptor->HasRestrictedAllocatableRegisters()) {
    auto registers = call_descriptor->AllocatableRegisters();
    DCHECK_LT(0, NumRegs(registers));
    std::unique_ptr<const RegisterConfiguration> config;
    config.reset(RegisterConfiguration::RestrictGeneralRegisters(registers));
    AllocateRegisters(config.get(), call_descriptor, run_verifier);
  } else {
    AllocateRegisters(RegisterConfiguration::Default(), call_descriptor,
                      run_verifier);
  }

  Run<FrameElisionPhase>();
  if (data->compilation_failed()) {
    info()->AbortOptimization(kNotEnoughVirtualRegistersRegalloc);
    data->EndPhaseKind();
    return false;
  }

  // TODO(mtrofin): move this off to the register allocator.
  bool generate_frame_at_start =
      data_->sequence()->instruction_blocks().front()->must_construct_frame();
  // Optimize jumps.
  if (FLAG_turbo_jt) {
    Run<JumpThreadingPhase>(generate_frame_at_start);
  }

  data->EndPhaseKind();

  return true;
}
2156 :
2157 1301538 : void PipelineImpl::AssembleCode(Linkage* linkage) {
2158 1301538 : PipelineData* data = this->data_;
2159 : data->BeginPhaseKind("code generation");
2160 1301538 : data->InitializeCodeGenerator(linkage);
2161 1301481 : Run<AssembleCodePhase>();
2162 1301504 : data->DeleteInstructionZone();
2163 1301555 : }
2164 :
// Finalizes the assembled code into a Code object, publishes it on the
// CompilationInfo, and emits optional disassembly/tracing output.
Handle<Code> PipelineImpl::FinalizeCode() {
  PipelineData* data = this->data_;
  Run<FinalizeCodePhase>();

  Handle<Code> code = data->code();
  // Attach a disassembly to the basic-block profiler data when enabled.
  if (data->profiler_data()) {
#if ENABLE_DISASSEMBLER
    std::ostringstream os;
    code->Disassemble(nullptr, os);
    data->profiler_data()->SetCode(&os);
#endif
  }

  info()->SetCode(code);
  PrintCode(code, info());

  // Emit the closing JSON sections of the --trace-turbo output, including
  // the disassembly (when the disassembler is compiled in).
  if (FLAG_trace_turbo) {
    TurboJsonFile json_of(info(), std::ios_base::app);
    json_of << "{\"name\":\"disassembly\",\"type\":\"disassembly\",\"data\":\"";
#if ENABLE_DISASSEMBLER
    std::stringstream disassembly_stream;
    code->Disassemble(nullptr, disassembly_stream);
    std::string disassembly_string(disassembly_stream.str());
    for (const auto& c : disassembly_string) {
      json_of << AsEscapedUC16ForJSON(c);
    }
#endif  // ENABLE_DISASSEMBLER
    json_of << "\"}\n],\n";
    json_of << "\"nodePositions\":";
    json_of << data->source_position_output();
    json_of << "}";
  }
  if (FLAG_trace_turbo || FLAG_trace_turbo_graph) {
    CodeTracer::Scope tracing_scope(isolate()->GetCodeTracer());
    OFStream os(tracing_scope.file());
    os << "---------------------------------------------------\n"
       << "Finished compiling method " << info()->GetDebugName().get()
       << " using Turbofan" << std::endl;
  }
  return code;
}
2206 :
2207 630227 : Handle<Code> PipelineImpl::ScheduleAndGenerateCode(
2208 : CallDescriptor* call_descriptor) {
2209 : Linkage linkage(call_descriptor);
2210 :
2211 : // Schedule the graph, perform instruction selection and register allocation.
2212 630227 : if (!ScheduleAndSelectInstructions(&linkage, false)) return Handle<Code>();
2213 :
2214 : // Generate the final machine code.
2215 630227 : AssembleCode(&linkage);
2216 630227 : return FinalizeCode();
2217 : }
2218 :
// Runs the full register-allocation sub-pipeline over the instruction
// sequence: constraints, live ranges, (optional) splintering, linear-scan
// allocation for general and FP registers, spill-slot assignment, and the
// connecting/resolution passes. When `run_verifier` is set, a
// RegisterAllocatorVerifier checks the assignment at two points.
void PipelineImpl::AllocateRegisters(const RegisterConfiguration* config,
                                     CallDescriptor* descriptor,
                                     bool run_verifier) {
  PipelineData* data = this->data_;
  // Don't track usage for this zone in compiler stats.
  std::unique_ptr<Zone> verifier_zone;
  RegisterAllocatorVerifier* verifier = nullptr;
  if (run_verifier) {
    verifier_zone.reset(new Zone(isolate()->allocator(), ZONE_NAME));
    // Placement-new into the verifier zone; freed with the zone itself.
    verifier = new (verifier_zone.get()) RegisterAllocatorVerifier(
        verifier_zone.get(), config, data->sequence());
  }

#ifdef DEBUG
  data_->sequence()->ValidateEdgeSplitForm();
  data_->sequence()->ValidateDeferredBlockEntryPaths();
  data_->sequence()->ValidateDeferredBlockExitPaths();
#endif

  data->InitializeRegisterAllocationData(config, descriptor);
  // OSR compilations need their frame set up before allocation.
  if (info()->is_osr()) data->osr_helper()->SetupFrame(data->frame());

  Run<MeetRegisterConstraintsPhase>();
  Run<ResolvePhisPhase>();
  Run<BuildLiveRangesPhase>();
  if (FLAG_trace_turbo_graph) {
    AllowHandleDereference allow_deref;
    CodeTracer::Scope tracing_scope(isolate()->GetCodeTracer());
    OFStream os(tracing_scope.file());
    os << "----- Instruction sequence before register allocation -----\n"
       << PrintableInstructionSequence({config, data->sequence()});
  }
  if (verifier != nullptr) {
    CHECK(!data->register_allocation_data()->ExistsUseWithoutDefinition());
    CHECK(data->register_allocation_data()
              ->RangesDefinedInDeferredStayInDeferred());
  }

  if (FLAG_turbo_preprocess_ranges) {
    Run<SplinterLiveRangesPhase>();
  }

  Run<AllocateGeneralRegistersPhase<LinearScanAllocator>>();
  Run<AllocateFPRegistersPhase<LinearScanAllocator>>();

  // Splinters are only merged back if splintering ran above.
  if (FLAG_turbo_preprocess_ranges) {
    Run<MergeSplintersPhase>();
  }

  Run<AssignSpillSlotsPhase>();

  Run<CommitAssignmentPhase>();

  // TODO(chromium:725559): remove this check once
  // we understand the cause of the bug. We keep just the
  // check at the end of the allocation.
  if (verifier != nullptr) {
    verifier->VerifyAssignment("Immediately after CommitAssignmentPhase.");
  }

  Run<PopulateReferenceMapsPhase>();
  Run<ConnectRangesPhase>();
  Run<ResolveControlFlowPhase>();
  if (FLAG_turbo_move_optimization) {
    Run<OptimizeMovesPhase>();
  }

  Run<LocateSpillSlotsPhase>();

  if (FLAG_trace_turbo_graph) {
    AllowHandleDereference allow_deref;
    CodeTracer::Scope tracing_scope(isolate()->GetCodeTracer());
    OFStream os(tracing_scope.file());
    os << "----- Instruction sequence after register allocation -----\n"
       << PrintableInstructionSequence({config, data->sequence()});
  }

  if (verifier != nullptr) {
    verifier->VerifyAssignment("End of regalloc pipeline.");
    verifier->VerifyGapMoves();
  }

  if (FLAG_trace_turbo && !data->MayHaveUnverifiableGraph()) {
    TurboCfgFile tcf(data->isolate());
    tcf << AsC1VRegisterAllocationData("CodeGen",
                                       data->register_allocation_data());
  }

  // Register-allocation data is no longer needed; free its zone.
  data->DeleteRegisterAllocationZone();
}
2309 :
2310 4789419 : CompilationInfo* PipelineImpl::info() const { return data_->info(); }
2311 :
2312 443424 : Isolate* PipelineImpl::isolate() const { return info()->isolate(); }
2313 :
2314 : } // namespace compiler
2315 : } // namespace internal
2316 : } // namespace v8
|