// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/pipeline.h"

#include <fstream>  // NOLINT(readability/streams)
#include <sstream>

#include "src/base/adapters.h"
#include "src/base/platform/elapsed-timer.h"
#include "src/compiler/ast-graph-builder.h"
#include "src/compiler/ast-loop-assignment-analyzer.h"
#include "src/compiler/basic-block-instrumentor.h"
#include "src/compiler/branch-elimination.h"
#include "src/compiler/bytecode-graph-builder.h"
#include "src/compiler/change-lowering.h"
#include "src/compiler/code-generator.h"
#include "src/compiler/common-operator-reducer.h"
#include "src/compiler/control-flow-optimizer.h"
#include "src/compiler/dead-code-elimination.h"
#include "src/compiler/escape-analysis.h"
#include "src/compiler/escape-analysis-reducer.h"
#include "src/compiler/frame-elider.h"
#include "src/compiler/graph-replay.h"
#include "src/compiler/graph-trimmer.h"
#include "src/compiler/graph-visualizer.h"
#include "src/compiler/greedy-allocator.h"
#include "src/compiler/instruction.h"
#include "src/compiler/instruction-selector.h"
#include "src/compiler/js-builtin-reducer.h"
#include "src/compiler/js-call-reducer.h"
#include "src/compiler/js-context-relaxation.h"
#include "src/compiler/js-context-specialization.h"
#include "src/compiler/js-frame-specialization.h"
#include "src/compiler/js-generic-lowering.h"
#include "src/compiler/js-global-object-specialization.h"
#include "src/compiler/js-inlining-heuristic.h"
#include "src/compiler/js-intrinsic-lowering.h"
#include "src/compiler/js-native-context-specialization.h"
#include "src/compiler/js-typed-lowering.h"
#include "src/compiler/jump-threading.h"
#include "src/compiler/live-range-separator.h"
#include "src/compiler/load-elimination.h"
#include "src/compiler/loop-analysis.h"
#include "src/compiler/loop-peeling.h"
#include "src/compiler/machine-operator-reducer.h"
#include "src/compiler/move-optimizer.h"
#include "src/compiler/osr.h"
#include "src/compiler/pipeline-statistics.h"
#include "src/compiler/register-allocator.h"
#include "src/compiler/register-allocator-verifier.h"
#include "src/compiler/schedule.h"
#include "src/compiler/scheduler.h"
#include "src/compiler/select-lowering.h"
#include "src/compiler/simplified-lowering.h"
#include "src/compiler/simplified-operator.h"
#include "src/compiler/simplified-operator-reducer.h"
#include "src/compiler/tail-call-optimization.h"
#include "src/compiler/type-hint-analyzer.h"
#include "src/compiler/typer.h"
#include "src/compiler/value-numbering-reducer.h"
#include "src/compiler/verifier.h"
#include "src/compiler/zone-pool.h"
#include "src/ostreams.h"
#include "src/register-configuration.h"
#include "src/type-info.h"
#include "src/utils.h"

namespace v8 {
namespace internal {
namespace compiler {

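// PipelineData holds the state shared by the individual pipeline phases: the
// compilation info, the graph-level builders, the instruction sequence and
// the register allocation data. Each group of fields lives in its own zone so
// it can be discarded as soon as the corresponding stage is finished (see the
// Delete*Zone() methods below).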
class PipelineData {
 public:
  // For main entry point.
  PipelineData(ZonePool* zone_pool, CompilationInfo* info,
               PipelineStatistics* pipeline_statistics)
      : isolate_(info->isolate()),
        info_(info),
        outer_zone_(info_->zone()),
        zone_pool_(zone_pool),
        pipeline_statistics_(pipeline_statistics),
        compilation_failed_(false),
        code_(Handle<Code>::null()),
        graph_zone_scope_(zone_pool_),
        graph_zone_(graph_zone_scope_.zone()),
        graph_(nullptr),
        loop_assignment_(nullptr),
        simplified_(nullptr),
        machine_(nullptr),
        common_(nullptr),
        javascript_(nullptr),
        jsgraph_(nullptr),
        schedule_(nullptr),
        instruction_zone_scope_(zone_pool_),
        instruction_zone_(instruction_zone_scope_.zone()),
        sequence_(nullptr),
        frame_(nullptr),
        register_allocation_zone_scope_(zone_pool_),
        register_allocation_zone_(register_allocation_zone_scope_.zone()),
        register_allocation_data_(nullptr) {
    PhaseScope scope(pipeline_statistics, "init pipeline data");
    graph_ = new (graph_zone_) Graph(graph_zone_);
    source_positions_.Reset(new SourcePositionTable(graph_));
    simplified_ = new (graph_zone_) SimplifiedOperatorBuilder(graph_zone_);
    machine_ = new (graph_zone_) MachineOperatorBuilder(
        graph_zone_, MachineType::PointerRepresentation(),
        InstructionSelector::SupportedMachineOperatorFlags());
    common_ = new (graph_zone_) CommonOperatorBuilder(graph_zone_);
    javascript_ = new (graph_zone_) JSOperatorBuilder(graph_zone_);
    jsgraph_ = new (graph_zone_)
        JSGraph(isolate_, graph_, common_, javascript_, simplified_, machine_);
  }

  // For machine graph testing entry point.
  PipelineData(ZonePool* zone_pool, CompilationInfo* info, Graph* graph,
               Schedule* schedule)
      : isolate_(info->isolate()),
        info_(info),
        outer_zone_(nullptr),
        zone_pool_(zone_pool),
        pipeline_statistics_(nullptr),
        compilation_failed_(false),
        code_(Handle<Code>::null()),
        graph_zone_scope_(zone_pool_),
        graph_zone_(nullptr),
        graph_(graph),
        source_positions_(new SourcePositionTable(graph_)),
        loop_assignment_(nullptr),
        simplified_(nullptr),
        machine_(nullptr),
        common_(nullptr),
        javascript_(nullptr),
        jsgraph_(nullptr),
        schedule_(schedule),
        instruction_zone_scope_(zone_pool_),
        instruction_zone_(instruction_zone_scope_.zone()),
        sequence_(nullptr),
        frame_(nullptr),
        register_allocation_zone_scope_(zone_pool_),
        register_allocation_zone_(register_allocation_zone_scope_.zone()),
        register_allocation_data_(nullptr) {}

  // For register allocation testing entry point.
  PipelineData(ZonePool* zone_pool, CompilationInfo* info,
               InstructionSequence* sequence)
      : isolate_(info->isolate()),
        info_(info),
        outer_zone_(nullptr),
        zone_pool_(zone_pool),
        pipeline_statistics_(nullptr),
        compilation_failed_(false),
        code_(Handle<Code>::null()),
        graph_zone_scope_(zone_pool_),
        graph_zone_(nullptr),
        graph_(nullptr),
        loop_assignment_(nullptr),
        simplified_(nullptr),
        machine_(nullptr),
        common_(nullptr),
        javascript_(nullptr),
        jsgraph_(nullptr),
        schedule_(nullptr),
        instruction_zone_scope_(zone_pool_),
        instruction_zone_(sequence->zone()),
        sequence_(sequence),
        frame_(nullptr),
        register_allocation_zone_scope_(zone_pool_),
        register_allocation_zone_(register_allocation_zone_scope_.zone()),
        register_allocation_data_(nullptr) {}

  ~PipelineData() {
    DeleteRegisterAllocationZone();
    DeleteInstructionZone();
    DeleteGraphZone();
  }

  Isolate* isolate() const { return isolate_; }
  CompilationInfo* info() const { return info_; }
  ZonePool* zone_pool() const { return zone_pool_; }
  PipelineStatistics* pipeline_statistics() { return pipeline_statistics_; }
  bool compilation_failed() const { return compilation_failed_; }
  void set_compilation_failed() { compilation_failed_ = true; }
  Handle<Code> code() { return code_; }
  void set_code(Handle<Code> code) {
    DCHECK(code_.is_null());
    code_ = code;
  }

  // RawMachineAssembler generally produces graphs which cannot be verified.
  bool MayHaveUnverifiableGraph() const { return outer_zone_ == nullptr; }

  Zone* graph_zone() const { return graph_zone_; }
  Graph* graph() const { return graph_; }
  SourcePositionTable* source_positions() const {
    return source_positions_.get();
  }
  MachineOperatorBuilder* machine() const { return machine_; }
  CommonOperatorBuilder* common() const { return common_; }
  JSOperatorBuilder* javascript() const { return javascript_; }
  JSGraph* jsgraph() const { return jsgraph_; }
  MaybeHandle<Context> native_context() const {
    if (info()->is_native_context_specializing()) {
      return handle(info()->native_context(), isolate());
    }
    return MaybeHandle<Context>();
  }

  LoopAssignmentAnalysis* loop_assignment() const { return loop_assignment_; }
  void set_loop_assignment(LoopAssignmentAnalysis* loop_assignment) {
    DCHECK(!loop_assignment_);
    loop_assignment_ = loop_assignment;
  }

  TypeHintAnalysis* type_hint_analysis() const { return type_hint_analysis_; }
  void set_type_hint_analysis(TypeHintAnalysis* type_hint_analysis) {
    DCHECK_NULL(type_hint_analysis_);
    type_hint_analysis_ = type_hint_analysis;
  }

  Schedule* schedule() const { return schedule_; }
  void set_schedule(Schedule* schedule) {
    DCHECK(!schedule_);
    schedule_ = schedule;
  }

  Zone* instruction_zone() const { return instruction_zone_; }
  InstructionSequence* sequence() const { return sequence_; }
  Frame* frame() const { return frame_; }

  Zone* register_allocation_zone() const { return register_allocation_zone_; }
  RegisterAllocationData* register_allocation_data() const {
    return register_allocation_data_;
  }

  void DeleteGraphZone() {
    // Destroy objects with destructors first.
    source_positions_.Reset(nullptr);
    if (graph_zone_ == nullptr) return;
    // Destroy zone and clear pointers.
    graph_zone_scope_.Destroy();
    graph_zone_ = nullptr;
    graph_ = nullptr;
    loop_assignment_ = nullptr;
    type_hint_analysis_ = nullptr;
    simplified_ = nullptr;
    machine_ = nullptr;
    common_ = nullptr;
    javascript_ = nullptr;
    jsgraph_ = nullptr;
    schedule_ = nullptr;
  }

  void DeleteInstructionZone() {
    if (instruction_zone_ == nullptr) return;
    instruction_zone_scope_.Destroy();
    instruction_zone_ = nullptr;
    sequence_ = nullptr;
    frame_ = nullptr;
  }

  void DeleteRegisterAllocationZone() {
    if (register_allocation_zone_ == nullptr) return;
    register_allocation_zone_scope_.Destroy();
    register_allocation_zone_ = nullptr;
    register_allocation_data_ = nullptr;
  }

  void InitializeInstructionSequence() {
    DCHECK(sequence_ == nullptr);
    InstructionBlocks* instruction_blocks =
        InstructionSequence::InstructionBlocksFor(instruction_zone(),
                                                  schedule());
    sequence_ = new (instruction_zone()) InstructionSequence(
        info()->isolate(), instruction_zone(), instruction_blocks);
  }

  void InitializeRegisterAllocationData(const RegisterConfiguration* config,
                                        CallDescriptor* descriptor,
                                        const char* debug_name) {
    DCHECK(frame_ == nullptr);
    DCHECK(register_allocation_data_ == nullptr);
    int fixed_frame_size = 0;
    if (descriptor != nullptr) {
      fixed_frame_size = (descriptor->IsCFunctionCall())
                             ? StandardFrameConstants::kFixedSlotCountAboveFp +
                                   StandardFrameConstants::kCPSlotCount
                             : StandardFrameConstants::kFixedSlotCount;
    }
    frame_ = new (instruction_zone()) Frame(fixed_frame_size, descriptor);
    register_allocation_data_ = new (register_allocation_zone())
        RegisterAllocationData(config, register_allocation_zone(), frame(),
                               sequence(), debug_name);
  }

 private:
  Isolate* isolate_;
  CompilationInfo* info_;
  Zone* outer_zone_;
  ZonePool* const zone_pool_;
  PipelineStatistics* pipeline_statistics_;
  bool compilation_failed_;
  Handle<Code> code_;

  // All objects in the following group of fields are allocated in graph_zone_.
  // They are all set to nullptr when the graph_zone_ is destroyed.
  ZonePool::Scope graph_zone_scope_;
  Zone* graph_zone_;
  Graph* graph_;
  // TODO(dcarney): make this into a ZoneObject.
  base::SmartPointer<SourcePositionTable> source_positions_;
  LoopAssignmentAnalysis* loop_assignment_;
  TypeHintAnalysis* type_hint_analysis_ = nullptr;
  SimplifiedOperatorBuilder* simplified_;
  MachineOperatorBuilder* machine_;
  CommonOperatorBuilder* common_;
  JSOperatorBuilder* javascript_;
  JSGraph* jsgraph_;
  Schedule* schedule_;

  // All objects in the following group of fields are allocated in
  // instruction_zone_. They are all set to nullptr when the instruction_zone_
  // is destroyed.
  ZonePool::Scope instruction_zone_scope_;
  Zone* instruction_zone_;
  InstructionSequence* sequence_;
  Frame* frame_;

  // All objects in the following group of fields are allocated in
  // register_allocation_zone_. They are all set to nullptr when the zone is
  // destroyed.
  ZonePool::Scope register_allocation_zone_scope_;
  Zone* register_allocation_zone_;
  RegisterAllocationData* register_allocation_data_;

  DISALLOW_COPY_AND_ASSIGN(PipelineData);
};


namespace {

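// Appends trace output to the isolate's Turbo .cfg file; used together with
// the AsC1V* printers further below.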
struct TurboCfgFile : public std::ofstream {
  explicit TurboCfgFile(Isolate* isolate)
      : std::ofstream(isolate->GetTurboCfgFileName().c_str(),
                      std::ios_base::app) {}
};


void TraceSchedule(CompilationInfo* info, Schedule* schedule) {
  if (FLAG_trace_turbo) {
    FILE* json_file = OpenVisualizerLogFile(info, nullptr, "json", "a+");
    if (json_file != nullptr) {
      OFStream json_of(json_file);
      json_of << "{\"name\":\"Schedule\",\"type\":\"schedule\",\"data\":\"";
      std::stringstream schedule_stream;
      schedule_stream << *schedule;
      std::string schedule_string(schedule_stream.str());
      for (const auto& c : schedule_string) {
        json_of << AsEscapedUC16ForJSON(c);
      }
      json_of << "\"},\n";
      fclose(json_file);
    }
  }
  if (!FLAG_trace_turbo_graph && !FLAG_trace_turbo_scheduler) return;
  OFStream os(stdout);
  os << "-- Schedule --------------------------------------\n" << *schedule;
}

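// Variant of AstGraphBuilder that records, in the SourcePositionTable, the
// source position of the AST node each graph node was created from.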
class AstGraphBuilderWithPositions final : public AstGraphBuilder {
 public:
  AstGraphBuilderWithPositions(Zone* local_zone, CompilationInfo* info,
                               JSGraph* jsgraph,
                               LoopAssignmentAnalysis* loop_assignment,
                               TypeHintAnalysis* type_hint_analysis,
                               SourcePositionTable* source_positions)
      : AstGraphBuilder(local_zone, info, jsgraph, loop_assignment,
                        type_hint_analysis),
        source_positions_(source_positions),
        start_position_(info->shared_info()->start_position()) {}

  bool CreateGraph(bool stack_check) {
    SourcePositionTable::Scope pos_scope(source_positions_, start_position_);
    return AstGraphBuilder::CreateGraph(stack_check);
  }

#define DEF_VISIT(type)                                               \
  void Visit##type(type* node) override {                             \
    SourcePositionTable::Scope pos(source_positions_,                 \
                                   SourcePosition(node->position())); \
    AstGraphBuilder::Visit##type(node);                               \
  }
  AST_NODE_LIST(DEF_VISIT)
#undef DEF_VISIT

 private:
  SourcePositionTable* const source_positions_;
  SourcePosition const start_position_;
};

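// Wraps another reducer so that every reduction it performs is carried out
// under the source position of the node being reduced.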
class SourcePositionWrapper final : public Reducer {
 public:
  SourcePositionWrapper(Reducer* reducer, SourcePositionTable* table)
      : reducer_(reducer), table_(table) {}
  ~SourcePositionWrapper() final {}

  Reduction Reduce(Node* node) final {
    SourcePosition const pos = table_->GetSourcePosition(node);
    SourcePositionTable::Scope position(table_, pos);
    return reducer_->Reduce(node);
  }

  void Finalize() final { reducer_->Finalize(); }

 private:
  Reducer* const reducer_;
  SourcePositionTable* const table_;

  DISALLOW_COPY_AND_ASSIGN(SourcePositionWrapper);
};


class JSGraphReducer final : public GraphReducer {
 public:
  JSGraphReducer(JSGraph* jsgraph, Zone* zone)
      : GraphReducer(zone, jsgraph->graph(), jsgraph->Dead()) {}
  ~JSGraphReducer() final {}
};

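// Registers a reducer with the graph reducer. When source positions are
// enabled, the reducer is wrapped in a SourcePositionWrapper (allocated in
// the graph zone) so its reductions stay attributed to source positions.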
void AddReducer(PipelineData* data, GraphReducer* graph_reducer,
                Reducer* reducer) {
  if (data->info()->is_source_positions_enabled()) {
    void* const buffer = data->graph_zone()->New(sizeof(SourcePositionWrapper));
    SourcePositionWrapper* const wrapper =
        new (buffer) SourcePositionWrapper(reducer, data->source_positions());
    graph_reducer->AddReducer(wrapper);
  } else {
    graph_reducer->AddReducer(reducer);
  }
}

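// Scopes a single pipeline phase: opens a PhaseScope for statistics (unless
// the phase is unnamed) and provides a temporary zone that only lives for the
// duration of the phase.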
class PipelineRunScope {
 public:
  PipelineRunScope(PipelineData* data, const char* phase_name)
      : phase_scope_(
            phase_name == nullptr ? nullptr : data->pipeline_statistics(),
            phase_name),
        zone_scope_(data->zone_pool()) {}

  Zone* zone() { return zone_scope_.zone(); }

 private:
  PhaseScope phase_scope_;
  ZonePool::Scope zone_scope_;
};

}  // namespace

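// Runs a single phase. A phase is any type that provides a static
// phase_name() and a Run() method taking the PipelineData and a temporary
// zone (plus optional extra arguments), for example (illustrative only):
//
//   struct SomePhase {
//     static const char* phase_name() { return "some phase"; }
//     void Run(PipelineData* data, Zone* temp_zone) { /* ... */ }
//   };
//
// Run<SomePhase>() wraps Phase::Run in a PipelineRunScope so that phase
// statistics are recorded and the temporary zone is released afterwards.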
template <typename Phase>
void Pipeline::Run() {
  PipelineRunScope scope(this->data_, Phase::phase_name());
  Phase phase;
  phase.Run(this->data_, scope.zone());
}


template <typename Phase, typename Arg0>
void Pipeline::Run(Arg0 arg_0) {
  PipelineRunScope scope(this->data_, Phase::phase_name());
  Phase phase;
  phase.Run(this->data_, scope.zone(), arg_0);
}


struct LoopAssignmentAnalysisPhase {
  static const char* phase_name() { return "loop assignment analysis"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    AstLoopAssignmentAnalyzer analyzer(data->graph_zone(), data->info());
    LoopAssignmentAnalysis* loop_assignment = analyzer.Analyze();
    data->set_loop_assignment(loop_assignment);
  }
};


struct TypeHintAnalysisPhase {
  static const char* phase_name() { return "type hint analysis"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    TypeHintAnalyzer analyzer(data->graph_zone());
    Handle<Code> code(data->info()->shared_info()->code(), data->isolate());
    TypeHintAnalysis* type_hint_analysis = analyzer.Analyze(code);
    data->set_type_hint_analysis(type_hint_analysis);
  }
};


struct GraphBuilderPhase {
  static const char* phase_name() { return "graph builder"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    bool stack_check = !data->info()->IsStub();
    bool succeeded = false;

    if (data->info()->shared_info()->HasBytecodeArray()) {
      BytecodeGraphBuilder graph_builder(temp_zone, data->info(),
                                         data->jsgraph());
      succeeded = graph_builder.CreateGraph(stack_check);
    } else {
      AstGraphBuilderWithPositions graph_builder(
          temp_zone, data->info(), data->jsgraph(), data->loop_assignment(),
          data->type_hint_analysis(), data->source_positions());
      succeeded = graph_builder.CreateGraph(stack_check);
    }

    if (!succeeded) {
      data->set_compilation_failed();
    }
  }
};


struct InliningPhase {
  static const char* phase_name() { return "inlining"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    JSGraphReducer graph_reducer(data->jsgraph(), temp_zone);
    DeadCodeElimination dead_code_elimination(&graph_reducer, data->graph(),
                                              data->common());
    CommonOperatorReducer common_reducer(&graph_reducer, data->graph(),
                                         data->common(), data->machine());
    JSCallReducer call_reducer(data->jsgraph(),
                               data->info()->is_deoptimization_enabled()
                                   ? JSCallReducer::kDeoptimizationEnabled
                                   : JSCallReducer::kNoFlags,
                               data->native_context());
    JSContextSpecialization context_specialization(
        &graph_reducer, data->jsgraph(),
        data->info()->is_function_context_specializing()
            ? data->info()->context()
            : MaybeHandle<Context>());
    JSFrameSpecialization frame_specialization(data->info()->osr_frame(),
                                               data->jsgraph());
    JSGlobalObjectSpecialization global_object_specialization(
        &graph_reducer, data->jsgraph(),
        data->info()->is_deoptimization_enabled()
            ? JSGlobalObjectSpecialization::kDeoptimizationEnabled
            : JSGlobalObjectSpecialization::kNoFlags,
        data->native_context(), data->info()->dependencies());
    JSNativeContextSpecialization native_context_specialization(
        &graph_reducer, data->jsgraph(),
        data->info()->is_deoptimization_enabled()
            ? JSNativeContextSpecialization::kDeoptimizationEnabled
            : JSNativeContextSpecialization::kNoFlags,
        data->native_context(), data->info()->dependencies(), temp_zone);
    JSInliningHeuristic inlining(&graph_reducer,
                                 data->info()->is_inlining_enabled()
                                     ? JSInliningHeuristic::kGeneralInlining
                                     : JSInliningHeuristic::kRestrictedInlining,
                                 temp_zone, data->info(), data->jsgraph());
    AddReducer(data, &graph_reducer, &dead_code_elimination);
    AddReducer(data, &graph_reducer, &common_reducer);
    if (data->info()->is_frame_specializing()) {
      AddReducer(data, &graph_reducer, &frame_specialization);
    }
    AddReducer(data, &graph_reducer, &global_object_specialization);
    AddReducer(data, &graph_reducer, &native_context_specialization);
    AddReducer(data, &graph_reducer, &context_specialization);
    AddReducer(data, &graph_reducer, &call_reducer);
    AddReducer(data, &graph_reducer, &inlining);
    graph_reducer.ReduceGraph();
  }
};


struct TyperPhase {
  static const char* phase_name() { return "typer"; }

  void Run(PipelineData* data, Zone* temp_zone, Typer* typer) {
    NodeVector roots(temp_zone);
    data->jsgraph()->GetCachedNodes(&roots);
    typer->Run(roots);
  }
};


struct OsrDeconstructionPhase {
  static const char* phase_name() { return "OSR deconstruction"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    OsrHelper osr_helper(data->info());
    osr_helper.Deconstruct(data->jsgraph(), data->common(), temp_zone);
  }
};


struct TypedLoweringPhase {
  static const char* phase_name() { return "typed lowering"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    JSGraphReducer graph_reducer(data->jsgraph(), temp_zone);
    DeadCodeElimination dead_code_elimination(&graph_reducer, data->graph(),
                                              data->common());
    LoadElimination load_elimination(&graph_reducer);
    JSBuiltinReducer builtin_reducer(&graph_reducer, data->jsgraph());
    JSTypedLowering::Flags typed_lowering_flags = JSTypedLowering::kNoFlags;
    if (data->info()->is_deoptimization_enabled()) {
      typed_lowering_flags |= JSTypedLowering::kDeoptimizationEnabled;
    }
    if (data->info()->shared_info()->HasBytecodeArray()) {
      typed_lowering_flags |= JSTypedLowering::kDisableBinaryOpReduction;
    }
    JSTypedLowering typed_lowering(&graph_reducer, data->info()->dependencies(),
                                   typed_lowering_flags, data->jsgraph(),
                                   temp_zone);
    JSIntrinsicLowering intrinsic_lowering(
        &graph_reducer, data->jsgraph(),
        data->info()->is_deoptimization_enabled()
            ? JSIntrinsicLowering::kDeoptimizationEnabled
            : JSIntrinsicLowering::kDeoptimizationDisabled);
    CommonOperatorReducer common_reducer(&graph_reducer, data->graph(),
                                         data->common(), data->machine());
    AddReducer(data, &graph_reducer, &dead_code_elimination);
    AddReducer(data, &graph_reducer, &builtin_reducer);
    AddReducer(data, &graph_reducer, &typed_lowering);
    AddReducer(data, &graph_reducer, &intrinsic_lowering);
    AddReducer(data, &graph_reducer, &load_elimination);
    AddReducer(data, &graph_reducer, &common_reducer);
    graph_reducer.ReduceGraph();
  }
};


struct BranchEliminationPhase {
  static const char* phase_name() { return "branch condition elimination"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    JSGraphReducer graph_reducer(data->jsgraph(), temp_zone);
    BranchElimination branch_condition_elimination(&graph_reducer,
                                                   data->jsgraph(), temp_zone);
    DeadCodeElimination dead_code_elimination(&graph_reducer, data->graph(),
                                              data->common());
    AddReducer(data, &graph_reducer, &branch_condition_elimination);
    AddReducer(data, &graph_reducer, &dead_code_elimination);
    graph_reducer.ReduceGraph();
  }
};


struct EscapeAnalysisPhase {
  static const char* phase_name() { return "escape analysis"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    EscapeAnalysis escape_analysis(data->graph(), data->jsgraph()->common(),
                                   temp_zone);
    escape_analysis.Run();
    JSGraphReducer graph_reducer(data->jsgraph(), temp_zone);
    EscapeAnalysisReducer escape_reducer(&graph_reducer, data->jsgraph(),
                                         &escape_analysis, temp_zone);
    AddReducer(data, &graph_reducer, &escape_reducer);
    graph_reducer.ReduceGraph();
  }
};


struct SimplifiedLoweringPhase {
  static const char* phase_name() { return "simplified lowering"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    SimplifiedLowering lowering(data->jsgraph(), temp_zone,
                                data->source_positions());
    lowering.LowerAllNodes();
    JSGraphReducer graph_reducer(data->jsgraph(), temp_zone);
    DeadCodeElimination dead_code_elimination(&graph_reducer, data->graph(),
                                              data->common());
    SimplifiedOperatorReducer simple_reducer(data->jsgraph());
    ValueNumberingReducer value_numbering(temp_zone);
    MachineOperatorReducer machine_reducer(data->jsgraph());
    CommonOperatorReducer common_reducer(&graph_reducer, data->graph(),
                                         data->common(), data->machine());
    AddReducer(data, &graph_reducer, &dead_code_elimination);
    AddReducer(data, &graph_reducer, &simple_reducer);
    AddReducer(data, &graph_reducer, &value_numbering);
    AddReducer(data, &graph_reducer, &machine_reducer);
    AddReducer(data, &graph_reducer, &common_reducer);
    graph_reducer.ReduceGraph();
  }
};


struct ControlFlowOptimizationPhase {
  static const char* phase_name() { return "control flow optimization"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    ControlFlowOptimizer optimizer(data->graph(), data->common(),
                                   data->machine(), temp_zone);
    optimizer.Optimize();
  }
};


struct ChangeLoweringPhase {
  static const char* phase_name() { return "change lowering"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    JSGraphReducer graph_reducer(data->jsgraph(), temp_zone);
    DeadCodeElimination dead_code_elimination(&graph_reducer, data->graph(),
                                              data->common());
    SimplifiedOperatorReducer simple_reducer(data->jsgraph());
    ValueNumberingReducer value_numbering(temp_zone);
    ChangeLowering lowering(data->jsgraph());
    MachineOperatorReducer machine_reducer(data->jsgraph());
    CommonOperatorReducer common_reducer(&graph_reducer, data->graph(),
                                         data->common(), data->machine());
    AddReducer(data, &graph_reducer, &dead_code_elimination);
    AddReducer(data, &graph_reducer, &simple_reducer);
    AddReducer(data, &graph_reducer, &value_numbering);
    AddReducer(data, &graph_reducer, &lowering);
    AddReducer(data, &graph_reducer, &machine_reducer);
    AddReducer(data, &graph_reducer, &common_reducer);
    graph_reducer.ReduceGraph();
  }
};


struct EarlyGraphTrimmingPhase {
  static const char* phase_name() { return "early graph trimming"; }
  void Run(PipelineData* data, Zone* temp_zone) {
    GraphTrimmer trimmer(temp_zone, data->graph());
    NodeVector roots(temp_zone);
    data->jsgraph()->GetCachedNodes(&roots);
    trimmer.TrimGraph(roots.begin(), roots.end());
  }
};


struct LateGraphTrimmingPhase {
  static const char* phase_name() { return "late graph trimming"; }
  void Run(PipelineData* data, Zone* temp_zone) {
    GraphTrimmer trimmer(temp_zone, data->graph());
    NodeVector roots(temp_zone);
    data->jsgraph()->GetCachedNodes(&roots);
    trimmer.TrimGraph(roots.begin(), roots.end());
  }
};


struct StressLoopPeelingPhase {
  static const char* phase_name() { return "stress loop peeling"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    // Peel the first outer loop for testing.
    // TODO(titzer): peel all loops? the N'th loop? Innermost loops?
    LoopTree* loop_tree = LoopFinder::BuildLoopTree(data->graph(), temp_zone);
    if (loop_tree != nullptr && loop_tree->outer_loops().size() > 0) {
      LoopPeeler::Peel(data->graph(), data->common(), loop_tree,
                       loop_tree->outer_loops()[0], temp_zone);
    }
  }
};


struct GenericLoweringPhase {
  static const char* phase_name() { return "generic lowering"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    JSGraphReducer graph_reducer(data->jsgraph(), temp_zone);
    JSContextRelaxation context_relaxing;
    DeadCodeElimination dead_code_elimination(&graph_reducer, data->graph(),
                                              data->common());
    CommonOperatorReducer common_reducer(&graph_reducer, data->graph(),
                                         data->common(), data->machine());
    JSGenericLowering generic_lowering(data->info()->is_typing_enabled(),
                                       data->jsgraph());
    SelectLowering select_lowering(data->jsgraph()->graph(),
                                   data->jsgraph()->common());
    TailCallOptimization tco(data->common(), data->graph());
    AddReducer(data, &graph_reducer, &context_relaxing);
    AddReducer(data, &graph_reducer, &dead_code_elimination);
    AddReducer(data, &graph_reducer, &common_reducer);
    AddReducer(data, &graph_reducer, &generic_lowering);
    AddReducer(data, &graph_reducer, &select_lowering);
    AddReducer(data, &graph_reducer, &tco);
    graph_reducer.ReduceGraph();
  }
};


struct ComputeSchedulePhase {
  static const char* phase_name() { return "scheduling"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    Schedule* schedule = Scheduler::ComputeSchedule(
        temp_zone, data->graph(), data->info()->is_splitting_enabled()
                                      ? Scheduler::kSplitNodes
                                      : Scheduler::kNoFlags);
    if (FLAG_turbo_verify) ScheduleVerifier::Run(schedule);
    data->set_schedule(schedule);
  }
};


struct InstructionSelectionPhase {
  static const char* phase_name() { return "select instructions"; }

  void Run(PipelineData* data, Zone* temp_zone, Linkage* linkage) {
    InstructionSelector selector(
        temp_zone, data->graph()->NodeCount(), linkage, data->sequence(),
        data->schedule(), data->source_positions(),
        data->info()->is_source_positions_enabled()
            ? InstructionSelector::kAllSourcePositions
            : InstructionSelector::kCallSourcePositions);
    selector.SelectInstructions();
  }
};

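// The phases below implement register allocation. They all operate on the
// RegisterAllocationData created by
// PipelineData::InitializeRegisterAllocationData() and are run in the order
// laid out in Pipeline::AllocateRegisters().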
struct MeetRegisterConstraintsPhase {
  static const char* phase_name() { return "meet register constraints"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    ConstraintBuilder builder(data->register_allocation_data());
    builder.MeetRegisterConstraints();
  }
};


struct ResolvePhisPhase {
  static const char* phase_name() { return "resolve phis"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    ConstraintBuilder builder(data->register_allocation_data());
    builder.ResolvePhis();
  }
};


struct BuildLiveRangesPhase {
  static const char* phase_name() { return "build live ranges"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    LiveRangeBuilder builder(data->register_allocation_data(), temp_zone);
    builder.BuildLiveRanges();
  }
};


struct SplinterLiveRangesPhase {
  static const char* phase_name() { return "splinter live ranges"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    LiveRangeSeparator live_range_splinterer(data->register_allocation_data(),
                                             temp_zone);
    live_range_splinterer.Splinter();
  }
};


template <typename RegAllocator>
struct AllocateGeneralRegistersPhase {
  static const char* phase_name() { return "allocate general registers"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    RegAllocator allocator(data->register_allocation_data(), GENERAL_REGISTERS,
                           temp_zone);
    allocator.AllocateRegisters();
  }
};


template <typename RegAllocator>
struct AllocateDoubleRegistersPhase {
  static const char* phase_name() { return "allocate double registers"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    RegAllocator allocator(data->register_allocation_data(), DOUBLE_REGISTERS,
                           temp_zone);
    allocator.AllocateRegisters();
  }
};


struct MergeSplintersPhase {
  static const char* phase_name() { return "merge splintered ranges"; }
  void Run(PipelineData* pipeline_data, Zone* temp_zone) {
    RegisterAllocationData* data = pipeline_data->register_allocation_data();
    LiveRangeMerger live_range_merger(data, temp_zone);
    live_range_merger.Merge();
  }
};


struct LocateSpillSlotsPhase {
  static const char* phase_name() { return "locate spill slots"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    SpillSlotLocator locator(data->register_allocation_data());
    locator.LocateSpillSlots();
  }
};


struct AssignSpillSlotsPhase {
  static const char* phase_name() { return "assign spill slots"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    OperandAssigner assigner(data->register_allocation_data());
    assigner.AssignSpillSlots();
  }
};


struct CommitAssignmentPhase {
  static const char* phase_name() { return "commit assignment"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    OperandAssigner assigner(data->register_allocation_data());
    assigner.CommitAssignment();
  }
};


struct PopulateReferenceMapsPhase {
  static const char* phase_name() { return "populate pointer maps"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    ReferenceMapPopulator populator(data->register_allocation_data());
    populator.PopulateReferenceMaps();
  }
};


struct ConnectRangesPhase {
  static const char* phase_name() { return "connect ranges"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    LiveRangeConnector connector(data->register_allocation_data());
    connector.ConnectRanges(temp_zone);
  }
};


struct ResolveControlFlowPhase {
  static const char* phase_name() { return "resolve control flow"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    LiveRangeConnector connector(data->register_allocation_data());
    connector.ResolveControlFlow(temp_zone);
  }
};


struct OptimizeMovesPhase {
  static const char* phase_name() { return "optimize moves"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    MoveOptimizer move_optimizer(temp_zone, data->sequence());
    move_optimizer.Run();
  }
};


struct FrameElisionPhase {
  static const char* phase_name() { return "frame elision"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    FrameElider(data->sequence()).Run();
  }
};


struct JumpThreadingPhase {
  static const char* phase_name() { return "jump threading"; }

  void Run(PipelineData* data, Zone* temp_zone) {
    ZoneVector<RpoNumber> result(temp_zone);
    if (JumpThreading::ComputeForwarding(temp_zone, result, data->sequence())) {
      JumpThreading::ApplyForwarding(result, data->sequence());
    }
  }
};


struct GenerateCodePhase {
  static const char* phase_name() { return "generate code"; }

  void Run(PipelineData* data, Zone* temp_zone, Linkage* linkage) {
    CodeGenerator generator(data->frame(), linkage, data->sequence(),
                            data->info());
    data->set_code(generator.GenerateCode());
  }
};

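// The following two phases are used for tracing and verification only; their
// phase_name() returns nullptr, so PipelineRunScope does not record phase
// statistics for them.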
struct PrintGraphPhase {
  static const char* phase_name() { return nullptr; }

  void Run(PipelineData* data, Zone* temp_zone, const char* phase) {
    CompilationInfo* info = data->info();
    Graph* graph = data->graph();

    {  // Print JSON.
      FILE* json_file = OpenVisualizerLogFile(info, nullptr, "json", "a+");
      if (json_file == nullptr) return;
      OFStream json_of(json_file);
      json_of << "{\"name\":\"" << phase << "\",\"type\":\"graph\",\"data\":"
              << AsJSON(*graph, data->source_positions()) << "},\n";
      fclose(json_file);
    }

    if (FLAG_trace_turbo_graph) {  // Simple textual RPO.
      OFStream os(stdout);
      os << "-- Graph after " << phase << " -- " << std::endl;
      os << AsRPO(*graph);
    }
  }
};


struct VerifyGraphPhase {
  static const char* phase_name() { return nullptr; }

  void Run(PipelineData* data, Zone* temp_zone, const bool untyped) {
    Verifier::Run(data->graph(), FLAG_turbo_types && !untyped
                                     ? Verifier::TYPED
                                     : Verifier::UNTYPED);
  }
};


void Pipeline::BeginPhaseKind(const char* phase_kind_name) {
  if (data_->pipeline_statistics() != nullptr) {
    data_->pipeline_statistics()->BeginPhaseKind(phase_kind_name);
  }
}


void Pipeline::RunPrintAndVerify(const char* phase, bool untyped) {
  if (FLAG_trace_turbo) {
    Run<PrintGraphPhase>(phase);
  }
  if (FLAG_turbo_verify) {
    Run<VerifyGraphPhase>(untyped);
  }
}

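// Main entry point for optimized compilation: builds the graph from the AST
// or bytecode, runs the inlining, typing and lowering phases, and finally
// hands off to ScheduleAndGenerateCode().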
Handle<Code> Pipeline::GenerateCode() {
  // TODO(mstarzinger): This is just a temporary hack to make TurboFan work,
  // the correct solution is to restore the context register after invoking
  // builtins from full-codegen.
  if (Context::IsJSBuiltin(isolate()->native_context(), info()->closure())) {
    return Handle<Code>::null();
  }

  ZonePool zone_pool;
  base::SmartPointer<PipelineStatistics> pipeline_statistics;

  if (FLAG_turbo_stats) {
    pipeline_statistics.Reset(new PipelineStatistics(info(), &zone_pool));
    pipeline_statistics->BeginPhaseKind("initializing");
  }

  if (FLAG_trace_turbo) {
    FILE* json_file = OpenVisualizerLogFile(info(), nullptr, "json", "w+");
    if (json_file != nullptr) {
      OFStream json_of(json_file);
      Handle<Script> script = info()->script();
      FunctionLiteral* function = info()->literal();
      base::SmartArrayPointer<char> function_name = info()->GetDebugName();
      int pos = info()->shared_info()->start_position();
      json_of << "{\"function\":\"" << function_name.get()
              << "\", \"sourcePosition\":" << pos << ", \"source\":\"";
      if (!script->IsUndefined() && !script->source()->IsUndefined()) {
        DisallowHeapAllocation no_allocation;
        int start = function->start_position();
        int len = function->end_position() - start;
        String::SubStringRange source(String::cast(script->source()), start,
                                      len);
        for (const auto& c : source) {
          json_of << AsEscapedUC16ForJSON(c);
        }
      }
      json_of << "\",\n\"phases\":[";
      fclose(json_file);
    }
  }

  PipelineData data(&zone_pool, info(), pipeline_statistics.get());
  this->data_ = &data;

  BeginPhaseKind("graph creation");

  if (FLAG_trace_turbo) {
    OFStream os(stdout);
    os << "---------------------------------------------------\n"
       << "Begin compiling method " << info()->GetDebugName().get()
       << " using Turbofan" << std::endl;
    TurboCfgFile tcf(isolate());
    tcf << AsC1VCompilation(info());
  }

  data.source_positions()->AddDecorator();

  if (FLAG_loop_assignment_analysis) {
    Run<LoopAssignmentAnalysisPhase>();
  }

  if (info()->is_typing_enabled()) {
    Run<TypeHintAnalysisPhase>();
  }

  Run<GraphBuilderPhase>();
  if (data.compilation_failed()) return Handle<Code>::null();
  RunPrintAndVerify("Initial untyped", true);

  // Perform OSR deconstruction.
  if (info()->is_osr()) {
    Run<OsrDeconstructionPhase>();
    RunPrintAndVerify("OSR deconstruction", true);
  }

  // Perform function context specialization and inlining (if enabled).
  Run<InliningPhase>();
  RunPrintAndVerify("Inlined", true);

  // Remove dead->live edges from the graph.
  Run<EarlyGraphTrimmingPhase>();
  RunPrintAndVerify("Early trimmed", true);

  if (FLAG_print_turbo_replay) {
    // Print a replay of the initial graph.
    GraphReplayPrinter::PrintReplay(data.graph());
  }

  base::SmartPointer<Typer> typer;
  if (info()->is_typing_enabled()) {
    // Type the graph.
    typer.Reset(new Typer(isolate(), data.graph(),
                          info()->is_deoptimization_enabled()
                              ? Typer::kDeoptimizationEnabled
                              : Typer::kNoFlags,
                          info()->dependencies()));
    Run<TyperPhase>(typer.get());
    RunPrintAndVerify("Typed");
  }

  BeginPhaseKind("lowering");

  if (info()->is_typing_enabled()) {
    // Lower JSOperators where we can determine types.
    Run<TypedLoweringPhase>();
    RunPrintAndVerify("Lowered typed");

    if (FLAG_turbo_stress_loop_peeling) {
      Run<StressLoopPeelingPhase>();
      RunPrintAndVerify("Loop peeled");
    }

    if (FLAG_turbo_escape) {
      Run<EscapeAnalysisPhase>();
      RunPrintAndVerify("Escape Analysed");
    }

    // Lower simplified operators and insert changes.
    Run<SimplifiedLoweringPhase>();
    RunPrintAndVerify("Lowered simplified");

    Run<BranchEliminationPhase>();
    RunPrintAndVerify("Branch conditions eliminated");

    // Optimize control flow.
    if (FLAG_turbo_cf_optimization) {
      Run<ControlFlowOptimizationPhase>();
      RunPrintAndVerify("Control flow optimized");
    }

    // Lower changes that have been inserted before.
    Run<ChangeLoweringPhase>();
    // TODO(jarin, rossberg): Remove UNTYPED once machine typing works.
    RunPrintAndVerify("Lowered changes", true);
  }

  // Lower any remaining generic JSOperators.
  Run<GenericLoweringPhase>();
  // TODO(jarin, rossberg): Remove UNTYPED once machine typing works.
  RunPrintAndVerify("Lowered generic", true);

  Run<LateGraphTrimmingPhase>();
  // TODO(jarin, rossberg): Remove UNTYPED once machine typing works.
  RunPrintAndVerify("Late trimmed", true);

  BeginPhaseKind("block building");

  data.source_positions()->RemoveDecorator();

  // Kill the Typer and thereby uninstall the decorator (if any).
  typer.Reset(nullptr);

  return ScheduleAndGenerateCode(
      Linkage::ComputeIncoming(data.instruction_zone(), info()));
}


Handle<Code> Pipeline::GenerateCodeForCodeStub(Isolate* isolate,
                                               CallDescriptor* call_descriptor,
                                               Graph* graph, Schedule* schedule,
                                               Code::Kind kind,
                                               const char* debug_name) {
  CompilationInfo info(debug_name, isolate, graph->zone());
  info.set_output_code_kind(kind);

  // Construct a pipeline for scheduling and code generation.
  ZonePool zone_pool;
  PipelineData data(&zone_pool, &info, graph, schedule);
  base::SmartPointer<PipelineStatistics> pipeline_statistics;
  if (FLAG_turbo_stats) {
    pipeline_statistics.Reset(new PipelineStatistics(&info, &zone_pool));
    pipeline_statistics->BeginPhaseKind("stub codegen");
  }

  Pipeline pipeline(&info);
  pipeline.data_ = &data;
  DCHECK_NOT_NULL(data.schedule());

  if (FLAG_trace_turbo) {
    FILE* json_file = OpenVisualizerLogFile(&info, nullptr, "json", "w+");
    if (json_file != nullptr) {
      OFStream json_of(json_file);
      json_of << "{\"function\":\"" << info.GetDebugName().get()
              << "\", \"source\":\"\",\n\"phases\":[";
      fclose(json_file);
    }
    pipeline.Run<PrintGraphPhase>("Machine");
  }

  return pipeline.ScheduleAndGenerateCode(call_descriptor);
}


Handle<Code> Pipeline::GenerateCodeForTesting(CompilationInfo* info,
                                              Graph* graph,
                                              Schedule* schedule) {
  CallDescriptor* call_descriptor =
      Linkage::ComputeIncoming(info->zone(), info);
  return GenerateCodeForTesting(info, call_descriptor, graph, schedule);
}


Handle<Code> Pipeline::GenerateCodeForTesting(CompilationInfo* info,
                                              CallDescriptor* call_descriptor,
                                              Graph* graph,
                                              Schedule* schedule) {
  // Construct a pipeline for scheduling and code generation.
  ZonePool zone_pool;
  PipelineData data(&zone_pool, info, graph, schedule);
  base::SmartPointer<PipelineStatistics> pipeline_statistics;
  if (FLAG_turbo_stats) {
    pipeline_statistics.Reset(new PipelineStatistics(info, &zone_pool));
    pipeline_statistics->BeginPhaseKind("test codegen");
  }

  Pipeline pipeline(info);
  pipeline.data_ = &data;
  if (data.schedule() == nullptr) {
    // TODO(rossberg): Should this really be untyped?
    pipeline.RunPrintAndVerify("Machine", true);
  }

  return pipeline.ScheduleAndGenerateCode(call_descriptor);
}


bool Pipeline::AllocateRegistersForTesting(const RegisterConfiguration* config,
                                           InstructionSequence* sequence,
                                           bool run_verifier) {
  CompilationInfo info("testing", sequence->isolate(), sequence->zone());
  ZonePool zone_pool;
  PipelineData data(&zone_pool, &info, sequence);
  Pipeline pipeline(&info);
  pipeline.data_ = &data;
  pipeline.AllocateRegisters(config, nullptr, run_verifier);
  return !data.compilation_failed();
}


Handle<Code> Pipeline::ScheduleAndGenerateCode(
    CallDescriptor* call_descriptor) {
  PipelineData* data = this->data_;

  DCHECK_NOT_NULL(data->graph());

  if (data->schedule() == nullptr) Run<ComputeSchedulePhase>();
  TraceSchedule(data->info(), data->schedule());

  BasicBlockProfiler::Data* profiler_data = nullptr;
  if (FLAG_turbo_profiling) {
    profiler_data = BasicBlockInstrumentor::Instrument(info(), data->graph(),
                                                       data->schedule());
  }

  data->InitializeInstructionSequence();

  // Select and schedule instructions covering the scheduled graph.
  Linkage linkage(call_descriptor);
  Run<InstructionSelectionPhase>(&linkage);

  if (FLAG_trace_turbo && !data->MayHaveUnverifiableGraph()) {
    TurboCfgFile tcf(isolate());
    tcf << AsC1V("CodeGen", data->schedule(), data->source_positions(),
                 data->sequence());
  }

  std::ostringstream source_position_output;
  if (FLAG_trace_turbo) {
    // Output source position information before the graph is deleted.
    data_->source_positions()->Print(source_position_output);
  }

  data->DeleteGraphZone();

  BeginPhaseKind("register allocation");

  bool run_verifier = FLAG_turbo_verify_allocation;
  // Allocate registers.
  AllocateRegisters(
      RegisterConfiguration::ArchDefault(RegisterConfiguration::TURBOFAN),
      call_descriptor, run_verifier);
  if (data->compilation_failed()) {
    info()->AbortOptimization(kNotEnoughVirtualRegistersRegalloc);
    return Handle<Code>();
  }

  BeginPhaseKind("code generation");

  // Optimize jumps.
  if (FLAG_turbo_jt) {
    Run<JumpThreadingPhase>();
  }

  // Generate final machine code.
  Run<GenerateCodePhase>(&linkage);

  Handle<Code> code = data->code();
  if (profiler_data != nullptr) {
#if ENABLE_DISASSEMBLER
    std::ostringstream os;
    code->Disassemble(nullptr, os);
    profiler_data->SetCode(&os);
#endif
  }

  info()->SetCode(code);
  v8::internal::CodeGenerator::PrintCode(code, info());

  if (FLAG_trace_turbo) {
    FILE* json_file = OpenVisualizerLogFile(info(), nullptr, "json", "a+");
    if (json_file != nullptr) {
      OFStream json_of(json_file);
      json_of
          << "{\"name\":\"disassembly\",\"type\":\"disassembly\",\"data\":\"";
#if ENABLE_DISASSEMBLER
      std::stringstream disassembly_stream;
      code->Disassemble(nullptr, disassembly_stream);
      std::string disassembly_string(disassembly_stream.str());
      for (const auto& c : disassembly_string) {
        json_of << AsEscapedUC16ForJSON(c);
      }
#endif  // ENABLE_DISASSEMBLER
      json_of << "\"}\n],\n";
      json_of << "\"nodePositions\":";
      json_of << source_position_output.str();
      json_of << "}";
      fclose(json_file);
    }
    OFStream os(stdout);
    os << "---------------------------------------------------\n"
       << "Finished compiling method " << info()->GetDebugName().get()
       << " using Turbofan" << std::endl;
  }

  return code;
}

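// Runs the register allocation phases over the instruction sequence:
// constraint building, live range construction, the actual allocation
// (greedy or linear scan, for general and double registers), spill slot
// assignment, reference map population and finally move optimization. When
// run_verifier is set, the result is checked by the
// RegisterAllocatorVerifier.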
void Pipeline::AllocateRegisters(const RegisterConfiguration* config,
                                 CallDescriptor* descriptor,
                                 bool run_verifier) {
  PipelineData* data = this->data_;

  // Don't track usage for this zone in compiler stats.
  base::SmartPointer<Zone> verifier_zone;
  RegisterAllocatorVerifier* verifier = nullptr;
  if (run_verifier) {
    verifier_zone.Reset(new Zone());
    verifier = new (verifier_zone.get()) RegisterAllocatorVerifier(
        verifier_zone.get(), config, data->sequence());
  }

  base::SmartArrayPointer<char> debug_name;
#ifdef DEBUG
  debug_name = info()->GetDebugName();
#endif

  data->InitializeRegisterAllocationData(config, descriptor, debug_name.get());
  if (info()->is_osr()) {
    OsrHelper osr_helper(info());
    osr_helper.SetupFrame(data->frame());
  }

  Run<MeetRegisterConstraintsPhase>();
  Run<ResolvePhisPhase>();
  Run<BuildLiveRangesPhase>();
  if (FLAG_trace_turbo_graph) {
    OFStream os(stdout);
    PrintableInstructionSequence printable = {config, data->sequence()};
    os << "----- Instruction sequence before register allocation -----\n"
       << printable;
  }
  if (verifier != nullptr) {
    CHECK(!data->register_allocation_data()->ExistsUseWithoutDefinition());
    CHECK(data->register_allocation_data()
              ->RangesDefinedInDeferredStayInDeferred());
  }

  if (FLAG_turbo_preprocess_ranges) {
    Run<SplinterLiveRangesPhase>();
  }

  if (FLAG_turbo_greedy_regalloc) {
    Run<AllocateGeneralRegistersPhase<GreedyAllocator>>();
    Run<AllocateDoubleRegistersPhase<GreedyAllocator>>();
  } else {
    Run<AllocateGeneralRegistersPhase<LinearScanAllocator>>();
    Run<AllocateDoubleRegistersPhase<LinearScanAllocator>>();
  }

  if (FLAG_turbo_preprocess_ranges) {
    Run<MergeSplintersPhase>();
  }

  if (FLAG_turbo_frame_elision) {
    Run<LocateSpillSlotsPhase>();
    Run<FrameElisionPhase>();
  }

  Run<AssignSpillSlotsPhase>();

  Run<CommitAssignmentPhase>();
  Run<PopulateReferenceMapsPhase>();
  Run<ConnectRangesPhase>();
  Run<ResolveControlFlowPhase>();
  if (FLAG_turbo_move_optimization) {
    Run<OptimizeMovesPhase>();
  }

  if (FLAG_trace_turbo_graph) {
    OFStream os(stdout);
    PrintableInstructionSequence printable = {config, data->sequence()};
    os << "----- Instruction sequence after register allocation -----\n"
       << printable;
  }

  if (verifier != nullptr) {
    verifier->VerifyAssignment();
    verifier->VerifyGapMoves();
  }

  if (FLAG_trace_turbo && !data->MayHaveUnverifiableGraph()) {
    TurboCfgFile tcf(data->isolate());
    tcf << AsC1VRegisterAllocationData("CodeGen",
                                       data->register_allocation_data());
  }

  data->DeleteRegisterAllocationZone();
}

}  // namespace compiler
}  // namespace internal
}  // namespace v8