1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "src/compiler.h"
6
7 #include <algorithm>
8
9 #include "src/ast/ast-numbering.h"
10 #include "src/ast/prettyprinter.h"
11 #include "src/ast/scopeinfo.h"
12 #include "src/ast/scopes.h"
13 #include "src/bootstrapper.h"
14 #include "src/codegen.h"
15 #include "src/compilation-cache.h"
16 #include "src/compiler/pipeline.h"
17 #include "src/crankshaft/hydrogen.h"
18 #include "src/crankshaft/lithium.h"
19 #include "src/crankshaft/typing.h"
20 #include "src/debug/debug.h"
21 #include "src/debug/liveedit.h"
22 #include "src/deoptimizer.h"
23 #include "src/full-codegen/full-codegen.h"
24 #include "src/gdb-jit.h"
25 #include "src/interpreter/interpreter.h"
26 #include "src/isolate-inl.h"
27 #include "src/log-inl.h"
28 #include "src/messages.h"
29 #include "src/parsing/parser.h"
30 #include "src/parsing/rewriter.h"
31 #include "src/parsing/scanner-character-streams.h"
32 #include "src/profiler/cpu-profiler.h"
33 #include "src/runtime-profiler.h"
34 #include "src/snapshot/serialize.h"
35 #include "src/vm-state-inl.h"
36
37 namespace v8 {
38 namespace internal {
39
operator <<(std::ostream & os,const SourcePosition & p)40 std::ostream& operator<<(std::ostream& os, const SourcePosition& p) {
41 if (p.IsUnknown()) {
42 return os << "<?>";
43 } else if (FLAG_hydrogen_track_positions) {
44 return os << "<" << p.inlining_id() << ":" << p.position() << ">";
45 } else {
46 return os << "<0:" << p.raw() << ">";
47 }
48 }
49
50
// Convenience accessors that forward to the attached ParseInfo.
// PARSE_INFO_GETTER CHECK-fails when no ParseInfo is present;
// PARSE_INFO_GETTER_WITH_DEFAULT returns {def} instead.
#define PARSE_INFO_GETTER(type, name)  \
  type CompilationInfo::name() const { \
    CHECK(parse_info());               \
    return parse_info()->name();       \
  }


#define PARSE_INFO_GETTER_WITH_DEFAULT(type, name, def) \
  type CompilationInfo::name() const {                  \
    return parse_info() ? parse_info()->name() : def;   \
  }


PARSE_INFO_GETTER(Handle<Script>, script)
PARSE_INFO_GETTER(bool, is_eval)
PARSE_INFO_GETTER(bool, is_native)
PARSE_INFO_GETTER(bool, is_module)
PARSE_INFO_GETTER(FunctionLiteral*, literal)
PARSE_INFO_GETTER_WITH_DEFAULT(LanguageMode, language_mode, STRICT)
PARSE_INFO_GETTER_WITH_DEFAULT(Handle<JSFunction>, closure,
                               Handle<JSFunction>::null())
PARSE_INFO_GETTER_WITH_DEFAULT(Scope*, scope, nullptr)
PARSE_INFO_GETTER(Handle<Context>, context)
PARSE_INFO_GETTER(Handle<SharedFunctionInfo>, shared_info)

#undef PARSE_INFO_GETTER
#undef PARSE_INFO_GETTER_WITH_DEFAULT
78
79
// Exactly like a CompilationInfo, except being allocated via {new} and it also
// creates and enters a Zone on construction and deallocates it on destruction.
class CompilationInfoWithZone : public CompilationInfo {
 public:
  explicit CompilationInfoWithZone(Handle<JSFunction> function)
      : CompilationInfo(new ParseInfo(&zone_, function)) {}

  // Virtual destructor because a CompilationInfoWithZone has to exit the
  // zone scope and get rid of dependent maps even when the destructor is
  // called when cast as a CompilationInfo.
  virtual ~CompilationInfoWithZone() {
    DisableFutureOptimization();
    dependencies()->Rollback();
    // The ParseInfo was allocated by this class in the constructor above;
    // the base class does not own it, so it must be freed here.
    delete parse_info_;
    parse_info_ = nullptr;
  }

 private:
  Zone zone_;
};
100
101
has_shared_info() const102 bool CompilationInfo::has_shared_info() const {
103 return parse_info_ && !parse_info_->shared_info().is_null();
104 }
105
106
has_context() const107 bool CompilationInfo::has_context() const {
108 return parse_info_ && !parse_info_->context().is_null();
109 }
110
111
has_literal() const112 bool CompilationInfo::has_literal() const {
113 return parse_info_ && parse_info_->literal() != nullptr;
114 }
115
116
has_scope() const117 bool CompilationInfo::has_scope() const {
118 return parse_info_ && parse_info_->scope() != nullptr;
119 }
120
121
// Constructor for compiling a parsed JavaScript function (mode BASE);
// delegates to the base constructor and then applies flag-controlled
// compilation options.
CompilationInfo::CompilationInfo(ParseInfo* parse_info)
    : CompilationInfo(parse_info, nullptr, nullptr, BASE, parse_info->isolate(),
                      parse_info->zone()) {
  // Compiling for the snapshot typically results in different code than
  // compiling later on. This means that code recompiled with deoptimization
  // support won't be "equivalent" (as defined by SharedFunctionInfo::
  // EnableDeoptimizationSupport), so it will replace the old code and all
  // its type feedback. To avoid this, always compile functions in the snapshot
  // with deoptimization support.
  if (isolate_->serializer_enabled()) EnableDeoptimizationSupport();

  // Propagate the TurboFan-related command-line flags into the info.
  if (FLAG_function_context_specialization) MarkAsFunctionContextSpecializing();
  if (FLAG_turbo_inlining) MarkAsInliningEnabled();
  if (FLAG_turbo_source_positions) MarkAsSourcePositionsEnabled();
  if (FLAG_turbo_splitting) MarkAsSplittingEnabled();
  if (FLAG_turbo_types) MarkAsTypingEnabled();

  if (has_shared_info()) {
    if (shared_info()->is_compiled()) {
      // We should initialize the CompilationInfo feedback vector from the
      // passed in shared info, rather than creating a new one.
      feedback_vector_ = Handle<TypeFeedbackVector>(
          shared_info()->feedback_vector(), parse_info->isolate());
    }
    if (shared_info()->never_compiled()) MarkAsFirstCompile();
  }
}
149
150
// Constructor for compiling a code stub; the stub's major name doubles as
// the debug name.
CompilationInfo::CompilationInfo(CodeStub* stub, Isolate* isolate, Zone* zone)
    : CompilationInfo(nullptr, stub, CodeStub::MajorName(stub->MajorKey()),
                      STUB, isolate, zone) {}
154
// Constructor for stub-like compilations that have no CodeStub object,
// only a debug name; the output code kind is forced to Code::STUB.
CompilationInfo::CompilationInfo(const char* debug_name, Isolate* isolate,
                                 Zone* zone)
    : CompilationInfo(nullptr, nullptr, debug_name, STUB, isolate, zone) {
  set_output_code_kind(Code::STUB);
}
160
CompilationInfo(ParseInfo * parse_info,CodeStub * code_stub,const char * debug_name,Mode mode,Isolate * isolate,Zone * zone)161 CompilationInfo::CompilationInfo(ParseInfo* parse_info, CodeStub* code_stub,
162 const char* debug_name, Mode mode,
163 Isolate* isolate, Zone* zone)
164 : parse_info_(parse_info),
165 isolate_(isolate),
166 flags_(0),
167 code_stub_(code_stub),
168 mode_(mode),
169 osr_ast_id_(BailoutId::None()),
170 zone_(zone),
171 deferred_handles_(nullptr),
172 dependencies_(isolate, zone),
173 bailout_reason_(kNoReason),
174 prologue_offset_(Code::kPrologueOffsetNotSet),
175 track_positions_(FLAG_hydrogen_track_positions ||
176 isolate->cpu_profiler()->is_profiling()),
177 opt_count_(has_shared_info() ? shared_info()->opt_count() : 0),
178 parameter_count_(0),
179 optimization_id_(-1),
180 osr_expr_stack_height_(0),
181 debug_name_(debug_name) {
182 // Parameter count is number of stack parameters.
183 if (code_stub_ != NULL) {
184 CodeStubDescriptor descriptor(code_stub_);
185 parameter_count_ = descriptor.GetStackParameterCount();
186 if (descriptor.function_mode() == NOT_JS_FUNCTION_STUB_MODE) {
187 parameter_count_--;
188 }
189 set_output_code_kind(code_stub->GetCodeKind());
190 } else {
191 set_output_code_kind(Code::FUNCTION);
192 }
193 }
194
195
// Disables any future optimization attempts (recording the bailout reason
// on the shared function info if one was set) and releases deferred handles.
CompilationInfo::~CompilationInfo() {
  DisableFutureOptimization();
  delete deferred_handles_;
#ifdef DEBUG
  // Check that no dependent maps have been added or added dependent maps have
  // been rolled back or committed.
  DCHECK(dependencies()->IsEmpty());
#endif  // DEBUG
}
205
206
num_parameters() const207 int CompilationInfo::num_parameters() const {
208 return has_scope() ? scope()->num_parameters() : parameter_count_;
209 }
210
211
num_parameters_including_this() const212 int CompilationInfo::num_parameters_including_this() const {
213 return num_parameters() + (is_this_defined() ? 1 : 0);
214 }
215
216
// Stubs have no implicit |this|; all other compilations do.
bool CompilationInfo::is_this_defined() const { return !IsStub(); }
218
219
num_heap_slots() const220 int CompilationInfo::num_heap_slots() const {
221 return has_scope() ? scope()->num_heap_slots() : 0;
222 }
223
224
225 // Primitive functions are unlikely to be picked up by the stack-walking
226 // profiler, so they trigger their own optimization when they're called
227 // for the SharedFunctionInfo::kCallsUntilPrimitiveOptimization-th time.
ShouldSelfOptimize()228 bool CompilationInfo::ShouldSelfOptimize() {
229 return FLAG_crankshaft &&
230 !(literal()->flags() & AstProperties::kDontSelfOptimize) &&
231 !literal()->dont_optimize() &&
232 literal()->scope()->AllowsLazyCompilation() &&
233 (!has_shared_info() || !shared_info()->optimization_disabled());
234 }
235
236
// Lazily creates the type feedback vector from the literal's spec, and
// verifies that an existing vector still matches that spec.
void CompilationInfo::EnsureFeedbackVector() {
  if (feedback_vector_.is_null()) {
    Handle<TypeFeedbackMetadata> feedback_metadata =
        TypeFeedbackMetadata::New(isolate(), literal()->feedback_vector_spec());
    feedback_vector_ = TypeFeedbackVector::New(isolate(), feedback_metadata);
  }

  // It's very important that recompiles do not alter the structure of the
  // type feedback vector.
  CHECK(!feedback_vector_->metadata()->SpecDiffersFrom(
      literal()->feedback_vector_spec()));
}
249
250
// Forwards to the scope; requires a scope to be present (see scope()).
bool CompilationInfo::has_simple_parameters() {
  return scope()->has_simple_parameters();
}
254
255
// Records {shared} as an inlined function (child of {parent_id}) in
// inlined_function_infos_ and returns its new inline id. With
// --hydrogen-track-positions the function's source is dumped for every
// recorded function, and an "INLINE" trace line is emitted for every id
// other than 0 (id 0 is the outermost function being optimized).
int CompilationInfo::TraceInlinedFunction(Handle<SharedFunctionInfo> shared,
                                          SourcePosition position,
                                          int parent_id) {
  DCHECK(track_positions_);

  // The new entry's index in the vector becomes its inline id.
  int inline_id = static_cast<int>(inlined_function_infos_.size());
  InlinedFunctionInfo info(parent_id, position, UnboundScript::kNoScriptId,
                           shared->start_position());
  if (!shared->script()->IsUndefined()) {
    Handle<Script> script(Script::cast(shared->script()));
    info.script_id = script->id();

    if (FLAG_hydrogen_track_positions && !script->source()->IsUndefined()) {
      CodeTracer::Scope tracing_scope(isolate()->GetCodeTracer());
      OFStream os(tracing_scope.file());
      os << "--- FUNCTION SOURCE (" << shared->DebugName()->ToCString().get()
         << ") id{" << optimization_id() << "," << inline_id << "} ---\n";
      {
        DisallowHeapAllocation no_allocation;
        int start = shared->start_position();
        int len = shared->end_position() - start;
        String::SubStringRange source(String::cast(script->source()), start,
                                      len);
        // Escape each character so the dump remains machine-reversible.
        for (const auto& c : source) {
          os << AsReversiblyEscapedUC16(c);
        }
      }

      os << "\n--- END ---\n";
    }
  }

  inlined_function_infos_.push_back(info);

  if (FLAG_hydrogen_track_positions && inline_id != 0) {
    CodeTracer::Scope tracing_scope(isolate()->GetCodeTracer());
    OFStream os(tracing_scope.file());
    os << "INLINE (" << shared->DebugName()->ToCString().get() << ") id{"
       << optimization_id() << "," << inline_id << "} AS " << inline_id
       << " AT " << position << std::endl;
  }

  return inline_id;
}
300
301
// Associates a deopt point (code offset {pc_offset}) with the inlined
// function it occurred in. No-op unless positions are being tracked for a
// real function compilation.
void CompilationInfo::LogDeoptCallPosition(int pc_offset, int inlining_id) {
  if (!track_positions_ || IsStub()) return;
  DCHECK_LT(static_cast<size_t>(inlining_id), inlined_function_infos_.size());
  inlined_function_infos_.at(inlining_id).deopt_pc_offsets.push_back(pc_offset);
}
307
308
GetDebugName() const309 base::SmartArrayPointer<char> CompilationInfo::GetDebugName() const {
310 if (parse_info()) {
311 AllowHandleDereference allow_deref;
312 return parse_info()->literal()->debug_name()->ToCString();
313 }
314 const char* str = debug_name_ ? debug_name_ : "unknown";
315 size_t len = strlen(str) + 1;
316 base::SmartArrayPointer<char> name(new char[len]);
317 memcpy(name.get(), str, len);
318 return name;
319 }
320
321
// True for sloppy-mode, non-native code (which expects its receiver to be
// a JSReceiver rather than a raw value).
bool CompilationInfo::ExpectsJSReceiverAsReceiver() {
  return is_sloppy(language_mode()) && !is_native();
}
325
326
// Graph builder that additionally tracks source positions while walking the
// AST: before visiting an expression or statement node with a known
// position, the builder's current source position is switched to it and
// restored afterwards. Declarations are forwarded unchanged.
class HOptimizedGraphBuilderWithPositions: public HOptimizedGraphBuilder {
 public:
  explicit HOptimizedGraphBuilderWithPositions(CompilationInfo* info)
      : HOptimizedGraphBuilder(info) {
  }

// Expression visitors: set the position around the base-class visit.
#define DEF_VISIT(type)                                      \
  void Visit##type(type* node) override {                    \
    SourcePosition old_position = SourcePosition::Unknown(); \
    if (node->position() != RelocInfo::kNoPosition) {        \
      old_position = source_position();                      \
      SetSourcePosition(node->position());                   \
    }                                                        \
    HOptimizedGraphBuilder::Visit##type(node);               \
    if (!old_position.IsUnknown()) {                         \
      set_source_position(old_position);                     \
    }                                                        \
  }
  EXPRESSION_NODE_LIST(DEF_VISIT)
#undef DEF_VISIT

// Statement visitors: identical position bracketing as above.
#define DEF_VISIT(type)                                      \
  void Visit##type(type* node) override {                    \
    SourcePosition old_position = SourcePosition::Unknown(); \
    if (node->position() != RelocInfo::kNoPosition) {        \
      old_position = source_position();                      \
      SetSourcePosition(node->position());                   \
    }                                                        \
    HOptimizedGraphBuilder::Visit##type(node);               \
    if (!old_position.IsUnknown()) {                         \
      set_source_position(old_position);                     \
    }                                                        \
  }
  STATEMENT_NODE_LIST(DEF_VISIT)
#undef DEF_VISIT

// Declarations carry no positions worth tracking; plain forwarding.
#define DEF_VISIT(type)                        \
  void Visit##type(type* node) override {      \
    HOptimizedGraphBuilder::Visit##type(node); \
  }
  DECLARATION_NODE_LIST(DEF_VISIT)
#undef DEF_VISIT
};
370
371
// First phase of an optimizing compile job: runs pre-checks, chooses
// between TurboFan and Crankshaft, and builds the compiler graph. Returns
// SUCCEEDED when a graph (or, for TurboFan, finished code) was produced,
// FAILED/BAILED_OUT otherwise, or aborts/retries with a bailout reason.
OptimizedCompileJob::Status OptimizedCompileJob::CreateGraph() {
  DCHECK(info()->IsOptimizing());

  // Do not use Crankshaft/TurboFan if we need to be able to set break points.
  if (info()->shared_info()->HasDebugInfo()) {
    return AbortOptimization(kFunctionBeingDebugged);
  }

  // Limit the number of times we try to optimize functions.
  const int kMaxOptCount =
      FLAG_deopt_every_n_times == 0 ? FLAG_max_opt_count : 1000;
  if (info()->opt_count() > kMaxOptCount) {
    return AbortOptimization(kOptimizedTooManyTimes);
  }

  // Check the whitelist for Crankshaft.
  if (!info()->closure()->PassesFilter(FLAG_hydrogen_filter)) {
    return AbortOptimization(kHydrogenFilter);
  }

  // Optimization requires a version of fullcode with deoptimization support.
  // Recompile the unoptimized version of the code if the current version
  // doesn't have deoptimization support already.
  // Otherwise, if we are gathering compilation time and space statistics
  // for hydrogen, gather baseline statistics for a fullcode compilation.
  bool should_recompile = !info()->shared_info()->has_deoptimization_support();
  if (should_recompile || FLAG_hydrogen_stats) {
    base::ElapsedTimer timer;
    if (FLAG_hydrogen_stats) {
      timer.Start();
    }
    if (!Compiler::EnsureDeoptimizationSupport(info())) {
      return SetLastStatus(FAILED);
    }
    if (FLAG_hydrogen_stats) {
      isolate()->GetHStatistics()->IncrementFullCodeGen(timer.Elapsed());
    }
  }

  DCHECK(info()->shared_info()->has_deoptimization_support());
  DCHECK(!info()->is_first_compile());

  bool optimization_disabled = info()->shared_info()->optimization_disabled();
  bool dont_crankshaft = info()->shared_info()->dont_crankshaft();

  // Check the enabling conditions for Turbofan.
  // 1. "use asm" code.
  bool is_turbofanable_asm = FLAG_turbo_asm &&
                             info()->shared_info()->asm_function() &&
                             !optimization_disabled;

  // 2. Fallback for features unsupported by Crankshaft.
  bool is_unsupported_by_crankshaft_but_turbofanable =
      dont_crankshaft && strcmp(FLAG_turbo_filter, "~~") == 0 &&
      !optimization_disabled;

  // 3. Explicitly enabled by the command-line filter.
  bool passes_turbo_filter = info()->closure()->PassesFilter(FLAG_turbo_filter);

  // If this is OSR request, OSR must be enabled by Turbofan.
  bool passes_osr_test = FLAG_turbo_osr || !info()->is_osr();

  if ((is_turbofanable_asm || is_unsupported_by_crankshaft_but_turbofanable ||
       passes_turbo_filter) &&
      passes_osr_test) {
    // Use TurboFan for the compilation.
    if (FLAG_trace_opt) {
      OFStream os(stdout);
      os << "[compiling method " << Brief(*info()->closure())
         << " using TurboFan";
      if (info()->is_osr()) os << " OSR";
      os << "]" << std::endl;
    }

    if (info()->shared_info()->asm_function()) {
      if (info()->osr_frame()) info()->MarkAsFrameSpecializing();
      info()->MarkAsFunctionContextSpecializing();
    } else if (info()->has_global_object() &&
               FLAG_native_context_specialization) {
      info()->MarkAsNativeContextSpecializing();
      info()->MarkAsTypingEnabled();
    }
    if (!info()->shared_info()->asm_function() ||
        FLAG_turbo_asm_deoptimization) {
      info()->MarkAsDeoptimizationEnabled();
    }

    Timer t(this, &time_taken_to_create_graph_);
    compiler::Pipeline pipeline(info());
    pipeline.GenerateCode();
    if (!info()->code().is_null()) {
      return SetLastStatus(SUCCEEDED);
    }
    // TurboFan produced no code; fall through to try Crankshaft below.
  }

  if (!isolate()->use_crankshaft() || dont_crankshaft) {
    // Crankshaft is entirely disabled.
    return SetLastStatus(FAILED);
  }

  Scope* scope = info()->scope();
  if (LUnallocated::TooManyParameters(scope->num_parameters())) {
    // Crankshaft would require too many Lithium operands.
    return AbortOptimization(kTooManyParameters);
  }

  if (info()->is_osr() &&
      LUnallocated::TooManyParametersOrStackSlots(scope->num_parameters(),
                                                  scope->num_stack_slots())) {
    // Crankshaft would require too many Lithium operands.
    return AbortOptimization(kTooManyParametersLocals);
  }

  if (scope->HasIllegalRedeclaration()) {
    // Crankshaft cannot handle illegal redeclarations.
    return AbortOptimization(kFunctionWithIllegalRedeclaration);
  }

  if (FLAG_trace_opt) {
    OFStream os(stdout);
    os << "[compiling method " << Brief(*info()->closure())
       << " using Crankshaft";
    if (info()->is_osr()) os << " OSR";
    os << "]" << std::endl;
  }

  if (FLAG_trace_hydrogen) {
    isolate()->GetHTracer()->TraceCompilation(info());
  }

  // Type-check the function.
  AstTyper(info()->isolate(), info()->zone(), info()->closure(),
           info()->scope(), info()->osr_ast_id(), info()->literal())
      .Run();

  // Optimization could have been disabled by the parser. Note that this check
  // is only needed because the Hydrogen graph builder is missing some bailouts.
  if (info()->shared_info()->optimization_disabled()) {
    return AbortOptimization(
        info()->shared_info()->disable_optimization_reason());
  }

  // Use the position-tracking builder when positions are needed for
  // profiling or IC tracing.
  graph_builder_ = (info()->is_tracking_positions() || FLAG_trace_ic)
                       ? new (info()->zone())
                             HOptimizedGraphBuilderWithPositions(info())
                       : new (info()->zone()) HOptimizedGraphBuilder(info());

  Timer t(this, &time_taken_to_create_graph_);
  graph_ = graph_builder_->CreateGraph();

  if (isolate()->has_pending_exception()) {
    return SetLastStatus(FAILED);
  }

  if (graph_ == NULL) return SetLastStatus(BAILED_OUT);

  if (info()->dependencies()->HasAborted()) {
    // Dependency has changed during graph creation. Let's try again later.
    return RetryOptimization(kBailedOutDueToDependencyChange);
  }

  return SetLastStatus(SUCCEEDED);
}
535
536
// Second phase of a Crankshaft compile job: runs the Hydrogen optimization
// passes and lowers the graph to a Lithium chunk. The Disallow* scopes
// ensure this phase performs no heap allocation, handle creation or
// dereferencing, and no code-dependency changes.
OptimizedCompileJob::Status OptimizedCompileJob::OptimizeGraph() {
  DisallowHeapAllocation no_allocation;
  DisallowHandleAllocation no_handles;
  DisallowHandleDereference no_deref;
  DisallowCodeDependencyChange no_dependency_change;

  DCHECK(last_status() == SUCCEEDED);
  // TODO(turbofan): Currently everything is done in the first phase.
  if (!info()->code().is_null()) {
    return last_status();
  }

  Timer t(this, &time_taken_to_optimize_);
  DCHECK(graph_ != NULL);
  BailoutReason bailout_reason = kNoReason;

  if (graph_->Optimize(&bailout_reason)) {
    chunk_ = LChunk::NewChunk(graph_);
    if (chunk_ != NULL) return SetLastStatus(SUCCEEDED);
  } else if (bailout_reason != kNoReason) {
    graph_builder_->Bailout(bailout_reason);
  }

  return SetLastStatus(BAILED_OUT);
}
562
563
564 namespace {
565
// Registers a weak-code dependency from {object} to {code} in the heap's
// weak-object-to-code table, so the code can be deoptimized when the object
// dies.
void AddWeakObjectToCodeDependency(Isolate* isolate, Handle<HeapObject> object,
                                   Handle<Code> code) {
  Handle<WeakCell> cell = Code::WeakCellFor(code);
  Heap* heap = isolate->heap();
  Handle<DependentCode> dep(heap->LookupWeakObjectToCodeDependency(object));
  dep = DependentCode::InsertWeakCode(dep, DependentCode::kWeakCodeGroup, cell);
  heap->AddWeakObjectToCodeDependency(object, dep);
}
574
575
// Scans {code}'s relocation info for embedded objects and cells that are
// weakly referenced, and registers the corresponding dependencies: maps get
// dependent-code entries (and are retained across GCs), all other objects
// go through the weak-object-to-code table.
void RegisterWeakObjectsInOptimizedCode(Handle<Code> code) {
  // TODO(turbofan): Move this to pipeline.cc once Crankshaft dies.
  Isolate* const isolate = code->GetIsolate();
  DCHECK(code->is_optimized_code());
  std::vector<Handle<Map>> maps;
  std::vector<Handle<HeapObject>> objects;
  {
    // Collect the handles first; the dependency registration below may
    // allocate.
    DisallowHeapAllocation no_gc;
    int const mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                          RelocInfo::ModeMask(RelocInfo::CELL);
    for (RelocIterator it(*code, mode_mask); !it.done(); it.next()) {
      RelocInfo::Mode mode = it.rinfo()->rmode();
      if (mode == RelocInfo::CELL &&
          code->IsWeakObjectInOptimizedCode(it.rinfo()->target_cell())) {
        objects.push_back(handle(it.rinfo()->target_cell(), isolate));
      } else if (mode == RelocInfo::EMBEDDED_OBJECT &&
                 code->IsWeakObjectInOptimizedCode(
                     it.rinfo()->target_object())) {
        Handle<HeapObject> object(HeapObject::cast(it.rinfo()->target_object()),
                                  isolate);
        if (object->IsMap()) {
          maps.push_back(Handle<Map>::cast(object));
        } else {
          objects.push_back(object);
        }
      }
    }
  }
  for (Handle<Map> map : maps) {
    if (map->dependent_code()->IsEmpty(DependentCode::kWeakCodeGroup)) {
      isolate->heap()->AddRetainedMap(map);
    }
    Map::AddDependentCode(map, DependentCode::kWeakCodeGroup, code);
  }
  for (Handle<HeapObject> object : objects) {
    AddWeakObjectToCodeDependency(isolate, object, code);
  }
  code->set_can_have_weak_objects(true);
}
615
616 } // namespace
617
618
// Final phase of an optimizing compile job: either finalizes code already
// produced in the first phase (TurboFan) or generates machine code from the
// Lithium chunk (Crankshaft), registering weak objects and recording stats
// in both cases.
OptimizedCompileJob::Status OptimizedCompileJob::GenerateCode() {
  DCHECK(last_status() == SUCCEEDED);
  // TODO(turbofan): Currently everything is done in the first phase.
  if (!info()->code().is_null()) {
    // TurboFan path: code already exists; commit dependencies and finish.
    info()->dependencies()->Commit(info()->code());
    if (info()->is_deoptimization_enabled()) {
      info()->parse_info()->context()->native_context()->AddOptimizedCode(
          *info()->code());
      RegisterWeakObjectsInOptimizedCode(info()->code());
    }
    RecordOptimizationStats();
    return last_status();
  }

  DCHECK(!info()->dependencies()->HasAborted());
  DisallowCodeDependencyChange no_dependency_change;
  DisallowJavascriptExecution no_js(isolate());
  {  // Scope for timer.
    Timer timer(this, &time_taken_to_codegen_);
    DCHECK(chunk_ != NULL);
    DCHECK(graph_ != NULL);
    // Deferred handles reference objects that were accessible during
    // graph creation. To make sure that we don't encounter inconsistencies
    // between graph creation and code generation, we disallow accessing
    // objects through deferred handles during the latter, with exceptions.
    DisallowDeferredHandleDereference no_deferred_handle_deref;
    Handle<Code> optimized_code = chunk_->Codegen();
    if (optimized_code.is_null()) {
      if (info()->bailout_reason() == kNoReason) {
        return AbortOptimization(kCodeGenerationFailed);
      }
      return SetLastStatus(BAILED_OUT);
    }
    RegisterWeakObjectsInOptimizedCode(optimized_code);
    info()->SetCode(optimized_code);
  }
  RecordOptimizationStats();
  // Add to the weak list of optimized code objects.
  info()->context()->native_context()->AddOptimizedCode(*info()->code());
  return SetLastStatus(SUCCEEDED);
}
660
661
// Bumps the function's opt_count and, depending on tracing flags, prints
// per-compilation timings (--trace-opt), cumulative totals
// (--trace-opt-stats), and hydrogen phase subtotals (--hydrogen-stats).
void OptimizedCompileJob::RecordOptimizationStats() {
  Handle<JSFunction> function = info()->closure();
  if (!function->IsOptimized()) {
    // Concurrent recompilation and OSR may race. Increment only once.
    int opt_count = function->shared()->opt_count();
    function->shared()->set_opt_count(opt_count + 1);
  }
  double ms_creategraph = time_taken_to_create_graph_.InMillisecondsF();
  double ms_optimize = time_taken_to_optimize_.InMillisecondsF();
  double ms_codegen = time_taken_to_codegen_.InMillisecondsF();
  if (FLAG_trace_opt) {
    PrintF("[optimizing ");
    function->ShortPrint();
    PrintF(" - took %0.3f, %0.3f, %0.3f ms]\n", ms_creategraph, ms_optimize,
           ms_codegen);
  }
  if (FLAG_trace_opt_stats) {
    // Process-wide accumulators for --trace-opt-stats output.
    static double compilation_time = 0.0;
    static int compiled_functions = 0;
    static int code_size = 0;

    compilation_time += (ms_creategraph + ms_optimize + ms_codegen);
    compiled_functions++;
    code_size += function->shared()->SourceSize();
    PrintF("Compiled: %d functions with %d byte source size in %fms.\n",
           compiled_functions,
           code_size,
           compilation_time);
  }
  if (FLAG_hydrogen_stats) {
    isolate()->GetHStatistics()->IncrementSubtotals(time_taken_to_create_graph_,
                                                    time_taken_to_optimize_,
                                                    time_taken_to_codegen_);
  }
}
697
698
699 // Sets the expected number of properties based on estimate from compiler.
SetExpectedNofPropertiesFromEstimate(Handle<SharedFunctionInfo> shared,int estimate)700 void SetExpectedNofPropertiesFromEstimate(Handle<SharedFunctionInfo> shared,
701 int estimate) {
702 // If no properties are added in the constructor, they are more likely
703 // to be added later.
704 if (estimate == 0) estimate = 2;
705
706 // TODO(yangguo): check whether those heuristics are still up-to-date.
707 // We do not shrink objects that go into a snapshot (yet), so we adjust
708 // the estimate conservatively.
709 if (shared->GetIsolate()->serializer_enabled()) {
710 estimate += 2;
711 } else {
712 // Inobject slack tracking will reclaim redundant inobject space later,
713 // so we can afford to adjust the estimate generously.
714 estimate += 8;
715 }
716
717 shared->set_expected_nof_properties(estimate);
718 }
719
720
MaybeDisableOptimization(Handle<SharedFunctionInfo> shared_info,BailoutReason bailout_reason)721 static void MaybeDisableOptimization(Handle<SharedFunctionInfo> shared_info,
722 BailoutReason bailout_reason) {
723 if (bailout_reason != kNoReason) {
724 shared_info->DisableOptimization(bailout_reason);
725 }
726 }
727
728
// Emits a code-creation event for profilers/loggers, including script name
// and line/column when available.
static void RecordFunctionCompilation(Logger::LogEventsAndTags tag,
                                      CompilationInfo* info,
                                      Handle<SharedFunctionInfo> shared) {
  // SharedFunctionInfo is passed separately, because if CompilationInfo
  // was created using Script object, it will not have it.

  // Log the code generation. If source information is available include
  // script name and line number. Check explicitly whether logging is
  // enabled as finding the line number is not free.
  if (info->isolate()->logger()->is_logging_code_events() ||
      info->isolate()->cpu_profiler()->is_profiling()) {
    Handle<Script> script = info->parse_info()->script();
    Handle<Code> code = info->code();
    // Don't log the CompileLazy builtin trampoline as a compilation.
    if (code.is_identical_to(info->isolate()->builtins()->CompileLazy())) {
      return;
    }
    // Script positions are zero-based; loggers expect one-based.
    int line_num = Script::GetLineNumber(script, shared->start_position()) + 1;
    int column_num =
        Script::GetColumnNumber(script, shared->start_position()) + 1;
    String* script_name = script->name()->IsString()
                              ? String::cast(script->name())
                              : info->isolate()->heap()->empty_string();
    Logger::LogEventsAndTags log_tag = Logger::ToNativeByScript(tag, *script);
    PROFILE(info->isolate(),
            CodeCreateEvent(log_tag, *code, *shared, info, script_name,
                            line_num, column_num));
  }
}
757
758
CompileUnoptimizedCode(CompilationInfo * info)759 static bool CompileUnoptimizedCode(CompilationInfo* info) {
760 DCHECK(AllowCompilation::IsAllowed(info->isolate()));
761 if (!Compiler::Analyze(info->parse_info()) ||
762 !FullCodeGenerator::MakeCode(info)) {
763 Isolate* isolate = info->isolate();
764 if (!isolate->has_pending_exception()) isolate->StackOverflow();
765 return false;
766 }
767 return true;
768 }
769
770
771 // TODO(rmcilroy): Remove this temporary work-around when ignition supports
772 // catch and eval.
IgnitionShouldFallbackToFullCodeGen(Scope * scope)773 static bool IgnitionShouldFallbackToFullCodeGen(Scope* scope) {
774 if (scope->is_eval_scope() || scope->is_catch_scope() ||
775 scope->calls_eval()) {
776 return true;
777 }
778 for (auto inner_scope : *scope->inner_scopes()) {
779 if (IgnitionShouldFallbackToFullCodeGen(inner_scope)) return true;
780 }
781 return false;
782 }
783
784
// Decides whether {info} should be compiled with the Ignition interpreter
// instead of full-codegen, honoring --ignition_filter.
static bool UseIgnition(CompilationInfo* info) {
  // Cannot use Ignition when the {function_data} is already used.
  if (info->has_shared_info() && info->shared_info()->HasBuiltinFunctionId()) {
    return false;
  }

  // Checks whether the scope chain is supported.
  if (FLAG_ignition_fallback_on_eval_and_catch &&
      IgnitionShouldFallbackToFullCodeGen(info->scope())) {
    return false;
  }

  // Checks whether top level functions should be passed by the filter.
  // (Top-level code has no closure; only the empty or "*" filter admits it.)
  if (info->closure().is_null()) {
    Vector<const char> filter = CStrVector(FLAG_ignition_filter);
    return (filter.length() == 0) || (filter.length() == 1 && filter[0] == '*');
  }

  // Finally respect the filter.
  return info->closure()->PassesFilter(FLAG_ignition_filter);
}
806
807
GenerateBaselineCode(CompilationInfo * info)808 static bool GenerateBaselineCode(CompilationInfo* info) {
809 if (FLAG_ignition && UseIgnition(info)) {
810 return interpreter::Interpreter::MakeBytecode(info);
811 } else {
812 return FullCodeGenerator::MakeCode(info);
813 }
814 }
815
816
CompileBaselineCode(CompilationInfo * info)817 static bool CompileBaselineCode(CompilationInfo* info) {
818 DCHECK(AllowCompilation::IsAllowed(info->isolate()));
819 if (!Compiler::Analyze(info->parse_info()) || !GenerateBaselineCode(info)) {
820 Isolate* isolate = info->isolate();
821 if (!isolate->has_pending_exception()) isolate->StackOverflow();
822 return false;
823 }
824 return true;
825 }
826
827
// Parses and compiles unoptimized code (or bytecode) for the function
// described by {info}, then installs the results (scope info, code,
// feedback vector, optional bytecode) on the SharedFunctionInfo. Returns
// the code, or an empty MaybeHandle on failure.
MUST_USE_RESULT static MaybeHandle<Code> GetUnoptimizedCodeCommon(
    CompilationInfo* info) {
  VMState<COMPILER> state(info->isolate());
  PostponeInterruptsScope postpone(info->isolate());

  // Parse and update CompilationInfo with the results.
  if (!Parser::ParseStatic(info->parse_info())) return MaybeHandle<Code>();
  Handle<SharedFunctionInfo> shared = info->shared_info();
  FunctionLiteral* lit = info->literal();
  DCHECK_EQ(shared->language_mode(), lit->language_mode());
  SetExpectedNofPropertiesFromEstimate(shared, lit->expected_property_count());
  MaybeDisableOptimization(shared, lit->dont_optimize_reason());

  // Compile either unoptimized code or bytecode for the interpreter.
  if (!CompileBaselineCode(info)) return MaybeHandle<Code>();
  if (info->code()->kind() == Code::FUNCTION) {  // Only for full code.
    RecordFunctionCompilation(Logger::LAZY_COMPILE_TAG, info, shared);
  }

  // Update the shared function info with the scope info. Allocating the
  // ScopeInfo object may cause a GC.
  Handle<ScopeInfo> scope_info =
      ScopeInfo::Create(info->isolate(), info->zone(), info->scope());
  shared->set_scope_info(*scope_info);

  // Update the code and feedback vector for the shared function info.
  shared->ReplaceCode(*info->code());
  shared->set_feedback_vector(*info->feedback_vector());
  if (info->has_bytecode_array()) {
    DCHECK(shared->function_data()->IsUndefined());
    shared->set_function_data(*info->bytecode_array());
  }

  return info->code();
}
863
864
GetCodeFromOptimizedCodeMap(Handle<JSFunction> function,BailoutId osr_ast_id)865 MUST_USE_RESULT static MaybeHandle<Code> GetCodeFromOptimizedCodeMap(
866 Handle<JSFunction> function, BailoutId osr_ast_id) {
867 Handle<SharedFunctionInfo> shared(function->shared());
868 DisallowHeapAllocation no_gc;
869 CodeAndLiterals cached = shared->SearchOptimizedCodeMap(
870 function->context()->native_context(), osr_ast_id);
871 if (cached.code != nullptr) {
872 // Caching of optimized code enabled and optimized code found.
873 if (cached.literals != nullptr) function->set_literals(cached.literals);
874 DCHECK(!cached.code->marked_for_deoptimization());
875 DCHECK(function->shared()->is_compiled());
876 return Handle<Code>(cached.code);
877 }
878 return MaybeHandle<Code>();
879 }
880
881
InsertCodeIntoOptimizedCodeMap(CompilationInfo * info)882 static void InsertCodeIntoOptimizedCodeMap(CompilationInfo* info) {
883 Handle<Code> code = info->code();
884 if (code->kind() != Code::OPTIMIZED_FUNCTION) return; // Nothing to do.
885
886 // Function context specialization folds-in the function context,
887 // so no sharing can occur.
888 if (info->is_function_context_specializing()) return;
889 // Frame specialization implies function context specialization.
890 DCHECK(!info->is_frame_specializing());
891
892 // Cache optimized context-specific code.
893 Handle<JSFunction> function = info->closure();
894 Handle<SharedFunctionInfo> shared(function->shared());
895 Handle<LiteralsArray> literals(function->literals());
896 Handle<Context> native_context(function->context()->native_context());
897 SharedFunctionInfo::AddToOptimizedCodeMap(shared, native_context, code,
898 literals, info->osr_ast_id());
899
900 // Do not cache (native) context-independent code compiled for OSR.
901 if (code->is_turbofanned() && info->is_osr()) return;
902
903 // Cache optimized (native) context-independent code.
904 if (FLAG_turbo_cache_shared_code && code->is_turbofanned() &&
905 !info->is_native_context_specializing()) {
906 DCHECK(!info->is_function_context_specializing());
907 DCHECK(info->osr_ast_id().IsNone());
908 Handle<SharedFunctionInfo> shared(function->shared());
909 SharedFunctionInfo::AddSharedCodeToOptimizedCodeMap(shared, code);
910 }
911 }
912
913
Renumber(ParseInfo * parse_info)914 static bool Renumber(ParseInfo* parse_info) {
915 if (!AstNumbering::Renumber(parse_info->isolate(), parse_info->zone(),
916 parse_info->literal())) {
917 return false;
918 }
919 Handle<SharedFunctionInfo> shared_info = parse_info->shared_info();
920 if (!shared_info.is_null()) {
921 FunctionLiteral* lit = parse_info->literal();
922 shared_info->set_ast_node_count(lit->ast_node_count());
923 MaybeDisableOptimization(shared_info, lit->dont_optimize_reason());
924 shared_info->set_dont_crankshaft(lit->flags() &
925 AstProperties::kDontCrankshaft);
926 }
927 return true;
928 }
929
930
Analyze(ParseInfo * info)931 bool Compiler::Analyze(ParseInfo* info) {
932 DCHECK_NOT_NULL(info->literal());
933 if (!Rewriter::Rewrite(info)) return false;
934 if (!Scope::Analyze(info)) return false;
935 if (!Renumber(info)) return false;
936 DCHECK_NOT_NULL(info->scope());
937 return true;
938 }
939
940
ParseAndAnalyze(ParseInfo * info)941 bool Compiler::ParseAndAnalyze(ParseInfo* info) {
942 if (!Parser::ParseStatic(info)) return false;
943 return Compiler::Analyze(info);
944 }
945
946
// Synchronously runs the whole optimizing pipeline (graph creation,
// optimization, code generation) for |info| on the current thread. On
// success the new code is entered into the optimized code map and the
// compilation is logged; on failure returns false (optionally tracing the
// bailout reason under --trace-opt).
static bool GetOptimizedCodeNow(CompilationInfo* info) {
  Isolate* isolate = info->isolate();
  // Canonicalize handles for the duration of this on-thread compile.
  CanonicalHandleScope canonical(isolate);

  if (!Compiler::ParseAndAnalyze(info->parse_info())) return false;

  TimerEventScope<TimerEventRecompileSynchronous> timer(isolate);

  // Run the three pipeline phases back-to-back; bail out on the first
  // failure.
  OptimizedCompileJob job(info);
  if (job.CreateGraph() != OptimizedCompileJob::SUCCEEDED ||
      job.OptimizeGraph() != OptimizedCompileJob::SUCCEEDED ||
      job.GenerateCode() != OptimizedCompileJob::SUCCEEDED) {
    if (FLAG_trace_opt) {
      PrintF("[aborted optimizing ");
      info->closure()->ShortPrint();
      PrintF(" because: %s]\n", GetBailoutReason(info->bailout_reason()));
    }
    return false;
  }

  // Success!
  DCHECK(!isolate->has_pending_exception());
  InsertCodeIntoOptimizedCodeMap(info);
  RecordFunctionCompilation(Logger::LAZY_COMPILE_TAG, info,
                            info->shared_info());
  return true;
}
974
975
// Starts a concurrent optimization job for |info|: parses, analyzes and
// builds the graph on this thread, then hands the job to the optimizing
// compile dispatcher to finish in the background. Returns false if the
// dispatcher queue is full or any main-thread phase fails.
static bool GetOptimizedCodeLater(CompilationInfo* info) {
  Isolate* isolate = info->isolate();
  // Canonicalize handles created during the main-thread part of the compile.
  CanonicalHandleScope canonical(isolate);

  if (!isolate->optimizing_compile_dispatcher()->IsQueueAvailable()) {
    if (FLAG_trace_concurrent_recompilation) {
      PrintF("  ** Compilation queue full, will retry optimizing ");
      info->closure()->ShortPrint();
      PrintF(" later.\n");
    }
    return false;
  }

  CompilationHandleScope handle_scope(info);
  if (!Compiler::ParseAndAnalyze(info->parse_info())) return false;

  // Reopen handles in the new CompilationHandleScope.
  info->ReopenHandlesInNewHandleScope();
  info->parse_info()->ReopenHandlesInNewHandleScope();

  TimerEventScope<TimerEventRecompileSynchronous> timer(info->isolate());

  // The job is allocated in the compilation zone so it stays alive until the
  // dispatcher has processed it.
  OptimizedCompileJob* job = new (info->zone()) OptimizedCompileJob(info);
  OptimizedCompileJob::Status status = job->CreateGraph();
  if (status != OptimizedCompileJob::SUCCEEDED) return false;
  isolate->optimizing_compile_dispatcher()->QueueForOptimization(job);

  if (FLAG_trace_concurrent_recompilation) {
    PrintF("  ** Queued ");
    info->closure()->ShortPrint();
    if (info->is_osr()) {
      PrintF(" for concurrent OSR at %d.\n", info->osr_ast_id().ToInt());
    } else {
      PrintF(" for concurrent optimization.\n");
    }
  }
  return true;
}
1014
1015
// Returns unoptimized code for |function|: reuses code already attached to
// its SharedFunctionInfo when present, otherwise compiles it via
// GetUnoptimizedCodeCommon (propagating any exception through the
// MaybeHandle).
MaybeHandle<Code> Compiler::GetUnoptimizedCode(Handle<JSFunction> function) {
  DCHECK(!function->GetIsolate()->has_pending_exception());
  DCHECK(!function->is_compiled());
  if (function->shared()->is_compiled()) {
    return Handle<Code>(function->shared()->code());
  }

  CompilationInfoWithZone info(function);
  Handle<Code> result;
  ASSIGN_RETURN_ON_EXCEPTION(info.isolate(), result,
                             GetUnoptimizedCodeCommon(&info),
                             Code);
  return result;
}
1030
1031
// Lazily compiles |function|, which must not yet be compiled. asm.js
// functions may be sent straight to the optimizing compiler; everything else
// gets unoptimized code first, optionally followed by an optimized compile
// under --always-opt.
MaybeHandle<Code> Compiler::GetLazyCode(Handle<JSFunction> function) {
  Isolate* isolate = function->GetIsolate();
  DCHECK(!isolate->has_pending_exception());
  DCHECK(!function->is_compiled());
  AggregatedHistogramTimerScope timer(isolate->counters()->compile_lazy());
  // If the debugger is active, do not compile with turbofan unless we can
  // deopt from turbofan code.
  if (FLAG_turbo_asm && function->shared()->asm_function() &&
      (FLAG_turbo_asm_deoptimization || !isolate->debug()->is_active()) &&
      !FLAG_turbo_osr) {
    CompilationInfoWithZone info(function);

    VMState<COMPILER> state(isolate);
    PostponeInterruptsScope postpone(isolate);

    info.SetOptimizing(BailoutId::None(), handle(function->shared()->code()));

    if (GetOptimizedCodeNow(&info)) {
      DCHECK(function->shared()->is_compiled());
      return info.code();
    }
    // We have failed compilation. If there was an exception clear it so that
    // we can compile unoptimized code.
    if (isolate->has_pending_exception()) isolate->clear_pending_exception();
  }

  // The shared code may already be compiled at this point (e.g. as a side
  // effect of the optimizing path above); reuse it if so.
  if (function->shared()->is_compiled()) {
    return Handle<Code>(function->shared()->code());
  }

  CompilationInfoWithZone info(function);
  Handle<Code> result;
  ASSIGN_RETURN_ON_EXCEPTION(isolate, result, GetUnoptimizedCodeCommon(&info),
                             Code);

  // Under --always-opt, immediately try to optimize; fall back to the
  // unoptimized code if optimization fails.
  if (FLAG_always_opt) {
    Handle<Code> opt_code;
    if (Compiler::GetOptimizedCode(
            function, result,
            Compiler::NOT_CONCURRENT).ToHandle(&opt_code)) {
      result = opt_code;
    }
  }

  return result;
}
1078
1079
Compile(Handle<JSFunction> function,ClearExceptionFlag flag)1080 bool Compiler::Compile(Handle<JSFunction> function, ClearExceptionFlag flag) {
1081 if (function->is_compiled()) return true;
1082 MaybeHandle<Code> maybe_code = Compiler::GetLazyCode(function);
1083 Handle<Code> code;
1084 if (!maybe_code.ToHandle(&code)) {
1085 if (flag == CLEAR_EXCEPTION) {
1086 function->GetIsolate()->clear_pending_exception();
1087 }
1088 return false;
1089 }
1090 function->ReplaceCode(*code);
1091 DCHECK(function->is_compiled());
1092 return true;
1093 }
1094
1095
// TODO(turbofan): In the future, unoptimized code with deopt support could
// be generated lazily once deopt is triggered.
//
// Ensures the unoptimized code of |info|'s function supports
// deoptimization, recompiling it with full-codegen (deopt support enabled)
// if necessary. Returns false if that recompilation fails.
bool Compiler::EnsureDeoptimizationSupport(CompilationInfo* info) {
  DCHECK_NOT_NULL(info->literal());
  DCHECK(info->has_scope());
  Handle<SharedFunctionInfo> shared = info->shared_info();
  if (!shared->has_deoptimization_support()) {
    // TODO(titzer): just reuse the ParseInfo for the unoptimized compile.
    CompilationInfoWithZone unoptimized(info->closure());
    // Note that we use the same AST that we will use for generating the
    // optimized code.
    ParseInfo* parse_info = unoptimized.parse_info();
    parse_info->set_literal(info->literal());
    parse_info->set_scope(info->scope());
    parse_info->set_context(info->context());
    unoptimized.EnableDeoptimizationSupport();
    // If the current code has reloc info for serialization, also include
    // reloc info for serialization for the new code, so that deopt support
    // can be added without losing IC state.
    if (shared->code()->kind() == Code::FUNCTION &&
        shared->code()->has_reloc_info_for_serialization()) {
      unoptimized.PrepareForSerializing();
    }
    if (!FullCodeGenerator::MakeCode(&unoptimized)) return false;

    // Publish the new code and feedback vector on the shared function info.
    shared->EnableDeoptimizationSupport(*unoptimized.code());
    shared->set_feedback_vector(*unoptimized.feedback_vector());

    info->MarkAsCompiled();

    // The scope info might not have been set if a lazily compiled
    // function is inlined before being called for the first time.
    if (shared->scope_info() == ScopeInfo::Empty(info->isolate())) {
      Handle<ScopeInfo> target_scope_info =
          ScopeInfo::Create(info->isolate(), info->zone(), info->scope());
      shared->set_scope_info(*target_scope_info);
    }

    // The existing unoptimized code was replaced with the new one.
    RecordFunctionCompilation(Logger::LAZY_COMPILE_TAG, &unoptimized, shared);
  }
  return true;
}
1139
1140
// Recompiles top-level eval code for debugging. Eval code is recompiled from
// source in its original context; any exception raised while parsing or
// compiling is cleared and reported as failure (return false).
bool CompileEvalForDebugging(Handle<JSFunction> function,
                             Handle<SharedFunctionInfo> shared) {
  Handle<Script> script(Script::cast(shared->script()));
  Handle<Context> context(function->context());

  Zone zone;
  ParseInfo parse_info(&zone, script);
  CompilationInfo info(&parse_info);
  Isolate* isolate = info.isolate();

  // Reconstruct the parse configuration the original eval used, but disable
  // lazy parsing so inner functions are compiled for debugging too.
  parse_info.set_eval();
  parse_info.set_context(context);
  if (context->IsNativeContext()) parse_info.set_global();
  parse_info.set_toplevel();
  parse_info.set_allow_lazy_parsing(false);
  parse_info.set_language_mode(shared->language_mode());
  parse_info.set_parse_restriction(NO_PARSE_RESTRICTION);
  info.MarkAsDebug();

  VMState<COMPILER> state(info.isolate());

  if (!Parser::ParseStatic(&parse_info)) {
    isolate->clear_pending_exception();
    return false;
  }

  FunctionLiteral* lit = parse_info.literal();
  LiveEditFunctionTracker live_edit_tracker(isolate, lit);

  if (!CompileUnoptimizedCode(&info)) {
    isolate->clear_pending_exception();
    return false;
  }
  // Install the freshly compiled debug code on the shared function info.
  shared->ReplaceCode(*info.code());
  return true;
}
1177
1178
CompileForDebugging(CompilationInfo * info)1179 bool CompileForDebugging(CompilationInfo* info) {
1180 info->MarkAsDebug();
1181 if (GetUnoptimizedCodeCommon(info).is_null()) {
1182 info->isolate()->clear_pending_exception();
1183 return false;
1184 }
1185 return true;
1186 }
1187
1188
IsEvalToplevel(Handle<SharedFunctionInfo> shared)1189 static inline bool IsEvalToplevel(Handle<SharedFunctionInfo> shared) {
1190 return shared->is_toplevel() && shared->script()->IsScript() &&
1191 Script::cast(shared->script())->compilation_type() ==
1192 Script::COMPILATION_TYPE_EVAL;
1193 }
1194
1195
CompileDebugCode(Handle<JSFunction> function)1196 bool Compiler::CompileDebugCode(Handle<JSFunction> function) {
1197 Handle<SharedFunctionInfo> shared(function->shared());
1198 if (IsEvalToplevel(shared)) {
1199 return CompileEvalForDebugging(function, shared);
1200 } else {
1201 CompilationInfoWithZone info(function);
1202 return CompileForDebugging(&info);
1203 }
1204 }
1205
1206
CompileDebugCode(Handle<SharedFunctionInfo> shared)1207 bool Compiler::CompileDebugCode(Handle<SharedFunctionInfo> shared) {
1208 DCHECK(shared->allows_lazy_compilation_without_context());
1209 DCHECK(!IsEvalToplevel(shared));
1210 Zone zone;
1211 ParseInfo parse_info(&zone, shared);
1212 CompilationInfo info(&parse_info);
1213 return CompileForDebugging(&info);
1214 }
1215
1216
// Eagerly (re-)compiles the whole |script| as debug code so that LiveEdit
// can record function info for it. Parse or compile failures simply return
// early; the tracker then records nothing.
void Compiler::CompileForLiveEdit(Handle<Script> script) {
  // TODO(635): support extensions.
  Zone zone;
  ParseInfo parse_info(&zone, script);
  CompilationInfo info(&parse_info);
  PostponeInterruptsScope postpone(info.isolate());
  VMState<COMPILER> state(info.isolate());

  // Get rid of old list of shared function infos.
  info.MarkAsFirstCompile();
  info.MarkAsDebug();
  info.parse_info()->set_global();
  if (!Parser::ParseStatic(info.parse_info())) return;

  LiveEditFunctionTracker tracker(info.isolate(), parse_info.literal());
  if (!CompileUnoptimizedCode(&info)) return;
  if (info.has_shared_info()) {
    // Attach the freshly created scope info to the root function.
    Handle<ScopeInfo> scope_info =
        ScopeInfo::Create(info.isolate(), info.zone(), info.scope());
    info.shared_info()->set_scope_info(*scope_info);
  }
  tracker.RecordRootFunctionInfo(info.code());
}
1240
1241
// Compiles the top-level code of a script, eval, or module described by
// |info| and allocates a new SharedFunctionInfo for it. Parses first if the
// ParseInfo does not already carry a literal. Returns a null handle on
// failure, with a pending exception on the isolate.
static Handle<SharedFunctionInfo> CompileToplevel(CompilationInfo* info) {
  Isolate* isolate = info->isolate();
  PostponeInterruptsScope postpone(isolate);
  DCHECK(!isolate->native_context().is_null());
  ParseInfo* parse_info = info->parse_info();
  Handle<Script> script = parse_info->script();

  // TODO(svenpanne) Obscure place for this, perhaps move to OnBeforeCompile?
  FixedArray* array = isolate->native_context()->embedder_data();
  script->set_context_data(array->get(v8::Context::kDebugIdIndex));

  isolate->debug()->OnBeforeCompile(script);

  DCHECK(parse_info->is_eval() || parse_info->is_global() ||
         parse_info->is_module());

  parse_info->set_toplevel();

  Handle<SharedFunctionInfo> result;

  { VMState<COMPILER> state(info->isolate());
    if (parse_info->literal() == NULL) {
      // Parse the script if needed (if it's already parsed, literal() is
      // non-NULL). If compiling for debugging, we may eagerly compile inner
      // functions, so do not parse lazily in that case.
      ScriptCompiler::CompileOptions options = parse_info->compile_options();
      bool parse_allow_lazy = (options == ScriptCompiler::kConsumeParserCache ||
                               String::cast(script->source())->length() >
                                   FLAG_min_preparse_length) &&
                              !info->is_debug();

      parse_info->set_allow_lazy_parsing(parse_allow_lazy);
      if (!parse_allow_lazy &&
          (options == ScriptCompiler::kProduceParserCache ||
           options == ScriptCompiler::kConsumeParserCache)) {
        // We are going to parse eagerly, but we either 1) have cached data
        // produced by lazy parsing or 2) are asked to generate cached data.
        // Eager parsing cannot benefit from cached data, and producing cached
        // data while parsing eagerly is not implemented.
        parse_info->set_cached_data(nullptr);
        parse_info->set_compile_options(ScriptCompiler::kNoCompileOptions);
      }
      if (!Parser::ParseStatic(parse_info)) {
        return Handle<SharedFunctionInfo>::null();
      }
    }

    DCHECK(!info->is_debug() || !parse_info->allow_lazy_parsing());

    info->MarkAsFirstCompile();

    FunctionLiteral* lit = parse_info->literal();
    LiveEditFunctionTracker live_edit_tracker(isolate, lit);

    // Measure how long it takes to do the compilation; only take the
    // rest of the function into account to avoid overlap with the
    // parsing statistics.
    HistogramTimer* rate = info->is_eval()
          ? info->isolate()->counters()->compile_eval()
          : info->isolate()->counters()->compile();
    HistogramTimerScope timer(rate);

    // Compile the code.
    if (!CompileBaselineCode(info)) {
      return Handle<SharedFunctionInfo>::null();
    }

    // Allocate function.
    DCHECK(!info->code().is_null());
    result = isolate->factory()->NewSharedFunctionInfo(
        lit->name(), lit->materialized_literal_count(), lit->kind(),
        info->code(),
        ScopeInfo::Create(info->isolate(), info->zone(), info->scope()),
        info->feedback_vector());
    if (info->has_bytecode_array()) {
      // Bytecode goes in the (so far unused) function_data slot.
      DCHECK(result->function_data()->IsUndefined());
      result->set_function_data(*info->bytecode_array());
    }

    DCHECK_EQ(RelocInfo::kNoPosition, lit->function_token_position());
    SharedFunctionInfo::InitFromFunctionLiteral(result, lit);
    SharedFunctionInfo::SetScript(result, script);
    result->set_is_toplevel(true);
    if (info->is_eval()) {
      // Eval scripts cannot be (re-)compiled without context.
      result->set_allows_lazy_compilation_without_context(false);
    }

    Handle<String> script_name =
        script->name()->IsString()
            ? Handle<String>(String::cast(script->name()))
            : isolate->factory()->empty_string();
    Logger::LogEventsAndTags log_tag = info->is_eval()
        ? Logger::EVAL_TAG
        : Logger::ToNativeByScript(Logger::SCRIPT_TAG, *script);

    PROFILE(isolate, CodeCreateEvent(
                log_tag, *info->code(), *result, info, *script_name));

    // Hint to the runtime system used when allocating space for initial
    // property space by setting the expected number of properties for
    // the instances of the function.
    SetExpectedNofPropertiesFromEstimate(result,
                                         lit->expected_property_count());

    if (!script.is_null())
      script->set_compilation_state(Script::COMPILATION_STATE_COMPILED);

    live_edit_tracker.RecordFunctionInfo(result, lit, info->zone());
  }

  return result;
}
1355
1356
// Returns a JSFunction for eval code: looks it up in the eval compilation
// cache first and compiles |source| as a fresh eval script on a miss.
// Freshly compiled eval code (unless restricted to a single function
// literal) has optimization disabled and is entered into the cache.
MaybeHandle<JSFunction> Compiler::GetFunctionFromEval(
    Handle<String> source, Handle<SharedFunctionInfo> outer_info,
    Handle<Context> context, LanguageMode language_mode,
    ParseRestriction restriction, int line_offset, int column_offset,
    Handle<Object> script_name, ScriptOriginOptions options) {
  Isolate* isolate = source->GetIsolate();
  int source_length = source->length();
  isolate->counters()->total_eval_size()->Increment(source_length);
  isolate->counters()->total_compile_size()->Increment(source_length);

  CompilationCache* compilation_cache = isolate->compilation_cache();
  MaybeHandle<SharedFunctionInfo> maybe_shared_info =
      compilation_cache->LookupEval(source, outer_info, context, language_mode,
                                    line_offset);
  Handle<SharedFunctionInfo> shared_info;

  Handle<Script> script;
  if (!maybe_shared_info.ToHandle(&shared_info)) {
    // Cache miss: set up a script object and compile the eval source.
    script = isolate->factory()->NewScript(source);
    if (!script_name.is_null()) {
      script->set_name(*script_name);
      script->set_line_offset(line_offset);
      script->set_column_offset(column_offset);
    }
    script->set_origin_options(options);
    Zone zone;
    ParseInfo parse_info(&zone, script);
    CompilationInfo info(&parse_info);
    parse_info.set_eval();
    if (context->IsNativeContext()) parse_info.set_global();
    parse_info.set_language_mode(language_mode);
    parse_info.set_parse_restriction(restriction);
    parse_info.set_context(context);

    Debug::RecordEvalCaller(script);

    shared_info = CompileToplevel(&info);

    if (shared_info.is_null()) {
      return MaybeHandle<JSFunction>();
    } else {
      // Explicitly disable optimization for eval code. We're not yet prepared
      // to handle eval-code in the optimizing compiler.
      if (restriction != ONLY_SINGLE_FUNCTION_LITERAL) {
        shared_info->DisableOptimization(kEval);
      }

      // If caller is strict mode, the result must be in strict mode as well.
      DCHECK(is_sloppy(language_mode) ||
             is_strict(shared_info->language_mode()));
      compilation_cache->PutEval(source, outer_info, context, shared_info,
                                 line_offset);
    }
  } else if (shared_info->ic_age() != isolate->heap()->global_ic_age()) {
    // Cache hit from an older GC epoch: reset IC-related state first.
    shared_info->ResetForNewContext(isolate->heap()->global_ic_age());
  }

  Handle<JSFunction> result =
      isolate->factory()->NewFunctionFromSharedFunctionInfo(
          shared_info, context, NOT_TENURED);

  // OnAfterCompile has to be called after we create the JSFunction, which we
  // may require to recompile the eval for debugging, if we find a function
  // that contains break points in the eval script.
  isolate->debug()->OnAfterCompile(script);

  return result;
}
1425
1426
// Compiles |source| as a classic script or module. Consults the per-isolate
// compilation cache and (under kConsumeCodeCache) the embedder-provided code
// cache before compiling; may write serialized code back through
// |cached_data| under kProduceCodeCache. Returns a null handle on failure.
Handle<SharedFunctionInfo> Compiler::CompileScript(
    Handle<String> source, Handle<Object> script_name, int line_offset,
    int column_offset, ScriptOriginOptions resource_options,
    Handle<Object> source_map_url, Handle<Context> context,
    v8::Extension* extension, ScriptData** cached_data,
    ScriptCompiler::CompileOptions compile_options, NativesFlag natives,
    bool is_module) {
  Isolate* isolate = source->GetIsolate();
  // Validate the cached_data/compile_options combination up front.
  if (compile_options == ScriptCompiler::kNoCompileOptions) {
    cached_data = NULL;
  } else if (compile_options == ScriptCompiler::kProduceParserCache ||
             compile_options == ScriptCompiler::kProduceCodeCache) {
    DCHECK(cached_data && !*cached_data);
    DCHECK(extension == NULL);
    DCHECK(!isolate->debug()->is_loaded());
  } else {
    DCHECK(compile_options == ScriptCompiler::kConsumeParserCache ||
           compile_options == ScriptCompiler::kConsumeCodeCache);
    DCHECK(cached_data && *cached_data);
    DCHECK(extension == NULL);
  }
  int source_length = source->length();
  isolate->counters()->total_load_size()->Increment(source_length);
  isolate->counters()->total_compile_size()->Increment(source_length);

  // TODO(rossberg): The natives do not yet obey strong mode rules
  // (for example, some macros use '==').
  bool use_strong = FLAG_use_strong && !isolate->bootstrapper()->IsActive();
  LanguageMode language_mode =
      construct_language_mode(FLAG_use_strict, use_strong);

  CompilationCache* compilation_cache = isolate->compilation_cache();

  // Do a lookup in the compilation cache but not for extensions.
  MaybeHandle<SharedFunctionInfo> maybe_result;
  Handle<SharedFunctionInfo> result;
  if (extension == NULL) {
    // First check per-isolate compilation cache.
    maybe_result = compilation_cache->LookupScript(
        source, script_name, line_offset, column_offset, resource_options,
        context, language_mode);
    if (maybe_result.is_null() && FLAG_serialize_toplevel &&
        compile_options == ScriptCompiler::kConsumeCodeCache &&
        !isolate->debug()->is_loaded()) {
      // Then check cached code provided by embedder.
      HistogramTimerScope timer(isolate->counters()->compile_deserialize());
      // NOTE: this inner |result| intentionally shadows the outer one; on
      // success it is returned directly without touching the outer handle.
      Handle<SharedFunctionInfo> result;
      if (CodeSerializer::Deserialize(isolate, *cached_data, source)
              .ToHandle(&result)) {
        // Promote to per-isolate compilation cache.
        compilation_cache->PutScript(source, context, language_mode, result);
        return result;
      }
      // Deserializer failed. Fall through to compile.
    }
  }

  base::ElapsedTimer timer;
  if (FLAG_profile_deserialization && FLAG_serialize_toplevel &&
      compile_options == ScriptCompiler::kProduceCodeCache) {
    timer.Start();
  }

  if (!maybe_result.ToHandle(&result)) {
    // No cache entry found. Compile the script.

    // Create a script object describing the script to be compiled.
    Handle<Script> script = isolate->factory()->NewScript(source);
    if (natives == NATIVES_CODE) {
      script->set_type(Script::TYPE_NATIVE);
      script->set_hide_source(true);
    }
    if (!script_name.is_null()) {
      script->set_name(*script_name);
      script->set_line_offset(line_offset);
      script->set_column_offset(column_offset);
    }
    script->set_origin_options(resource_options);
    if (!source_map_url.is_null()) {
      script->set_source_mapping_url(*source_map_url);
    }

    // Compile the function and add it to the cache.
    Zone zone;
    ParseInfo parse_info(&zone, script);
    CompilationInfo info(&parse_info);
    if (FLAG_harmony_modules && is_module) {
      parse_info.set_module();
    } else {
      parse_info.set_global();
    }
    if (compile_options != ScriptCompiler::kNoCompileOptions) {
      parse_info.set_cached_data(cached_data);
    }
    parse_info.set_compile_options(compile_options);
    parse_info.set_extension(extension);
    parse_info.set_context(context);
    if (FLAG_serialize_toplevel &&
        compile_options == ScriptCompiler::kProduceCodeCache) {
      info.PrepareForSerializing();
    }

    parse_info.set_language_mode(
        static_cast<LanguageMode>(info.language_mode() | language_mode));
    result = CompileToplevel(&info);
    if (extension == NULL && !result.is_null()) {
      compilation_cache->PutScript(source, context, language_mode, result);
      if (FLAG_serialize_toplevel &&
          compile_options == ScriptCompiler::kProduceCodeCache) {
        // Serialize the compiled code for the embedder's code cache.
        HistogramTimerScope histogram_timer(
            isolate->counters()->compile_serialize());
        *cached_data = CodeSerializer::Serialize(isolate, result, source);
        if (FLAG_profile_deserialization) {
          PrintF("[Compiling and serializing took %0.3f ms]\n",
                 timer.Elapsed().InMillisecondsF());
        }
      }
    }

    if (result.is_null()) {
      isolate->ReportPendingMessages();
    } else {
      isolate->debug()->OnAfterCompile(script);
    }
  } else if (result->ic_age() != isolate->heap()->global_ic_age()) {
    // Cache hit from an older GC epoch: reset IC-related state first.
    result->ResetForNewContext(isolate->heap()->global_ic_age());
  }
  return result;
}
1556
1557
CompileStreamedScript(Handle<Script> script,ParseInfo * parse_info,int source_length)1558 Handle<SharedFunctionInfo> Compiler::CompileStreamedScript(
1559 Handle<Script> script, ParseInfo* parse_info, int source_length) {
1560 Isolate* isolate = script->GetIsolate();
1561 // TODO(titzer): increment the counters in caller.
1562 isolate->counters()->total_load_size()->Increment(source_length);
1563 isolate->counters()->total_compile_size()->Increment(source_length);
1564
1565 LanguageMode language_mode =
1566 construct_language_mode(FLAG_use_strict, FLAG_use_strong);
1567 parse_info->set_language_mode(
1568 static_cast<LanguageMode>(parse_info->language_mode() | language_mode));
1569
1570 CompilationInfo compile_info(parse_info);
1571
1572 // The source was parsed lazily, so compiling for debugging is not possible.
1573 DCHECK(!compile_info.is_debug());
1574
1575 Handle<SharedFunctionInfo> result = CompileToplevel(&compile_info);
1576 if (!result.is_null()) isolate->debug()->OnAfterCompile(script);
1577 return result;
1578 }
1579
1580
GetSharedFunctionInfo(FunctionLiteral * literal,Handle<Script> script,CompilationInfo * outer_info)1581 Handle<SharedFunctionInfo> Compiler::GetSharedFunctionInfo(
1582 FunctionLiteral* literal, Handle<Script> script,
1583 CompilationInfo* outer_info) {
1584 // Precondition: code has been parsed and scopes have been analyzed.
1585 Isolate* isolate = outer_info->isolate();
1586 MaybeHandle<SharedFunctionInfo> maybe_existing;
1587 if (outer_info->is_first_compile()) {
1588 // On the first compile, there are no existing shared function info for
1589 // inner functions yet, so do not try to find them. All bets are off for
1590 // live edit though.
1591 DCHECK(script->FindSharedFunctionInfo(literal).is_null() ||
1592 isolate->debug()->live_edit_enabled());
1593 } else {
1594 maybe_existing = script->FindSharedFunctionInfo(literal);
1595 }
1596 // We found an existing shared function info. If it's already compiled,
1597 // don't worry about compiling it, and simply return it. If it's not yet
1598 // compiled, continue to decide whether to eagerly compile.
  // Carry on if we are compiling eagerly to obtain code for debugging,
  // unless we already have code with debug break slots.
1601 Handle<SharedFunctionInfo> existing;
1602 if (maybe_existing.ToHandle(&existing) && existing->is_compiled()) {
1603 if (!outer_info->is_debug() || existing->HasDebugCode()) {
1604 return existing;
1605 }
1606 }
1607
1608 Zone zone;
1609 ParseInfo parse_info(&zone, script);
1610 CompilationInfo info(&parse_info);
1611 parse_info.set_literal(literal);
1612 parse_info.set_scope(literal->scope());
1613 parse_info.set_language_mode(literal->scope()->language_mode());
1614 if (outer_info->will_serialize()) info.PrepareForSerializing();
1615 if (outer_info->is_first_compile()) info.MarkAsFirstCompile();
1616 if (outer_info->is_debug()) info.MarkAsDebug();
1617
1618 LiveEditFunctionTracker live_edit_tracker(isolate, literal);
1619 // Determine if the function can be lazily compiled. This is necessary to
1620 // allow some of our builtin JS files to be lazily compiled. These
1621 // builtins cannot be handled lazily by the parser, since we have to know
1622 // if a function uses the special natives syntax, which is something the
1623 // parser records.
1624 // If the debugger requests compilation for break points, we cannot be
1625 // aggressive about lazy compilation, because it might trigger compilation
1626 // of functions without an outer context when setting a breakpoint through
1627 // Debug::FindSharedFunctionInfoInScript.
1628 bool allow_lazy_without_ctx = literal->AllowsLazyCompilationWithoutContext();
1629 // Compile eagerly for live edit. When compiling debug code, eagerly compile
1630 // unless we can lazily compile without the context.
1631 bool allow_lazy = literal->AllowsLazyCompilation() &&
1632 !LiveEditFunctionTracker::IsActive(isolate) &&
1633 (!info.is_debug() || allow_lazy_without_ctx);
1634
1635 bool lazy = FLAG_lazy && allow_lazy && !literal->should_eager_compile();
1636
1637 // Generate code
1638 Handle<ScopeInfo> scope_info;
1639 if (lazy) {
1640 Handle<Code> code = isolate->builtins()->CompileLazy();
1641 info.SetCode(code);
1642 // There's no need in theory for a lazy-compiled function to have a type
1643 // feedback vector, but some parts of the system expect all
1644 // SharedFunctionInfo instances to have one. The size of the vector depends
1645 // on how many feedback-needing nodes are in the tree, and when lazily
1646 // parsing we might not know that, if this function was never parsed before.
1647 // In that case the vector will be replaced the next time MakeCode is
1648 // called.
1649 info.EnsureFeedbackVector();
1650 scope_info = Handle<ScopeInfo>(ScopeInfo::Empty(isolate));
1651 } else if (Renumber(info.parse_info()) && GenerateBaselineCode(&info)) {
1652 // Code generation will ensure that the feedback vector is present and
1653 // appropriately sized.
1654 DCHECK(!info.code().is_null());
1655 scope_info = ScopeInfo::Create(info.isolate(), info.zone(), info.scope());
1656 if (literal->should_eager_compile() &&
1657 literal->should_be_used_once_hint()) {
1658 info.code()->MarkToBeExecutedOnce(isolate);
1659 }
1660 } else {
1661 return Handle<SharedFunctionInfo>::null();
1662 }
1663
1664 if (maybe_existing.is_null()) {
1665 // Create a shared function info object.
1666 Handle<SharedFunctionInfo> result =
1667 isolate->factory()->NewSharedFunctionInfo(
1668 literal->name(), literal->materialized_literal_count(),
1669 literal->kind(), info.code(), scope_info, info.feedback_vector());
1670 if (info.has_bytecode_array()) {
1671 DCHECK(result->function_data()->IsUndefined());
1672 result->set_function_data(*info.bytecode_array());
1673 }
1674
1675 SharedFunctionInfo::InitFromFunctionLiteral(result, literal);
1676 SharedFunctionInfo::SetScript(result, script);
1677 result->set_is_toplevel(false);
1678 // If the outer function has been compiled before, we cannot be sure that
1679 // shared function info for this function literal has been created for the
1680 // first time. It may have already been compiled previously.
1681 result->set_never_compiled(outer_info->is_first_compile() && lazy);
1682
1683 RecordFunctionCompilation(Logger::FUNCTION_TAG, &info, result);
1684 result->set_allows_lazy_compilation(literal->AllowsLazyCompilation());
1685 result->set_allows_lazy_compilation_without_context(allow_lazy_without_ctx);
1686
1687 // Set the expected number of properties for instances and return
1688 // the resulting function.
1689 SetExpectedNofPropertiesFromEstimate(result,
1690 literal->expected_property_count());
1691 live_edit_tracker.RecordFunctionInfo(result, literal, info.zone());
1692 return result;
1693 } else if (!lazy) {
1694 // Assert that we are not overwriting (possibly patched) debug code.
1695 DCHECK(!existing->HasDebugCode());
1696 existing->ReplaceCode(*info.code());
1697 existing->set_scope_info(*scope_info);
1698 existing->set_feedback_vector(*info.feedback_vector());
1699 }
1700 return existing;
1701 }
1702
1703
GetOptimizedCode(Handle<JSFunction> function,Handle<Code> current_code,ConcurrencyMode mode,BailoutId osr_ast_id,JavaScriptFrame * osr_frame)1704 MaybeHandle<Code> Compiler::GetOptimizedCode(Handle<JSFunction> function,
1705 Handle<Code> current_code,
1706 ConcurrencyMode mode,
1707 BailoutId osr_ast_id,
1708 JavaScriptFrame* osr_frame) {
1709 Isolate* isolate = function->GetIsolate();
1710 Handle<SharedFunctionInfo> shared(function->shared(), isolate);
1711 if (shared->HasDebugInfo()) return MaybeHandle<Code>();
1712
1713 Handle<Code> cached_code;
1714 if (GetCodeFromOptimizedCodeMap(
1715 function, osr_ast_id).ToHandle(&cached_code)) {
1716 if (FLAG_trace_opt) {
1717 PrintF("[found optimized code for ");
1718 function->ShortPrint();
1719 if (!osr_ast_id.IsNone()) {
1720 PrintF(" at OSR AST id %d", osr_ast_id.ToInt());
1721 }
1722 PrintF("]\n");
1723 }
1724 return cached_code;
1725 }
1726
1727 DCHECK(AllowCompilation::IsAllowed(isolate));
1728
1729 if (!shared->is_compiled() ||
1730 shared->scope_info() == ScopeInfo::Empty(isolate)) {
1731 // The function was never compiled. Compile it unoptimized first.
1732 // TODO(titzer): reuse the AST and scope info from this compile.
1733 CompilationInfoWithZone unoptimized(function);
1734 unoptimized.EnableDeoptimizationSupport();
1735 if (!GetUnoptimizedCodeCommon(&unoptimized).ToHandle(¤t_code)) {
1736 return MaybeHandle<Code>();
1737 }
1738 shared->ReplaceCode(*current_code);
1739 }
1740
1741 current_code->set_profiler_ticks(0);
1742
1743 // TODO(mstarzinger): We cannot properly deserialize a scope chain containing
1744 // an eval scope and hence would fail at parsing the eval source again.
1745 if (shared->disable_optimization_reason() == kEval) {
1746 return MaybeHandle<Code>();
1747 }
1748
1749 // TODO(mstarzinger): We cannot properly deserialize a scope chain for the
1750 // builtin context, hence Genesis::InstallExperimentalNatives would fail.
1751 if (shared->is_toplevel() && isolate->bootstrapper()->IsActive()) {
1752 return MaybeHandle<Code>();
1753 }
1754
1755 base::SmartPointer<CompilationInfo> info(
1756 new CompilationInfoWithZone(function));
1757 VMState<COMPILER> state(isolate);
1758 DCHECK(!isolate->has_pending_exception());
1759 PostponeInterruptsScope postpone(isolate);
1760
1761 info->SetOptimizing(osr_ast_id, current_code);
1762
1763 if (mode == CONCURRENT) {
1764 if (GetOptimizedCodeLater(info.get())) {
1765 info.Detach(); // The background recompile job owns this now.
1766 return isolate->builtins()->InOptimizationQueue();
1767 }
1768 } else {
1769 info->set_osr_frame(osr_frame);
1770 if (GetOptimizedCodeNow(info.get())) return info->code();
1771 }
1772
1773 if (isolate->has_pending_exception()) isolate->clear_pending_exception();
1774 return MaybeHandle<Code>();
1775 }
1776
1777
// Finalizes a recompile job that ran on a background thread: generates code
// on the main thread if the concurrent phase succeeded, records and caches
// the result, and returns the optimized code. Returns a null handle when
// the optimization attempt was abandoned for any reason.
Handle<Code> Compiler::GetConcurrentlyOptimizedCode(OptimizedCompileJob* job) {
  // Take ownership of compilation info. Deleting compilation info
  // also tears down the zone and the recompile job.
  base::SmartPointer<CompilationInfo> info(job->info());
  Isolate* isolate = info->isolate();

  VMState<COMPILER> state(isolate);
  TimerEventScope<TimerEventRecompileSynchronous> timer(info->isolate());

  Handle<SharedFunctionInfo> shared = info->shared_info();
  // Reset tick counter so the function does not immediately re-trigger the
  // runtime profiler.
  shared->code()->set_profiler_ticks(0);

  // NOTE(review): presumably the caller guarantees the debugger did not
  // instrument the function while the background job ran — confirm.
  DCHECK(!shared->HasDebugInfo());

  // Reasons the background result may still have to be discarded:
  // 1) Optimization on the concurrent thread may have failed.
  // 2) The function may have already been optimized by OSR. Simply continue.
  // Except when OSR already disabled optimization for some reason.
  // 3) The code may have already been invalidated due to dependency change.
  // 4) Code generation may have failed.
  if (job->last_status() == OptimizedCompileJob::SUCCEEDED) {
    if (shared->optimization_disabled()) {
      job->RetryOptimization(kOptimizationDisabled);
    } else if (info->dependencies()->HasAborted()) {
      job->RetryOptimization(kBailedOutDueToDependencyChange);
    } else if (job->GenerateCode() == OptimizedCompileJob::SUCCEEDED) {
      RecordFunctionCompilation(Logger::LAZY_COMPILE_TAG, info.get(), shared);
      // Only cache the new code if no entry for this (native context, OSR id)
      // pair was installed in the meantime.
      if (shared->SearchOptimizedCodeMap(info->context()->native_context(),
                                         info->osr_ast_id()).code == nullptr) {
        InsertCodeIntoOptimizedCodeMap(info.get());
      }
      if (FLAG_trace_opt) {
        PrintF("[completed optimizing ");
        info->closure()->ShortPrint();
        PrintF("]\n");
      }
      return Handle<Code>(*info->code());
    }
  }

  // All remaining paths are failures or retries; trace the bailout reason.
  DCHECK(job->last_status() != OptimizedCompileJob::SUCCEEDED);
  if (FLAG_trace_opt) {
    PrintF("[aborted optimizing ");
    info->closure()->ShortPrint();
    PrintF(" because: %s]\n", GetBailoutReason(info->bailout_reason()));
  }
  return Handle<Code>::null();
}
1825
1826
CompilationPhase(const char * name,CompilationInfo * info)1827 CompilationPhase::CompilationPhase(const char* name, CompilationInfo* info)
1828 : name_(name), info_(info) {
1829 if (FLAG_hydrogen_stats) {
1830 info_zone_start_allocation_size_ = info->zone()->allocation_size();
1831 timer_.Start();
1832 }
1833 }
1834
1835
~CompilationPhase()1836 CompilationPhase::~CompilationPhase() {
1837 if (FLAG_hydrogen_stats) {
1838 size_t size = zone()->allocation_size();
1839 size += info_->zone()->allocation_size() - info_zone_start_allocation_size_;
1840 isolate()->GetHStatistics()->SaveTiming(name_, timer_.Elapsed(), size);
1841 }
1842 }
1843
1844
ShouldProduceTraceOutput() const1845 bool CompilationPhase::ShouldProduceTraceOutput() const {
1846 // Trace if the appropriate trace flag is set and the phase name's first
1847 // character is in the FLAG_trace_phase command line parameter.
1848 AllowHandleDereference allow_deref;
1849 bool tracing_on = info()->IsStub()
1850 ? FLAG_trace_hydrogen_stubs
1851 : (FLAG_trace_hydrogen &&
1852 info()->closure()->PassesFilter(FLAG_trace_hydrogen_filter));
1853 return (tracing_on &&
1854 base::OS::StrChr(const_cast<char*>(FLAG_trace_phase), name_[0]) != NULL);
1855 }
1856
#if DEBUG
// Debug-only helper: pretty-prints the function literal's AST back to
// JavaScript-like source on stdout, for inspection in tests.
void CompilationInfo::PrintAstForTesting() {
  PrintF("--- Source from AST ---\n%s\n",
         PrettyPrinter(isolate()).PrintProgram(literal()));
}
#endif
1863 } // namespace internal
1864 } // namespace v8
1865