1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #include "src/full-codegen/full-codegen.h"
6 
7 #include "src/ast/ast-numbering.h"
8 #include "src/ast/ast.h"
9 #include "src/ast/prettyprinter.h"
10 #include "src/ast/scopes.h"
11 #include "src/code-factory.h"
12 #include "src/codegen.h"
13 #include "src/compilation-info.h"
14 #include "src/compiler.h"
15 #include "src/debug/debug.h"
16 #include "src/debug/liveedit.h"
17 #include "src/frames-inl.h"
18 #include "src/globals.h"
19 #include "src/isolate-inl.h"
20 #include "src/macro-assembler.h"
21 #include "src/snapshot/snapshot.h"
22 #include "src/tracing/trace-event.h"
23 
24 namespace v8 {
25 namespace internal {
26 
27 #define __ ACCESS_MASM(masm())
28 
// CompilationJob implementation that drives the full (non-optimizing)
// code generator.  Full-codegen touches the isolate and heap freely, so
// all real work happens on the main thread in ExecuteJobImpl; the
// prepare and finalize phases are no-ops.
class FullCodegenCompilationJob final : public CompilationJob {
 public:
  explicit FullCodegenCompilationJob(CompilationInfo* info)
      : CompilationJob(info->isolate(), info, "Full-Codegen") {}

  // Full-codegen cannot run off the main thread (see DCHECK below).
  bool can_execute_on_background_thread() const override { return false; }

  // Nothing to set up; code generation happens entirely in ExecuteJobImpl.
  CompilationJob::Status PrepareJobImpl() final { return SUCCEEDED; }

  CompilationJob::Status ExecuteJobImpl() final {
    // Must run on the isolate's own thread.
    DCHECK(ThreadId::Current().Equals(isolate()->thread_id()));
    return FullCodeGenerator::MakeCode(info(), stack_limit()) ? SUCCEEDED
                                                              : FAILED;
  }

  // MakeCode already installs the code on the CompilationInfo, so there
  // is nothing left to finalize.
  CompilationJob::Status FinalizeJobImpl() final { return SUCCEEDED; }
};
46 
// Constructs a generator that emits code for |info| through |masm|.
// |stack_limit| bounds the AST-walking recursion (see Initialize).
FullCodeGenerator::FullCodeGenerator(MacroAssembler* masm,
                                     CompilationInfo* info,
                                     uintptr_t stack_limit)
    : masm_(masm),
      info_(info),
      isolate_(info->isolate()),
      zone_(info->zone()),
      scope_(info->scope()),
      nesting_stack_(NULL),
      loop_depth_(0),
      operand_stack_depth_(0),
      globals_(NULL),
      context_(NULL),
      // Pre-size the bailout table to one slot per AST node when
      // deoptimization support is requested; otherwise keep it empty.
      bailout_entries_(info->HasDeoptimizationSupport()
                           ? info->literal()->ast_node_count()
                           : 0,
                       info->zone()),
      back_edges_(2, info->zone()),
      handler_table_(info->zone()),
      source_position_table_builder_(info->zone(),
                                     info->SourcePositionRecordingMode()),
      ic_total_count_(0) {
  // Full-codegen compiles whole functions, never code stubs.
  DCHECK(!info->IsStub());
  Initialize(stack_limit);
}
72 
73 // static
NewCompilationJob(CompilationInfo * info)74 CompilationJob* FullCodeGenerator::NewCompilationJob(CompilationInfo* info) {
75   return new FullCodegenCompilationJob(info);
76 }
77 
78 // static
MakeCode(CompilationInfo * info)79 bool FullCodeGenerator::MakeCode(CompilationInfo* info) {
80   return MakeCode(info, info->isolate()->stack_guard()->real_climit());
81 }
82 
83 // static
MakeCode(CompilationInfo * info,uintptr_t stack_limit)84 bool FullCodeGenerator::MakeCode(CompilationInfo* info, uintptr_t stack_limit) {
85   Isolate* isolate = info->isolate();
86 
87   DCHECK(!FLAG_minimal);
88   RuntimeCallTimerScope runtimeTimer(isolate,
89                                      &RuntimeCallStats::CompileFullCode);
90   TimerEventScope<TimerEventCompileFullCode> timer(info->isolate());
91   TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"), "V8.CompileFullCode");
92 
93   Handle<Script> script = info->script();
94   if (!script->IsUndefined(isolate) &&
95       !script->source()->IsUndefined(isolate)) {
96     int len = String::cast(script->source())->length();
97     isolate->counters()->total_full_codegen_source_size()->Increment(len);
98   }
99   CodeGenerator::MakeCodePrologue(info, "full");
100   const int kInitialBufferSize = 4 * KB;
101   MacroAssembler masm(info->isolate(), NULL, kInitialBufferSize,
102                       CodeObjectRequired::kYes);
103   if (info->will_serialize()) masm.enable_serializer();
104 
105   FullCodeGenerator cgen(&masm, info, stack_limit);
106   cgen.Generate();
107   if (cgen.HasStackOverflow()) {
108     DCHECK(!isolate->has_pending_exception());
109     return false;
110   }
111   unsigned table_offset = cgen.EmitBackEdgeTable();
112 
113   Handle<Code> code =
114       CodeGenerator::MakeCodeEpilogue(&masm, nullptr, info, masm.CodeObject());
115   cgen.PopulateDeoptimizationData(code);
116   cgen.PopulateTypeFeedbackInfo(code);
117   cgen.PopulateHandlerTable(code);
118   code->set_has_deoptimization_support(info->HasDeoptimizationSupport());
119   code->set_has_reloc_info_for_serialization(info->will_serialize());
120   code->set_allow_osr_at_loop_nesting_level(0);
121   code->set_profiler_ticks(0);
122   code->set_back_edge_table_offset(table_offset);
123   Handle<ByteArray> source_positions =
124       cgen.source_position_table_builder_.ToSourcePositionTable(
125           isolate, Handle<AbstractCode>::cast(code));
126   code->set_source_position_table(*source_positions);
127   CodeGenerator::PrintCode(code, info);
128   info->SetCode(code);
129 
130 #ifdef DEBUG
131   // Check that no context-specific object has been embedded.
132   code->VerifyEmbeddedObjects(Code::kNoContextSpecificPointers);
133 #endif  // DEBUG
134   return true;
135 }
136 
137 
// Emits the collected back-edge entries as a data table at the current
// (pointer-aligned) pc and returns the table's code-relative offset.
unsigned FullCodeGenerator::EmitBackEdgeTable() {
  // The back edge table consists of a length (in number of entries)
  // field, and then a sequence of entries.  Each entry is a pair of AST id
  // and code-relative pc offset.
  masm()->Align(kPointerSize);
  unsigned offset = masm()->pc_offset();
  unsigned length = back_edges_.length();
  __ dd(length);
  for (unsigned i = 0; i < length; ++i) {
    // Entry layout: AST id, pc offset, loop depth (one word each).
    __ dd(back_edges_[i].id.ToInt());
    __ dd(back_edges_[i].pc);
    __ dd(back_edges_[i].loop_depth);
  }
  return offset;
}
153 
154 
// Copies the recorded bailout entries into a tenured
// DeoptimizationOutputData array attached to |code|.  No-op when the
// function was compiled without deoptimization support.
void FullCodeGenerator::PopulateDeoptimizationData(Handle<Code> code) {
  // Fill in the deoptimization information.
  DCHECK(info_->HasDeoptimizationSupport() || bailout_entries_.is_empty());
  if (!info_->HasDeoptimizationSupport()) return;
  int length = bailout_entries_.length();
  Handle<DeoptimizationOutputData> data =
      DeoptimizationOutputData::New(isolate(), length, TENURED);
  for (int i = 0; i < length; i++) {
    data->SetAstId(i, bailout_entries_[i].id);
    // pc_and_state was checked to be Smi-encodable when recorded.
    data->SetPcAndState(i, Smi::FromInt(bailout_entries_[i].pc_and_state));
  }
  code->set_deoptimization_data(*data);
}
168 
169 
// Attaches a fresh TypeFeedbackInfo carrying the total IC count that
// was accumulated while emitting the function.
void FullCodeGenerator::PopulateTypeFeedbackInfo(Handle<Code> code) {
  Handle<TypeFeedbackInfo> info = isolate()->factory()->NewTypeFeedbackInfo();
  info->set_ic_total_count(ic_total_count_);
  // The info object must be old-space so the code object may point at it.
  DCHECK(!isolate()->heap()->InNewSpace(*info));
  code->set_type_feedback_info(*info);
}
176 
177 
// Converts the locally collected exception-handler ranges into a
// tenured HandlerTable fixed array and attaches it to |code|.
void FullCodeGenerator::PopulateHandlerTable(Handle<Code> code) {
  int handler_table_size = static_cast<int>(handler_table_.size());
  Handle<HandlerTable> table =
      Handle<HandlerTable>::cast(isolate()->factory()->NewFixedArray(
          HandlerTable::LengthForRange(handler_table_size), TENURED));
  for (int i = 0; i < handler_table_size; ++i) {
    table->SetRangeStart(i, handler_table_[i].range_start);
    table->SetRangeEnd(i, handler_table_[i].range_end);
    table->SetRangeHandler(i, handler_table_[i].handler_offset,
                           handler_table_[i].catch_prediction);
    // The range's extra data word carries the operand stack depth.
    table->SetRangeData(i, handler_table_[i].stack_depth);
  }
  code->set_handler_table(*table);
}
192 
193 
NewHandlerTableEntry()194 int FullCodeGenerator::NewHandlerTableEntry() {
195   int index = static_cast<int>(handler_table_.size());
196   HandlerTableEntry entry = {0, 0, 0, 0, HandlerTable::UNCAUGHT};
197   handler_table_.push_back(entry);
198   return index;
199 }
200 
201 
MustCreateObjectLiteralWithRuntime(ObjectLiteral * expr) const202 bool FullCodeGenerator::MustCreateObjectLiteralWithRuntime(
203     ObjectLiteral* expr) const {
204   return masm()->serializer_enabled() ||
205          !FastCloneShallowObjectStub::IsSupported(expr);
206 }
207 
208 
MustCreateArrayLiteralWithRuntime(ArrayLiteral * expr) const209 bool FullCodeGenerator::MustCreateArrayLiteralWithRuntime(
210     ArrayLiteral* expr) const {
211   return expr->depth() > 1 ||
212          expr->values()->length() > JSArray::kInitialMaxFastElementArray;
213 }
214 
// Second-phase construction: arms the AST visitor's stack-overflow
// check and configures assembler-wide emission flags.
void FullCodeGenerator::Initialize(uintptr_t stack_limit) {
  InitializeAstVisitor(stack_limit);
  masm_->set_emit_debug_code(FLAG_debug_code);
  // Code size must be predictable so back-edge/bailout offsets stay stable.
  masm_->set_predictable_code_size(true);
}
220 
// Records a bailout point for |node| at the current pc, keyed by the
// node's AST id.
void FullCodeGenerator::PrepareForBailout(Expression* node,
                                          BailoutState state) {
  PrepareForBailoutForId(node->id(), state);
}
225 
// Emits a call to the given IC stub and bumps the per-function IC
// counter used for type-feedback bookkeeping.
void FullCodeGenerator::CallIC(Handle<Code> code, TypeFeedbackId ast_id) {
  ic_total_count_++;
  __ Call(code, RelocInfo::CODE_TARGET, ast_id);
}
230 
// Emits a named-property load: moves |name| and the feedback slot into
// the LoadIC's descriptor registers and calls the LoadIC stub.
void FullCodeGenerator::CallLoadIC(FeedbackVectorSlot slot,
                                   Handle<Object> name) {
  DCHECK(name->IsName());
  __ Move(LoadDescriptor::NameRegister(), name);

  EmitLoadSlot(LoadDescriptor::SlotRegister(), slot);

  Handle<Code> code = CodeFactory::LoadIC(isolate()).code();
  __ Call(code, RelocInfo::CODE_TARGET);
  // The TurboFan-based load IC does not preserve the context register.
  if (FLAG_tf_load_ic_stub) RestoreContext();
}
242 
// Emits a named-property store via the StoreIC stub.  Depending on the
// platform's store descriptor, the value and feedback slot are passed
// either on the stack or in registers.
void FullCodeGenerator::CallStoreIC(FeedbackVectorSlot slot,
                                    Handle<Object> name) {
  DCHECK(name->IsName());
  __ Move(StoreDescriptor::NameRegister(), name);

  // If the last args go on the stack, it must be exactly value + slot.
  STATIC_ASSERT(!StoreDescriptor::kPassLastArgsOnStack ||
                StoreDescriptor::kStackArgumentsCount == 2);
  if (StoreDescriptor::kPassLastArgsOnStack) {
    __ Push(StoreDescriptor::ValueRegister());
    EmitPushSlot(slot);
  } else {
    EmitLoadSlot(StoreDescriptor::SlotRegister(), slot);
  }

  Handle<Code> code = CodeFactory::StoreIC(isolate(), language_mode()).code();
  __ Call(code, RelocInfo::CODE_TARGET);
  // The stub may clobber the context register; reload it from the frame.
  RestoreContext();
}
261 
// Emits a keyed-property store via the KeyedStoreIC stub; argument
// passing mirrors CallStoreIC (stack vs. registers per descriptor).
void FullCodeGenerator::CallKeyedStoreIC(FeedbackVectorSlot slot) {
  // If the last args go on the stack, it must be exactly value + slot.
  STATIC_ASSERT(!StoreDescriptor::kPassLastArgsOnStack ||
                StoreDescriptor::kStackArgumentsCount == 2);
  if (StoreDescriptor::kPassLastArgsOnStack) {
    __ Push(StoreDescriptor::ValueRegister());
    EmitPushSlot(slot);
  } else {
    EmitLoadSlot(StoreDescriptor::SlotRegister(), slot);
  }

  Handle<Code> code =
      CodeFactory::KeyedStoreIC(isolate(), language_mode()).code();
  __ Call(code, RelocInfo::CODE_TARGET);
  // The stub may clobber the context register; reload it from the frame.
  RestoreContext();
}
277 
// Records a bailout point at a JS call's return site so an optimizing
// compiler can rebuild the frame for inlined calls.
void FullCodeGenerator::RecordJSReturnSite(Call* call) {
  // We record the offset of the function return so we can rebuild the frame
  // if the function was inlined, i.e., this is the return address in the
  // inlined function's frame.
  //
  // The bailout state is ignored.  We defensively set it to TOS_REGISTER, which
  // is the real state of the unoptimized code at the return site.
  PrepareForBailoutForId(call->ReturnId(), BailoutState::TOS_REGISTER);
#ifdef DEBUG
  // In debug builds, mark the return so we can verify that this function
  // was called.
  DCHECK(!call->return_is_recorded_);
  call->return_is_recorded_ = true;
#endif
}
293 
// Records a (AST id, pc, state) bailout entry at the current assembler
// offset.  Each AST id may be recorded at most once per function.
void FullCodeGenerator::PrepareForBailoutForId(BailoutId id,
                                               BailoutState state) {
  // There's no need to prepare this code for bailouts from already optimized
  // code or code that can't be optimized.
  if (!info_->HasDeoptimizationSupport()) return;
  // Pack the pc offset and the register state into one Smi-sized word.
  unsigned pc_and_state =
      BailoutStateField::encode(state) | PcField::encode(masm_->pc_offset());
  DCHECK(Smi::IsValid(pc_and_state));
#ifdef DEBUG
  // Each AST id must map to a unique bailout entry.
  for (int i = 0; i < bailout_entries_.length(); ++i) {
    DCHECK(bailout_entries_[i].id != id);
  }
#endif
  BailoutEntry entry = { id, pc_and_state };
  bailout_entries_.Add(entry, zone());
}
310 
311 
// Records a loop back edge (for interrupt checks / OSR) at the current
// pc, clamping the loop depth to the maximum representable marker.
void FullCodeGenerator::RecordBackEdge(BailoutId ast_id) {
  // The pc offset does not need to be encoded and packed together with a state.
  DCHECK(masm_->pc_offset() > 0);
  DCHECK(loop_depth() > 0);
  uint8_t depth = Min(loop_depth(), AbstractCode::kMaxLoopNestingMarker);
  BackEdgeEntry entry =
      { ast_id, static_cast<unsigned>(masm_->pc_offset()), depth };
  back_edges_.Add(entry, zone());
}
321 
322 
ShouldInlineSmiCase(Token::Value op)323 bool FullCodeGenerator::ShouldInlineSmiCase(Token::Value op) {
324   // Inline smi case inside loops, but not division and modulo which
325   // are too complicated and take up too much space.
326   if (op == Token::DIV ||op == Token::MOD) return false;
327   if (FLAG_always_inline_smi_code) return true;
328   return loop_depth_ > 0;
329 }
330 
331 
// In an effect context the variable's value is not needed; nothing to do.
void FullCodeGenerator::EffectContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
}
335 
336 
// Loads the variable's value into the accumulator (result) register.
void FullCodeGenerator::AccumulatorValueContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  codegen()->GetVar(result_register(), var);
}
341 
342 
// Loads the variable into the accumulator and branches on its truthiness.
void FullCodeGenerator::TestContext::Plug(Variable* var) const {
  DCHECK(var->IsStackAllocated() || var->IsContextSlot());
  // For simplicity we always test the accumulator register.
  codegen()->GetVar(result_register(), var);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}
350 
351 
// In an effect context the register's value is discarded; nothing to do.
void FullCodeGenerator::EffectContext::Plug(Register reg) const {
}
354 
355 
// Moves the value from |reg| into the accumulator (result) register.
void FullCodeGenerator::AccumulatorValueContext::Plug(Register reg) const {
  __ Move(result_register(), reg);
}
359 
360 
// Pushes |reg| onto the operand stack (with depth tracking).
void FullCodeGenerator::StackValueContext::Plug(Register reg) const {
  codegen()->PushOperand(reg);
}
364 
365 
// Moves |reg| into the accumulator and branches on its truthiness.
void FullCodeGenerator::TestContext::Plug(Register reg) const {
  // For simplicity we always test the accumulator register.
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}
372 
373 
// A boolean constant has no effect; nothing to do in an effect context.
void FullCodeGenerator::EffectContext::Plug(bool flag) const {}
375 
// Drops |count| operands; the value in |reg| is discarded (effect only).
void FullCodeGenerator::EffectContext::DropAndPlug(int count,
                                                   Register reg) const {
  DCHECK(count > 0);
  codegen()->DropOperands(count);
}
381 
// Drops |count| operands, then leaves |reg|'s value in the accumulator.
void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
    int count, Register reg) const {
  DCHECK(count > 0);
  codegen()->DropOperands(count);
  __ Move(result_register(), reg);
}
388 
// Drops |count| operands, then branches on the truthiness of |reg|.
void FullCodeGenerator::TestContext::DropAndPlug(int count,
                                                 Register reg) const {
  DCHECK(count > 0);
  // For simplicity we always test the accumulator register.
  codegen()->DropOperands(count);
  __ Move(result_register(), reg);
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}
398 
// The value on top of the operand stack is not needed; just drop it.
void FullCodeGenerator::EffectContext::PlugTOS() const {
  codegen()->DropOperands(1);
}
402 
403 
// Pops the top of the operand stack into the accumulator register.
void FullCodeGenerator::AccumulatorValueContext::PlugTOS() const {
  codegen()->PopOperand(result_register());
}
407 
408 
// The value is already on the operand stack where it belongs; nothing to do.
void FullCodeGenerator::StackValueContext::PlugTOS() const {
}
411 
412 
// Pops the top of the operand stack and branches on its truthiness.
void FullCodeGenerator::TestContext::PlugTOS() const {
  // For simplicity we always test the accumulator register.
  codegen()->PopOperand(result_register());
  codegen()->PrepareForBailoutBeforeSplit(condition(), false, NULL, NULL);
  codegen()->DoTest(this);
}
419 
420 
// Sets up branch targets for a test whose outcome is not observed.
void FullCodeGenerator::EffectContext::PrepareTest(
    Label* materialize_true,
    Label* materialize_false,
    Label** if_true,
    Label** if_false,
    Label** fall_through) const {
  // In an effect context, the true and the false case branch to the
  // same label.
  *if_true = *if_false = *fall_through = materialize_true;
}
431 
432 
// Sets up branch targets so true/false values can be materialized into
// the accumulator; the true case falls through.
void FullCodeGenerator::AccumulatorValueContext::PrepareTest(
    Label* materialize_true,
    Label* materialize_false,
    Label** if_true,
    Label** if_false,
    Label** fall_through) const {
  *if_true = *fall_through = materialize_true;
  *if_false = materialize_false;
}
442 
443 
// Sets up branch targets so true/false values can be materialized onto
// the operand stack; the true case falls through.
void FullCodeGenerator::StackValueContext::PrepareTest(
    Label* materialize_true,
    Label* materialize_false,
    Label** if_true,
    Label** if_false,
    Label** fall_through) const {
  *if_true = *fall_through = materialize_true;
  *if_false = materialize_false;
}
453 
454 
// In a test context the caller already supplied the control-flow
// targets; hand those out unchanged (no values are materialized).
void FullCodeGenerator::TestContext::PrepareTest(
    Label* materialize_true,
    Label* materialize_false,
    Label** if_true,
    Label** if_false,
    Label** fall_through) const {
  *if_true = true_label_;
  *if_false = false_label_;
  *fall_through = fall_through_;
}
465 
466 
// Convenience wrapper: emits the truthiness test using the condition
// and branch targets stored in the given test context.
void FullCodeGenerator::DoTest(const TestContext* context) {
  DoTest(context->condition(),
         context->true_label(),
         context->false_label(),
         context->fall_through());
}
473 
// Visits all declarations in a scope.  Global declarations encountered
// during the visit accumulate in a fresh list; if any were collected,
// they are batched into a fixed array and declared in one go.
void FullCodeGenerator::VisitDeclarations(Declaration::List* declarations) {
  // Save/restore the outer globals list so nested scopes each collect
  // into their own list.
  ZoneList<Handle<Object> >* saved_globals = globals_;
  ZoneList<Handle<Object> > inner_globals(10, zone());
  globals_ = &inner_globals;

  AstVisitor<FullCodeGenerator>::VisitDeclarations(declarations);

  if (!globals_->is_empty()) {
    // Invoke the platform-dependent code generator to do the actual
    // declaration of the global functions and variables.
    Handle<FixedArray> array =
       isolate()->factory()->NewFixedArray(globals_->length(), TENURED);
    for (int i = 0; i < globals_->length(); ++i)
      array->set(i, *globals_->at(i));
    DeclareGlobals(array);
  }

  globals_ = saved_globals;
}
493 
494 
// A variable proxy in expression position is simply a variable load.
void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");
  EmitVariableLoad(expr);
}
499 
// Loads a global (or dynamic-global lookup) variable via the
// LoadGlobalIC stub, passing the proxy's feedback slot.
void FullCodeGenerator::EmitGlobalVariableLoad(VariableProxy* proxy,
                                               TypeofMode typeof_mode) {
#ifdef DEBUG
  // Only unallocated globals or DYNAMIC_GLOBAL lookup slots reach here.
  Variable* var = proxy->var();
  DCHECK(var->IsUnallocated() ||
         (var->IsLookupSlot() && var->mode() == DYNAMIC_GLOBAL));
#endif
  EmitLoadSlot(LoadGlobalDescriptor::SlotRegister(),
               proxy->VariableFeedbackSlot());
  Handle<Code> code = CodeFactory::LoadGlobalIC(isolate(), typeof_mode).code();
  __ Call(code, RelocInfo::CODE_TARGET);
}
512 
// A sloppy-mode block-scoped function statement is compiled by simply
// visiting its wrapped statement.
void FullCodeGenerator::VisitSloppyBlockFunctionStatement(
    SloppyBlockFunctionStatement* declaration) {
  Visit(declaration->statement());
}
517 
518 
// Returns the flag bits to pass along with a DeclareGlobals call,
// as computed by the compilation info.
int FullCodeGenerator::DeclareGlobalsFlags() {
  return info_->GetDeclareGlobalsFlags();
}
522 
// Pushes a heap object onto the operand stack, tracking the depth.
void FullCodeGenerator::PushOperand(Handle<Object> handle) {
  OperandStackDepthIncrement(1);
  __ Push(handle);
}
527 
// Pushes a Smi onto the operand stack, tracking the depth.
void FullCodeGenerator::PushOperand(Smi* smi) {
  OperandStackDepthIncrement(1);
  __ Push(smi);
}
532 
// Pushes a register onto the operand stack, tracking the depth.
void FullCodeGenerator::PushOperand(Register reg) {
  OperandStackDepthIncrement(1);
  __ Push(reg);
}
537 
// Pops the top of the operand stack into |reg|, tracking the depth.
void FullCodeGenerator::PopOperand(Register reg) {
  OperandStackDepthDecrement(1);
  __ Pop(reg);
}
542 
// Discards |count| values from the operand stack, tracking the depth.
void FullCodeGenerator::DropOperands(int count) {
  OperandStackDepthDecrement(count);
  __ Drop(count);
}
547 
// Calls a runtime function whose arguments are on the operand stack;
// the runtime consumes them, so the tracked depth drops by its arity.
void FullCodeGenerator::CallRuntimeWithOperands(Runtime::FunctionId id) {
  OperandStackDepthDecrement(Runtime::FunctionForId(id)->nargs);
  __ CallRuntime(id);
}
552 
// Bumps the tracked operand stack depth by |count| (>= 0).  The depth
// invariants are only checked when no stack overflow has occurred.
void FullCodeGenerator::OperandStackDepthIncrement(int count) {
  DCHECK_IMPLIES(!HasStackOverflow(), operand_stack_depth_ >= 0);
  DCHECK_GE(count, 0);
  operand_stack_depth_ += count;
}
558 
// Lowers the tracked operand stack depth by |count| (>= 0); the stack
// must be at least that deep unless a stack overflow occurred.
void FullCodeGenerator::OperandStackDepthDecrement(int count) {
  DCHECK_IMPLIES(!HasStackOverflow(), operand_stack_depth_ >= count);
  DCHECK_GE(count, 0);
  operand_stack_depth_ -= count;
}
564 
// Emits the %_SubString intrinsic: pushes the three arguments (string,
// from, to) and calls the SubString stub; the result goes to the
// expression context.
void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
  // Load the arguments on the stack and call the stub.
  SubStringStub stub(isolate());
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 3);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  __ CallStub(&stub);
  RestoreContext();
  // The stub consumed the three stack arguments.
  OperandStackDepthDecrement(3);
  context()->Plug(result_register());
}
578 
579 
// Emits the %_RegExpExec intrinsic: pushes the four arguments and calls
// the RegExpExec stub; the result goes to the expression context.
void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
  // Load the arguments on the stack and call the stub.
  RegExpExecStub stub(isolate());
  ZoneList<Expression*>* args = expr->arguments();
  DCHECK(args->length() == 4);
  VisitForStackValue(args->at(0));
  VisitForStackValue(args->at(1));
  VisitForStackValue(args->at(2));
  VisitForStackValue(args->at(3));
  __ CallStub(&stub);
  // The stub consumed the four stack arguments.
  OperandStackDepthDecrement(4);
  context()->Plug(result_register());
}
593 
594 
// Lowers an intrinsic call to a stub call: evaluates the arguments,
// marshals them into the registers demanded by |callable|'s descriptor,
// calls the stub, and plugs the result into the expression context.
void FullCodeGenerator::EmitIntrinsicAsStubCall(CallRuntime* expr,
                                                const Callable& callable) {
  ZoneList<Expression*>* args = expr->arguments();
  int param_count = callable.descriptor().GetRegisterParameterCount();
  DCHECK_EQ(args->length(), param_count);

  if (param_count > 0) {
    int last = param_count - 1;
    // Put all but last arguments on stack.
    for (int i = 0; i < last; i++) {
      VisitForStackValue(args->at(i));
    }
    // The last argument goes to the accumulator.
    VisitForAccumulatorValue(args->at(last));

    // Move the arguments to the registers, as required by the stub.
    __ Move(callable.descriptor().GetRegisterParameter(last),
            result_register());
    // Pop the remaining arguments in reverse so each lands in its
    // descriptor register.
    for (int i = last; i-- > 0;) {
      PopOperand(callable.descriptor().GetRegisterParameter(i));
    }
  }
  __ Call(callable.code(), RelocInfo::CODE_TARGET);

  // Reload the context register after the call as i.e. TurboFan code stubs
  // won't preserve the context register.
  LoadFromFrameField(StandardFrameConstants::kContextOffset,
                     context_register());
  context()->Plug(result_register());
}
625 
// %_NewObject intrinsic, lowered to the FastNewObject stub.
void FullCodeGenerator::EmitNewObject(CallRuntime* expr) {
  EmitIntrinsicAsStubCall(expr, CodeFactory::FastNewObject(isolate()));
}
629 
// %_NumberToString intrinsic, lowered to the NumberToString stub.
void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
  EmitIntrinsicAsStubCall(expr, CodeFactory::NumberToString(isolate()));
}
633 
634 
// %_ToString intrinsic, lowered to the ToString stub.
void FullCodeGenerator::EmitToString(CallRuntime* expr) {
  EmitIntrinsicAsStubCall(expr, CodeFactory::ToString(isolate()));
}
638 
639 
// %_ToLength intrinsic, lowered to the ToLength stub.
void FullCodeGenerator::EmitToLength(CallRuntime* expr) {
  EmitIntrinsicAsStubCall(expr, CodeFactory::ToLength(isolate()));
}
643 
// %_ToInteger intrinsic, lowered to the ToInteger stub.
void FullCodeGenerator::EmitToInteger(CallRuntime* expr) {
  EmitIntrinsicAsStubCall(expr, CodeFactory::ToInteger(isolate()));
}
647 
// %_ToNumber intrinsic, lowered to the ToNumber stub.
void FullCodeGenerator::EmitToNumber(CallRuntime* expr) {
  EmitIntrinsicAsStubCall(expr, CodeFactory::ToNumber(isolate()));
}
651 
652 
// %_ToObject intrinsic, lowered to the ToObject stub.
void FullCodeGenerator::EmitToObject(CallRuntime* expr) {
  EmitIntrinsicAsStubCall(expr, CodeFactory::ToObject(isolate()));
}
656 
657 
// Emits a HasProperty stub call with its two operands popped from the
// operand stack into the descriptor registers (argument 1 first, since
// it was pushed last).
void FullCodeGenerator::EmitHasProperty() {
  Callable callable = CodeFactory::HasProperty(isolate());
  PopOperand(callable.descriptor().GetRegisterParameter(1));
  PopOperand(callable.descriptor().GetRegisterParameter(0));
  __ Call(callable.code(), RelocInfo::CODE_TARGET);
  // The stub may clobber the context register; reload it from the frame.
  RestoreContext();
}
665 
// Records a statement-level source position (is_statement == true) at
// the current pc in the source position table.
void FullCodeGenerator::RecordStatementPosition(int pos) {
  DCHECK_NE(kNoSourcePosition, pos);
  source_position_table_builder_.AddPosition(masm_->pc_offset(),
                                             SourcePosition(pos), true);
}
671 
// Records an expression-level source position (is_statement == false)
// at the current pc in the source position table.
void FullCodeGenerator::RecordPosition(int pos) {
  DCHECK_NE(kNoSourcePosition, pos);
  source_position_table_builder_.AddPosition(masm_->pc_offset(),
                                             SourcePosition(pos), false);
}
677 
678 
// Records the function's start position as the position of the code
// emitted so far (the prologue).
void FullCodeGenerator::SetFunctionPosition(FunctionLiteral* fun) {
  RecordPosition(fun->start_position());
}
682 
683 
// Records the return position of |fun| and, when debugging, emits a
// debug break slot so the debugger can stop at the return.
void FullCodeGenerator::SetReturnPosition(FunctionLiteral* fun) {
  // For default constructors, start position equals end position, and there
  // is no source code besides the class literal.
  RecordStatementPosition(fun->return_position());
  if (info_->is_debug()) {
    // Always emit a debug break slot before a return.
    DebugCodegen::GenerateSlot(masm_, RelocInfo::DEBUG_BREAK_SLOT_AT_RETURN);
  }
}
693 
694 
// Records a statement's source position, optionally emitting a debug
// break slot (skipped for debugger statements, which break on their own).
void FullCodeGenerator::SetStatementPosition(
    Statement* stmt, FullCodeGenerator::InsertBreak insert_break) {
  if (stmt->position() == kNoSourcePosition) return;
  RecordStatementPosition(stmt->position());
  if (insert_break == INSERT_BREAK && info_->is_debug() &&
      !stmt->IsDebuggerStatement()) {
    DebugCodegen::GenerateSlot(masm_, RelocInfo::DEBUG_BREAK_SLOT_AT_POSITION);
  }
}
704 
// Records an expression's source position (no debug break slot).
void FullCodeGenerator::SetExpressionPosition(Expression* expr) {
  if (expr->position() == kNoSourcePosition) return;
  RecordPosition(expr->position());
}
709 
710 
// Records an expression's position as a statement position (used when
// an expression acts as a breakable location) and, when debugging,
// emits a debug break slot.
void FullCodeGenerator::SetExpressionAsStatementPosition(Expression* expr) {
  if (expr->position() == kNoSourcePosition) return;
  RecordStatementPosition(expr->position());
  if (info_->is_debug()) {
    DebugCodegen::GenerateSlot(masm_, RelocInfo::DEBUG_BREAK_SLOT_AT_POSITION);
  }
}
718 
// Records a call expression's position and, when debugging, emits a
// debug break slot tagged as a call (or tail call) site.
void FullCodeGenerator::SetCallPosition(Expression* expr,
                                        TailCallMode tail_call_mode) {
  if (expr->position() == kNoSourcePosition) return;
  RecordPosition(expr->position());
  if (info_->is_debug()) {
    RelocInfo::Mode mode = (tail_call_mode == TailCallMode::kAllow)
                               ? RelocInfo::DEBUG_BREAK_SLOT_AT_TAIL_CALL
                               : RelocInfo::DEBUG_BREAK_SLOT_AT_CALL;
    // Always emit a debug break slot before a call.
    DebugCodegen::GenerateSlot(masm_, mode);
  }
}
731 
732 
// Super property references are unsupported here; throw at runtime.
void FullCodeGenerator::VisitSuperPropertyReference(
    SuperPropertyReference* super) {
  __ CallRuntime(Runtime::kThrowUnsupportedSuperError);
  // Even though this expression doesn't produce a value, we need to simulate
  // plugging of the value context to ensure stack depth tracking is in sync.
  if (context()->IsStackValue()) OperandStackDepthIncrement(1);
}
740 
741 
// Super call references never reach the visitor on their own.
void FullCodeGenerator::VisitSuperCallReference(SuperCallReference* super) {
  // Handled by VisitCall
  UNREACHABLE();
}
746 
747 
// %_DebugBreakInOptimizedCode is a no-op in unoptimized code: it just
// produces Smi zero.
void FullCodeGenerator::EmitDebugBreakInOptimizedCode(CallRuntime* expr) {
  context()->Plug(handle(Smi::kZero, isolate()));
}
751 
752 
VisitBinaryOperation(BinaryOperation * expr)753 void FullCodeGenerator::VisitBinaryOperation(BinaryOperation* expr) {
754   switch (expr->op()) {
755     case Token::COMMA:
756       return VisitComma(expr);
757     case Token::OR:
758     case Token::AND:
759       return VisitLogicalExpression(expr);
760     default:
761       return VisitArithmeticExpression(expr);
762   }
763 }
764 
765 
// Re-visits |expr| in a fresh context of the same kind as the current
// one, so sub-expressions can be evaluated with identical plugging
// behavior without sharing context state.
void FullCodeGenerator::VisitInDuplicateContext(Expression* expr) {
  if (context()->IsEffect()) {
    VisitForEffect(expr);
  } else if (context()->IsAccumulatorValue()) {
    VisitForAccumulatorValue(expr);
  } else if (context()->IsStackValue()) {
    VisitForStackValue(expr);
  } else if (context()->IsTest()) {
    const TestContext* test = TestContext::cast(context());
    VisitForControl(expr, test->true_label(), test->false_label(),
                    test->fall_through());
  }
}
779 
780 
// Comma expression: evaluate the left side purely for effect, then the
// right side in (a duplicate of) the current context.
void FullCodeGenerator::VisitComma(BinaryOperation* expr) {
  Comment cmnt(masm_, "[ Comma");
  VisitForEffect(expr->left());
  VisitInDuplicateContext(expr->right());
}
786 
787 
// Short-circuiting || and &&. The left operand decides whether the right
// operand is evaluated at all; the code shape differs per context kind so
// that the operand stack depth and bailout points stay consistent on every
// path. In all cases a bailout is registered at RightId before the right
// operand is (possibly) evaluated.
void FullCodeGenerator::VisitLogicalExpression(BinaryOperation* expr) {
  bool is_logical_and = expr->op() == Token::AND;
  Comment cmnt(masm_, is_logical_and ? "[ Logical AND" :  "[ Logical OR");
  Expression* left = expr->left();
  Expression* right = expr->right();
  BailoutId right_id = expr->RightId();
  Label done;

  if (context()->IsTest()) {
    // Test context: branch straight to the enclosing test's labels when the
    // left operand short-circuits; otherwise fall into the right operand.
    Label eval_right;
    const TestContext* test = TestContext::cast(context());
    if (is_logical_and) {
      VisitForControl(left, &eval_right, test->false_label(), &eval_right);
    } else {
      VisitForControl(left, test->true_label(), &eval_right, &eval_right);
    }
    PrepareForBailoutForId(right_id, BailoutState::NO_REGISTERS);
    __ bind(&eval_right);

  } else if (context()->IsAccumulatorValue()) {
    VisitForAccumulatorValue(left);
    // We want the value in the accumulator for the test, and on the stack in
    // case we need it.
    __ Push(result_register());
    Label discard, restore;
    if (is_logical_and) {
      DoTest(left, &discard, &restore, &restore);
    } else {
      DoTest(left, &restore, &discard, &restore);
    }
    // Short-circuit: the left value is the result — restore it from the
    // stack into the accumulator and skip the right operand.
    __ bind(&restore);
    __ Pop(result_register());
    __ jmp(&done);
    // No short-circuit: the saved left value is dead, drop it and
    // evaluate the right operand instead.
    __ bind(&discard);
    __ Drop(1);
    PrepareForBailoutForId(right_id, BailoutState::NO_REGISTERS);

  } else if (context()->IsStackValue()) {
    VisitForAccumulatorValue(left);
    // We want the value in the accumulator for the test, and on the stack in
    // case we need it.
    __ Push(result_register());
    Label discard;
    // Short-circuit leaves the pushed left value on the stack as the
    // result (jump to done); otherwise drop it and evaluate the right.
    if (is_logical_and) {
      DoTest(left, &discard, &done, &discard);
    } else {
      DoTest(left, &done, &discard, &discard);
    }
    __ bind(&discard);
    __ Drop(1);
    PrepareForBailoutForId(right_id, BailoutState::NO_REGISTERS);

  } else {
    DCHECK(context()->IsEffect());
    // Effect context: no value is needed, only control flow.
    Label eval_right;
    if (is_logical_and) {
      VisitForControl(left, &eval_right, &done, &eval_right);
    } else {
      VisitForControl(left, &done, &eval_right, &eval_right);
    }
    PrepareForBailoutForId(right_id, BailoutState::NO_REGISTERS);
    __ bind(&eval_right);
  }

  // The right operand is evaluated in the same kind of context as the
  // whole expression.
  VisitInDuplicateContext(right);
  __ bind(&done);
}
855 
856 
// Arithmetic/bitwise/shift binary operation: left operand is materialized
// on the operand stack, right operand in the accumulator, then either the
// inlined Smi fast path or the generic binary-op code is emitted.
void FullCodeGenerator::VisitArithmeticExpression(BinaryOperation* expr) {
  Token::Value op = expr->op();
  Comment cmnt(masm_, "[ ArithmeticExpression");
  Expression* left = expr->left();
  Expression* right = expr->right();

  VisitForStackValue(left);
  VisitForAccumulatorValue(right);

  SetExpressionPosition(expr);
  if (ShouldInlineSmiCase(op)) {
    EmitInlineSmiBinaryOp(expr, op, left, right);
  } else {
    EmitBinaryOp(expr, op);
  }
}
873 
// Property load. Four cases, by key kind (named vs. keyed) and receiver
// kind (ordinary vs. super): ordinary loads go through load ICs with the
// receiver/key in descriptor registers; super loads push receiver,
// home object (and key) and call into the runtime. The loaded value ends
// up in the result register and is plugged into the context.
void FullCodeGenerator::VisitProperty(Property* expr) {
  Comment cmnt(masm_, "[ Property");
  SetExpressionPosition(expr);

  Expression* key = expr->key();

  if (key->IsPropertyName()) {
    if (!expr->IsSuperAccess()) {
      // obj.name — named load IC, receiver in the accumulator.
      VisitForAccumulatorValue(expr->obj());
      __ Move(LoadDescriptor::ReceiverRegister(), result_register());
      EmitNamedPropertyLoad(expr);
    } else {
      // super.name — push [this, home_object] for the runtime call.
      VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
      VisitForStackValue(
          expr->obj()->AsSuperPropertyReference()->home_object());
      EmitNamedSuperPropertyLoad(expr);
    }
  } else {
    if (!expr->IsSuperAccess()) {
      // obj[key] — keyed load IC; receiver pushed first, key evaluated
      // into the accumulator, then the receiver popped back.
      VisitForStackValue(expr->obj());
      VisitForAccumulatorValue(expr->key());
      __ Move(LoadDescriptor::NameRegister(), result_register());
      PopOperand(LoadDescriptor::ReceiverRegister());
      EmitKeyedPropertyLoad(expr);
    } else {
      // super[key] — push [this, home_object, key] for the runtime call.
      VisitForStackValue(expr->obj()->AsSuperPropertyReference()->this_var());
      VisitForStackValue(
          expr->obj()->AsSuperPropertyReference()->home_object());
      VisitForStackValue(expr->key());
      EmitKeyedSuperPropertyLoad(expr);
    }
  }
  PrepareForBailoutForId(expr->LoadId(), BailoutState::TOS_REGISTER);
  context()->Plug(result_register());
}
909 
VisitForTypeofValue(Expression * expr)910 void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
911   VariableProxy* proxy = expr->AsVariableProxy();
912   DCHECK(!context()->IsEffect());
913   DCHECK(!context()->IsTest());
914 
915   if (proxy != NULL &&
916       (proxy->var()->IsUnallocated() || proxy->var()->IsLookupSlot())) {
917     EmitVariableLoad(proxy, INSIDE_TYPEOF);
918     PrepareForBailout(proxy, BailoutState::TOS_REGISTER);
919   } else {
920     // This expression cannot throw a reference error at the top level.
921     VisitInDuplicateContext(expr);
922   }
923 }
924 
925 
// Block statement: enters the block's scope (if it declares one) via the
// RAII helper, generates the contained statements, and binds the break
// target for labelled breaks out of the block.
void FullCodeGenerator::VisitBlock(Block* stmt) {
  Comment cmnt(masm_, "[ Block");
  NestedBlock nested_block(this, stmt);

  {
    // Scope entry/exit (context push/pop and declarations) is handled by
    // this RAII object; it must cover the break label bind so the context
    // is still active there.
    EnterBlockScopeIfNeeded block_scope_state(
        this, stmt->scope(), stmt->EntryId(), stmt->DeclsId(), stmt->ExitId());
    VisitStatements(stmt->statements());
    __ bind(nested_block.break_label());
  }
}
937 
938 
// do-expression: generate the statement block for effect, then evaluate
// the designated result expression in the current context.
void FullCodeGenerator::VisitDoExpression(DoExpression* expr) {
  Comment cmnt(masm_, "[ Do Expression");
  SetExpressionPosition(expr);
  VisitBlock(expr->block());
  VisitInDuplicateContext(expr->result());
}
945 
946 
// Expression statement: the expression is evaluated purely for its side
// effects; its value is discarded.
void FullCodeGenerator::VisitExpressionStatement(ExpressionStatement* stmt) {
  Comment cmnt(masm_, "[ ExpressionStatement");
  SetStatementPosition(stmt);
  VisitForEffect(stmt->expression());
}
952 
953 
// Empty statement: emits no code, only an assembler comment.
void FullCodeGenerator::VisitEmptyStatement(EmptyStatement* stmt) {
  Comment cmnt(masm_, "[ EmptyStatement");
}
957 
958 
// if-statement: emits the condition as a control-flow test, then the then-
// and (optionally) else-arms. Bailout points are registered before each arm
// and after the join so the optimizing compiler can deopt to any of them.
void FullCodeGenerator::VisitIfStatement(IfStatement* stmt) {
  Comment cmnt(masm_, "[ IfStatement");
  SetStatementPosition(stmt);
  Label then_part, else_part, done;

  if (stmt->HasElseStatement()) {
    VisitForControl(stmt->condition(), &then_part, &else_part, &then_part);
    PrepareForBailoutForId(stmt->ThenId(), BailoutState::NO_REGISTERS);
    __ bind(&then_part);
    Visit(stmt->then_statement());
    // Skip the else-arm after the then-arm completes.
    __ jmp(&done);

    PrepareForBailoutForId(stmt->ElseId(), BailoutState::NO_REGISTERS);
    __ bind(&else_part);
    Visit(stmt->else_statement());
  } else {
    // No else-arm: a false condition branches straight to the join.
    VisitForControl(stmt->condition(), &then_part, &done, &then_part);
    PrepareForBailoutForId(stmt->ThenId(), BailoutState::NO_REGISTERS);
    __ bind(&then_part);
    Visit(stmt->then_statement());

    PrepareForBailoutForId(stmt->ElseId(), BailoutState::NO_REGISTERS);
  }
  __ bind(&done);
  PrepareForBailoutForId(stmt->IfId(), BailoutState::NO_REGISTERS);
}
985 
// Emits a 'continue' to |target|: walks the nesting stack outward until the
// matching iteration statement is found, unwinding operand-stack entries
// and contexts on the way. If a try-finally intervenes, the continue is
// recorded as a deferred command and dispatched after the finally block
// instead of jumping directly.
void FullCodeGenerator::EmitContinue(Statement* target) {
  NestedStatement* current = nesting_stack_;
  int context_length = 0;
  // When continuing, we clobber the unpredictable value in the accumulator
  // with one that's safe for GC.  If we hit an exit from the try block of
  // try...finally on our way out, we will unconditionally preserve the
  // accumulator on the stack.
  ClearAccumulator();
  while (!current->IsContinueTarget(target)) {
    if (HasStackOverflow()) return;
    if (current->IsTryFinally()) {
      Comment cmnt(masm(), "[ Deferred continue through finally");
      // Exit() on a TryFinally signals via context_length == -1 that the
      // transfer must be routed through the finally block.
      current->Exit(&context_length);
      DCHECK_EQ(-1, context_length);
      current->AsTryFinally()->deferred_commands()->RecordContinue(target);
      return;
    }
    current = current->Exit(&context_length);
  }
  // Drop operand-stack entries belonging to statements inside the loop.
  int stack_depth = current->GetStackDepthAtTarget();
  int stack_drop = operand_stack_depth_ - stack_depth;
  DCHECK_GE(stack_drop, 0);
  __ Drop(stack_drop);
  if (context_length > 0) {
    // Pop intervening contexts and sync the frame's context slot.
    while (context_length > 0) {
      LoadContextField(context_register(), Context::PREVIOUS_INDEX);
      --context_length;
    }
    StoreToFrameField(StandardFrameConstants::kContextOffset,
                      context_register());
  }

  __ jmp(current->AsIteration()->continue_label());
}
1020 
// continue-statement: delegates the unwinding and jump to EmitContinue.
void FullCodeGenerator::VisitContinueStatement(ContinueStatement* stmt) {
  Comment cmnt(masm_, "[ ContinueStatement");
  SetStatementPosition(stmt);
  EmitContinue(stmt->target());
}
1026 
// Emits a 'break' to |target|: structurally identical to EmitContinue, but
// searches for the enclosing breakable statement and jumps to its break
// label. A break crossing a try-finally is likewise deferred and replayed
// after the finally block.
void FullCodeGenerator::EmitBreak(Statement* target) {
  NestedStatement* current = nesting_stack_;
  int context_length = 0;
  // When breaking, we clobber the unpredictable value in the accumulator
  // with one that's safe for GC.  If we hit an exit from the try block of
  // try...finally on our way out, we will unconditionally preserve the
  // accumulator on the stack.
  ClearAccumulator();
  while (!current->IsBreakTarget(target)) {
    if (HasStackOverflow()) return;
    if (current->IsTryFinally()) {
      Comment cmnt(masm(), "[ Deferred break through finally");
      // context_length == -1 marks the transfer as routed through finally.
      current->Exit(&context_length);
      DCHECK_EQ(-1, context_length);
      current->AsTryFinally()->deferred_commands()->RecordBreak(target);
      return;
    }
    current = current->Exit(&context_length);
  }
  // Drop operand-stack entries pushed inside the statement being exited.
  int stack_depth = current->GetStackDepthAtTarget();
  int stack_drop = operand_stack_depth_ - stack_depth;
  DCHECK_GE(stack_drop, 0);
  __ Drop(stack_drop);
  if (context_length > 0) {
    // Pop intervening contexts and sync the frame's context slot.
    while (context_length > 0) {
      LoadContextField(context_register(), Context::PREVIOUS_INDEX);
      --context_length;
    }
    StoreToFrameField(StandardFrameConstants::kContextOffset,
                      context_register());
  }

  __ jmp(current->AsBreakable()->break_label());
}
1061 
// break-statement: delegates the unwinding and jump to EmitBreak.
void FullCodeGenerator::VisitBreakStatement(BreakStatement* stmt) {
  Comment cmnt(masm_, "[ BreakStatement");
  SetStatementPosition(stmt);
  EmitBreak(stmt->target());
}
1067 
// Unwinds the entire nesting stack and emits the return sequence. A return
// crossing a try-finally is deferred: it is recorded as a command and
// re-dispatched after the finally block instead of returning directly.
void FullCodeGenerator::EmitUnwindAndReturn() {
  NestedStatement* current = nesting_stack_;
  int context_length = 0;
  while (current != NULL) {
    if (HasStackOverflow()) return;
    if (current->IsTryFinally()) {
      Comment cmnt(masm(), "[ Deferred return through finally");
      // context_length == -1 marks the transfer as routed through finally.
      current->Exit(&context_length);
      DCHECK_EQ(-1, context_length);
      current->AsTryFinally()->deferred_commands()->RecordReturn();
      return;
    }
    current = current->Exit(&context_length);
  }
  EmitReturnSequence();
}
1084 
// Instantiates a closure for |info|. Uses the fast stub only for
// non-pretenured closures in function scope (and only when the always-opt
// flags are off); otherwise falls back to the runtime, which also handles
// pretenuring. The resulting function is plugged into the context.
void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
                                       bool pretenure) {
  // If we're running with the --always-opt or the --prepare-always-opt
  // flag, we need to use the runtime function so that the new function
  // we are creating here gets a chance to have its code optimized and
  // doesn't just get a copy of the existing unoptimized code.
  if (!FLAG_always_opt && !FLAG_prepare_always_opt && !pretenure &&
      scope()->is_function_scope()) {
    FastNewClosureStub stub(isolate());
    __ Move(stub.GetCallInterfaceDescriptor().GetRegisterParameter(0), info);
    __ CallStub(&stub);
  } else {
    __ Push(info);
    __ CallRuntime(pretenure ? Runtime::kNewClosure_Tenured
                             : Runtime::kNewClosure);
  }
  context()->Plug(result_register());
}
1103 
// Named (non-super) property load via the load IC. The receiver is
// expected in LoadDescriptor::ReceiverRegister (set up by the caller).
void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
  SetExpressionPosition(prop);
  Literal* key = prop->key()->AsLiteral();
  DCHECK(!key->value()->IsSmi());
  DCHECK(!prop->IsSuperAccess());

  CallLoadIC(prop->PropertyFeedbackSlot(), key->value());
}
1112 
// Named super property load: completes the [receiver, home_object] operand
// stack (pushed by the caller) with the property name and calls into the
// runtime to perform the lookup on the home object's prototype chain.
void FullCodeGenerator::EmitNamedSuperPropertyLoad(Property* prop) {
  // Stack: receiver, home_object
  SetExpressionPosition(prop);
  Literal* key = prop->key()->AsLiteral();
  DCHECK(!key->value()->IsSmi());
  DCHECK(prop->IsSuperAccess());

  PushOperand(key->value());
  CallRuntimeWithOperands(Runtime::kLoadFromSuper);
}
1123 
// Keyed (non-super) property load via the keyed load IC. Receiver and key
// registers are set up by the caller; this loads the feedback slot, calls
// the IC, and restores the context register afterwards.
void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
  SetExpressionPosition(prop);

  EmitLoadSlot(LoadDescriptor::SlotRegister(), prop->PropertyFeedbackSlot());

  Handle<Code> code = CodeFactory::KeyedLoadIC(isolate()).code();
  __ Call(code, RelocInfo::CODE_TARGET);
  RestoreContext();
}
1133 
// Keyed super property load: the caller has already pushed receiver,
// home_object, and key; delegate the lookup to the runtime.
void FullCodeGenerator::EmitKeyedSuperPropertyLoad(Property* prop) {
  // Stack: receiver, home_object, key.
  SetExpressionPosition(prop);
  CallRuntimeWithOperands(Runtime::kLoadKeyedFromSuper);
}
1139 
// Evaluates a (possibly computed) literal property key and converts it to
// a name via %ToName, leaving the result both in the result register and
// on the operand stack. |bailout_id| marks the post-conversion deopt point.
void FullCodeGenerator::EmitPropertyKey(LiteralProperty* property,
                                        BailoutId bailout_id) {
  VisitForStackValue(property->key());
  CallRuntimeWithOperands(Runtime::kToName);
  PrepareForBailoutForId(bailout_id, BailoutState::TOS_REGISTER);
  PushOperand(result_register());
}
1147 
// Materializes a feedback vector slot index (as a Smi) into |destination|.
void FullCodeGenerator::EmitLoadSlot(Register destination,
                                     FeedbackVectorSlot slot) {
  DCHECK(!slot.IsInvalid());
  __ Move(destination, SmiFromSlot(slot));
}
1153 
// Pushes a feedback vector slot index (as a Smi) onto the machine stack.
void FullCodeGenerator::EmitPushSlot(FeedbackVectorSlot slot) {
  __ Push(SmiFromSlot(slot));
}
1157 
// return-statement: evaluates the return value into the accumulator, then
// unwinds the nesting stack (honoring try-finally) and emits the return.
void FullCodeGenerator::VisitReturnStatement(ReturnStatement* stmt) {
  Comment cmnt(masm_, "[ ReturnStatement");
  SetStatementPosition(stmt);
  Expression* expr = stmt->expression();
  VisitForAccumulatorValue(expr);
  EmitUnwindAndReturn();
}
1165 
1166 
// with-statement: converts the subject to an object, pushes a with-context
// for it, generates the body under the extended scope, then pops the
// context again and syncs the frame's context slot.
void FullCodeGenerator::VisitWithStatement(WithStatement* stmt) {
  Comment cmnt(masm_, "[ WithStatement");
  SetStatementPosition(stmt);

  // ToObject(subject) — stubs may clobber the context register.
  VisitForAccumulatorValue(stmt->expression());
  Callable callable = CodeFactory::ToObject(isolate());
  __ Move(callable.descriptor().GetRegisterParameter(0), result_register());
  __ Call(callable.code(), RelocInfo::CODE_TARGET);
  RestoreContext();
  PrepareForBailoutForId(stmt->ToObjectId(), BailoutState::TOS_REGISTER);
  // Allocate and install the with-context.
  PushOperand(result_register());
  PushOperand(stmt->scope()->scope_info());
  PushFunctionArgumentForContextAllocation();
  CallRuntimeWithOperands(Runtime::kPushWithContext);
  StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());
  PrepareForBailoutForId(stmt->EntryId(), BailoutState::NO_REGISTERS);

  // Generate the body in the with-scope; restore the old scope afterwards.
  Scope* saved_scope = scope();
  scope_ = stmt->scope();
  { WithOrCatch body(this);
    Visit(stmt->statement());
  }
  scope_ = saved_scope;

  // Pop context.
  LoadContextField(context_register(), Context::PREVIOUS_INDEX);
  // Update local stack frame context field.
  StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());
}
1196 
1197 
// do-while loop: body first, then the condition at the bottom; the
// back-edge bookkeeping (interrupt check) runs only when the condition
// holds, just before jumping back to the body.
void FullCodeGenerator::VisitDoWhileStatement(DoWhileStatement* stmt) {
  Comment cmnt(masm_, "[ DoWhileStatement");
  // Do not insert break location as we do that below.
  SetStatementPosition(stmt, SKIP_BREAK);

  Label body, book_keeping;

  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  __ bind(&body);
  Visit(stmt->body());

  // Record the position of the do while condition and make sure it is
  // possible to break on the condition.
  __ bind(loop_statement.continue_label());
  PrepareForBailoutForId(stmt->ContinueId(), BailoutState::NO_REGISTERS);

  // Here is the actual 'while' keyword.
  SetExpressionAsStatementPosition(stmt->cond());
  VisitForControl(stmt->cond(),
                  &book_keeping,
                  loop_statement.break_label(),
                  &book_keeping);

  // Check stack before looping.
  PrepareForBailoutForId(stmt->BackEdgeId(), BailoutState::NO_REGISTERS);
  __ bind(&book_keeping);
  EmitBackEdgeBookkeeping(stmt, &body);
  __ jmp(&body);

  PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
  __ bind(loop_statement.break_label());
  decrement_loop_depth();
}
1233 
1234 
// while loop: the condition is tested at the top of each iteration; the
// back edge at the bottom performs the interrupt/OSR bookkeeping before
// jumping back to the test.
void FullCodeGenerator::VisitWhileStatement(WhileStatement* stmt) {
  Comment cmnt(masm_, "[ WhileStatement");
  Label loop, body;

  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  __ bind(&loop);

  SetExpressionAsStatementPosition(stmt->cond());
  VisitForControl(stmt->cond(),
                  &body,
                  loop_statement.break_label(),
                  &body);

  PrepareForBailoutForId(stmt->BodyId(), BailoutState::NO_REGISTERS);
  __ bind(&body);
  Visit(stmt->body());

  // 'continue' re-tests the condition via the back edge below.
  __ bind(loop_statement.continue_label());

  // Check stack before looping.
  EmitBackEdgeBookkeeping(stmt, &loop);
  __ jmp(&loop);

  PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
  __ bind(loop_statement.break_label());
  decrement_loop_depth();
}
1264 
1265 
// C-style for loop. The init clause runs once (outside the loop depth
// accounting); the test is emitted at the bottom of the loop so each
// iteration is body → next → back-edge bookkeeping → test.
void FullCodeGenerator::VisitForStatement(ForStatement* stmt) {
  Comment cmnt(masm_, "[ ForStatement");
  // Do not insert break location as we do it below.
  SetStatementPosition(stmt, SKIP_BREAK);

  Label test, body;

  Iteration loop_statement(this, stmt);

  if (stmt->init() != NULL) {
    Visit(stmt->init());
  }

  increment_loop_depth();
  // Emit the test at the bottom of the loop (even if empty).
  __ jmp(&test);

  PrepareForBailoutForId(stmt->BodyId(), BailoutState::NO_REGISTERS);
  __ bind(&body);
  Visit(stmt->body());

  // 'continue' lands here, before the next clause.
  PrepareForBailoutForId(stmt->ContinueId(), BailoutState::NO_REGISTERS);
  __ bind(loop_statement.continue_label());
  if (stmt->next() != NULL) {
    SetStatementPosition(stmt->next());
    Visit(stmt->next());
  }

  // Check stack before looping.
  EmitBackEdgeBookkeeping(stmt, &body);

  __ bind(&test);
  if (stmt->cond() != NULL) {
    SetExpressionAsStatementPosition(stmt->cond());
    VisitForControl(stmt->cond(),
                    &body,
                    loop_statement.break_label(),
                    loop_statement.break_label());
  } else {
    // No condition: loop unconditionally (exited only via break/return).
    __ jmp(&body);
  }

  PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
  __ bind(loop_statement.break_label());
  decrement_loop_depth();
}
1312 
1313 
// for-of loop. The iteration protocol is pre-desugared into sub-
// expressions on the AST node: acquire the iterator once, then per
// iteration call next(), test 'done', assign 'value' to the loop variable,
// and run the body.
void FullCodeGenerator::VisitForOfStatement(ForOfStatement* stmt) {
  Comment cmnt(masm_, "[ ForOfStatement");

  Iteration loop_statement(this, stmt);
  increment_loop_depth();

  // var iterator = iterable[Symbol.iterator]();
  SetExpressionAsStatementPosition(stmt->assign_iterator());
  VisitForEffect(stmt->assign_iterator());

  // Loop entry.
  __ bind(loop_statement.continue_label());

  // result = iterator.next()
  SetExpressionAsStatementPosition(stmt->next_result());
  VisitForEffect(stmt->next_result());

  // if (result.done) break;
  Label result_not_done;
  VisitForControl(stmt->result_done(), loop_statement.break_label(),
                  &result_not_done, &result_not_done);
  __ bind(&result_not_done);

  // each = result.value
  VisitForEffect(stmt->assign_each());

  // Generate code for the body of the loop.
  Visit(stmt->body());

  // Check stack before looping.
  PrepareForBailoutForId(stmt->BackEdgeId(), BailoutState::NO_REGISTERS);
  EmitBackEdgeBookkeeping(stmt, loop_statement.continue_label());
  __ jmp(loop_statement.continue_label());

  // Exit and decrement the loop depth.
  PrepareForBailoutForId(stmt->ExitId(), BailoutState::NO_REGISTERS);
  __ bind(loop_statement.break_label());
  decrement_loop_depth();
}
1353 
// Loads the current JSFunction from the frame's function slot and plugs it
// as the expression's value.
void FullCodeGenerator::VisitThisFunction(ThisFunction* expr) {
  LoadFromFrameField(JavaScriptFrameConstants::kFunctionOffset,
                     result_register());
  context()->Plug(result_register());
}
1359 
// try/catch: registers a handler-table entry around the try block; when an
// exception is thrown there, control arrives at handler_entry with the
// exception in the result register, a catch context is pushed, and the
// catch block runs in the extended scope.
void FullCodeGenerator::VisitTryCatchStatement(TryCatchStatement* stmt) {
  Comment cmnt(masm_, "[ TryCatchStatement");
  SetStatementPosition(stmt, SKIP_BREAK);

  // The try block adds a handler to the exception handler chain before
  // entering, and removes it again when exiting normally.  If an exception
  // is thrown during execution of the try block, the handler is consumed
  // and control is passed to the catch block with the exception in the
  // result register.

  Label try_entry, handler_entry, exit;
  __ jmp(&try_entry);
  __ bind(&handler_entry);
  if (stmt->clear_pending_message()) ClearPendingMessage();

  // Exception handler code, the exception is in the result register.
  // Extend the context before executing the catch block.
  { Comment cmnt(masm_, "[ Extend catch context");
    PushOperand(stmt->variable()->name());
    PushOperand(result_register());
    PushOperand(stmt->scope()->scope_info());
    PushFunctionArgumentForContextAllocation();
    CallRuntimeWithOperands(Runtime::kPushCatchContext);
    StoreToFrameField(StandardFrameConstants::kContextOffset,
                      context_register());
  }

  // Generate the catch block in the catch scope, then restore scope and
  // context and jump over the try block's code.
  Scope* saved_scope = scope();
  scope_ = stmt->scope();
  DCHECK(scope_->declarations()->is_empty());
  { WithOrCatch catch_body(this);
    Visit(stmt->catch_block());
  }
  // Restore the context.
  LoadContextField(context_register(), Context::PREVIOUS_INDEX);
  StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());
  scope_ = saved_scope;
  __ jmp(&exit);

  // Try block code. Sets up the exception handler chain.
  __ bind(&try_entry);

  int handler_index = NewHandlerTableEntry();
  EnterTryBlock(handler_index, &handler_entry, stmt->catch_prediction());
  {
    Comment cmnt_try(masm(), "[ Try block");
    Visit(stmt->try_block());
  }
  ExitTryBlock(handler_index);
  __ bind(&exit);
}
1411 
1412 
// try/finally. Control transfers out of the try block (fall-through,
// break/continue/return, throw) are encoded as continuation tokens via
// DeferredCommands; the finally block runs once, then EmitCommands
// dispatches on the token to resume the original transfer.
void FullCodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* stmt) {
  Comment cmnt(masm_, "[ TryFinallyStatement");
  SetStatementPosition(stmt, SKIP_BREAK);

  // Try finally is compiled by setting up a try-handler on the stack while
  // executing the try body, and removing it again afterwards.
  //
  // The try-finally construct can enter the finally block in three ways:
  // 1. By exiting the try-block normally. This exits the try block,
  //    pushes the continuation token and falls through to the finally
  //    block.
  // 2. By exiting the try-block with a function-local control flow transfer
  //    (break/continue/return). The site of the, e.g., break exits the
  //    try block, pushes the continuation token and jumps to the
  //    finally block. After the finally block executes, the execution
  //    continues based on the continuation token to a block that
  //    continues with the control flow transfer.
  // 3. By exiting the try-block with a thrown exception. In the handler,
  //    we push the exception and continuation token and jump to the
  //    finally block (which will again dispatch based on the token once
  //    it is finished).

  Label try_entry, handler_entry, finally_entry;
  DeferredCommands deferred(this, &finally_entry);

  // Jump to try-handler setup and try-block code.
  __ jmp(&try_entry);
  __ bind(&handler_entry);

  // Exception handler code.  This code is only executed when an exception
  // is thrown.  Record the continuation and jump to the finally block.
  {
    Comment cmnt_handler(masm(), "[ Finally handler");
    deferred.RecordThrow();
  }

  // Set up try handler.
  __ bind(&try_entry);
  int handler_index = NewHandlerTableEntry();
  EnterTryBlock(handler_index, &handler_entry, stmt->catch_prediction());
  {
    Comment cmnt_try(masm(), "[ Try block");
    // TryFinally on the nesting stack routes break/continue/return through
    // the deferred commands recorded above.
    TryFinally try_body(this, &deferred);
    Visit(stmt->try_block());
  }
  ExitTryBlock(handler_index);
  // Execute the finally block on the way out.  Clobber the unpredictable
  // value in the result register with one that's safe for GC because the
  // finally block will unconditionally preserve the result register on the
  // stack.
  ClearAccumulator();
  deferred.EmitFallThrough();
  // Fall through to the finally block.

  // Finally block implementation.
  __ bind(&finally_entry);
  {
    Comment cmnt_finally(masm(), "[ Finally block");
    OperandStackDepthIncrement(2);  // Token and accumulator are on stack.
    EnterFinallyBlock();
    Visit(stmt->finally_block());
    ExitFinallyBlock();
    OperandStackDepthDecrement(2);  // Token and accumulator were on stack.
  }

  {
    Comment cmnt_deferred(masm(), "[ Post-finally dispatch");
    deferred.EmitCommands();  // Return to the calling code.
  }
}
1483 
1484 
// debugger-statement: emits a debug break and a bailout point after it.
void FullCodeGenerator::VisitDebuggerStatement(DebuggerStatement* stmt) {
  Comment cmnt(masm_, "[ DebuggerStatement");
  SetStatementPosition(stmt);

  __ DebugBreak();
  // Ignore the return value.

  PrepareForBailoutForId(stmt->DebugBreakId(), BailoutState::NO_REGISTERS);
}
1494 
1495 
// Case clauses are generated as part of the enclosing switch statement and
// are never visited individually.
void FullCodeGenerator::VisitCaseClause(CaseClause* clause) {
  UNREACHABLE();
}
1499 
1500 
// Ternary expression (cond ? a : b). Both arms are generated under the
// same context; the operand stack depth is reset before the else-arm
// because the two arms are alternative paths from the same starting depth.
void FullCodeGenerator::VisitConditional(Conditional* expr) {
  Comment cmnt(masm_, "[ Conditional");
  Label true_case, false_case, done;
  VisitForControl(expr->condition(), &true_case, &false_case, &true_case);

  int original_stack_depth = operand_stack_depth_;
  PrepareForBailoutForId(expr->ThenId(), BailoutState::NO_REGISTERS);
  __ bind(&true_case);
  SetExpressionPosition(expr->then_expression());
  if (context()->IsTest()) {
    // In a test context each arm branches directly to the outer labels;
    // there is no join and no fall-through from the then-arm.
    const TestContext* for_test = TestContext::cast(context());
    VisitForControl(expr->then_expression(),
                    for_test->true_label(),
                    for_test->false_label(),
                    NULL);
  } else {
    VisitInDuplicateContext(expr->then_expression());
    __ jmp(&done);
  }

  operand_stack_depth_ = original_stack_depth;
  PrepareForBailoutForId(expr->ElseId(), BailoutState::NO_REGISTERS);
  __ bind(&false_case);
  SetExpressionPosition(expr->else_expression());
  VisitInDuplicateContext(expr->else_expression());
  // If control flow falls through Visit, merge it with true case here.
  if (!context()->IsTest()) {
    __ bind(&done);
  }
}
1531 
1532 
// Literal: plug the constant value directly into the current context.
void FullCodeGenerator::VisitLiteral(Literal* expr) {
  Comment cmnt(masm_, "[ Literal");
  context()->Plug(expr->value());
}
1537 
1538 
// Function literal: obtain (or compile) the SharedFunctionInfo for the
// nested function and emit closure instantiation. A null info means the
// nested compilation overflowed the stack; propagate that and bail out.
void FullCodeGenerator::VisitFunctionLiteral(FunctionLiteral* expr) {
  Comment cmnt(masm_, "[ FunctionLiteral");

  // Build the function boilerplate and instantiate it.
  Handle<SharedFunctionInfo> function_info =
      Compiler::GetSharedFunctionInfo(expr, script(), info_);
  if (function_info.is_null()) {
    SetStackOverflow();
    return;
  }
  EmitNewClosure(function_info, expr->pretenure());
}
1551 
1552 
// Class literal: calls %DefineClass with [extends, constructor, start,
// end], loads the constructor's "prototype", defines the class properties,
// normalizes the constructor to fast properties, and (for named class
// expressions/declarations) binds the class name variable.
void FullCodeGenerator::VisitClassLiteral(ClassLiteral* lit) {
  Comment cmnt(masm_, "[ ClassLiteral");

  // Push the superclass, or the hole for a class without 'extends'.
  if (lit->extends() != NULL) {
    VisitForStackValue(lit->extends());
  } else {
    PushOperand(isolate()->factory()->the_hole_value());
  }

  VisitForStackValue(lit->constructor());

  PushOperand(Smi::FromInt(lit->start_position()));
  PushOperand(Smi::FromInt(lit->end_position()));

  CallRuntimeWithOperands(Runtime::kDefineClass);
  PrepareForBailoutForId(lit->CreateLiteralId(), BailoutState::TOS_REGISTER);
  PushOperand(result_register());

  // Load the "prototype" from the constructor.
  __ Move(LoadDescriptor::ReceiverRegister(), result_register());
  CallLoadIC(lit->PrototypeSlot(), isolate()->factory()->prototype_string());
  PrepareForBailoutForId(lit->PrototypeId(), BailoutState::TOS_REGISTER);
  PushOperand(result_register());

  // Define methods/accessors with [constructor, prototype] on the stack,
  // then drop the prototype (the constructor stays for ToFastProperties).
  EmitClassDefineProperties(lit);
  DropOperands(1);

  // Set the constructor to have fast properties.
  CallRuntimeWithOperands(Runtime::kToFastProperties);

  if (lit->class_variable_proxy() != nullptr) {
    EmitVariableAssignment(lit->class_variable_proxy()->var(), Token::INIT,
                           lit->ProxySlot(), HoleCheckMode::kElided);
  }

  context()->Plug(result_register());
}
1590 
// RegExp literal: calls the FastCloneRegExp stub with (closure,
// literal_index, pattern, flags) in the stub's parameter registers.
void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  Comment cmnt(masm_, "[ RegExpLiteral");
  Callable callable = CodeFactory::FastCloneRegExp(isolate());
  CallInterfaceDescriptor descriptor = callable.descriptor();
  LoadFromFrameField(JavaScriptFrameConstants::kFunctionOffset,
                     descriptor.GetRegisterParameter(0));
  __ Move(descriptor.GetRegisterParameter(1),
          Smi::FromInt(expr->literal_index()));
  __ Move(descriptor.GetRegisterParameter(2), expr->pattern());
  __ Move(descriptor.GetRegisterParameter(3), Smi::FromInt(expr->flags()));
  __ Call(callable.code(), RelocInfo::CODE_TARGET);

  // Reload the context register after the call as i.e. TurboFan code stubs
  // won't preserve the context register.
  LoadFromFrameField(StandardFrameConstants::kContextOffset,
                     context_register());
  context()->Plug(result_register());
}
1609 
VisitNativeFunctionLiteral(NativeFunctionLiteral * expr)1610 void FullCodeGenerator::VisitNativeFunctionLiteral(
1611     NativeFunctionLiteral* expr) {
1612   Comment cmnt(masm_, "[ NativeFunctionLiteral");
1613   Handle<SharedFunctionInfo> shared =
1614       Compiler::GetSharedFunctionInfoForNative(expr->extension(), expr->name());
1615   EmitNewClosure(shared, false);
1616 }
1617 
1618 
// Emits code for a throw expression: evaluates the exception value onto the
// operand stack and calls Runtime::kThrow, which does not return.
void FullCodeGenerator::VisitThrow(Throw* expr) {
  Comment cmnt(masm_, "[ Throw");
  VisitForStackValue(expr->exception());
  SetExpressionPosition(expr);
  CallRuntimeWithOperands(Runtime::kThrow);
  // Never returns here.

  // Even though this expression doesn't produce a value, we need to simulate
  // plugging of the value context to ensure stack depth tracking is in sync.
  if (context()->IsStackValue()) OperandStackDepthIncrement(1);
}
1630 
// Opens a try block: fills in the handler-table entry for |handler_index|
// (protected-range start, handler label offset, current operand stack depth,
// and catch prediction) and pushes the current context onto the operand
// stack so the handler can restore it.
void FullCodeGenerator::EnterTryBlock(
    int handler_index, Label* handler,
    HandlerTable::CatchPrediction catch_prediction) {
  HandlerTableEntry* entry = &handler_table_[handler_index];
  entry->range_start = masm()->pc_offset();
  entry->handler_offset = handler->pos();
  entry->stack_depth = operand_stack_depth_;
  entry->catch_prediction = catch_prediction;

  // We are using the operand stack depth, check for accuracy.
  EmitOperandStackDepthCheck();

  // Push context onto operand stack.
  STATIC_ASSERT(TryBlockConstant::kElementCount == 1);
  PushOperand(context_register());
}
1647 
1648 
ExitTryBlock(int handler_index)1649 void FullCodeGenerator::ExitTryBlock(int handler_index) {
1650   HandlerTableEntry* entry = &handler_table_[handler_index];
1651   entry->range_end = masm()->pc_offset();
1652 
1653   // Drop context from operand stack.
1654   DropOperands(TryBlockConstant::kElementCount);
1655 }
1656 
1657 
// Emits code for a call expression, dispatching on the syntactic call type
// (global, with-scope, named/keyed property, super, or other). Calls that
// may be direct eval take a separate path via EmitPossiblyEvalCall.
void FullCodeGenerator::VisitCall(Call* expr) {
#ifdef DEBUG
  // We want to verify that RecordJSReturnSite gets called on all paths
  // through this function.  Avoid early returns.
  expr->return_is_recorded_ = false;
#endif

  Comment cmnt(masm_, (expr->tail_call_mode() == TailCallMode::kAllow)
                          ? "[ TailCall"
                          : "[ Call");
  Expression* callee = expr->expression();
  Call::CallType call_type = expr->GetCallType();

  if (expr->is_possibly_eval()) {
    EmitPossiblyEvalCall(expr);
  } else {
    switch (call_type) {
      case Call::GLOBAL_CALL:
        EmitCallWithLoadIC(expr);
        break;
      case Call::WITH_CALL:
        // Call to a lookup slot looked up through a with scope.
        PushCalleeAndWithBaseObject(expr);
        EmitCall(expr);
        break;
      case Call::NAMED_PROPERTY_CALL: {
        // Receiver is evaluated first, then the callee is loaded by IC.
        Property* property = callee->AsProperty();
        VisitForStackValue(property->obj());
        EmitCallWithLoadIC(expr);
        break;
      }
      case Call::KEYED_PROPERTY_CALL: {
        Property* property = callee->AsProperty();
        VisitForStackValue(property->obj());
        EmitKeyedCallWithLoadIC(expr, property->key());
        break;
      }
      case Call::NAMED_SUPER_PROPERTY_CALL:
        EmitSuperCallWithLoadIC(expr);
        break;
      case Call::KEYED_SUPER_PROPERTY_CALL:
        EmitKeyedSuperCallWithLoadIC(expr);
        break;
      case Call::SUPER_CALL:
        EmitSuperConstructorCall(expr);
        break;
      case Call::OTHER_CALL:
        // Call to an arbitrary expression not handled specially above.
        VisitForStackValue(callee);
        OperandStackDepthIncrement(1);
        // Push undefined as the receiver.
        __ PushRoot(Heap::kUndefinedValueRootIndex);
        // Emit function call.
        EmitCall(expr);
        break;
    }
  }

#ifdef DEBUG
  // RecordJSReturnSite should have been called.
  DCHECK(expr->return_is_recorded_);
#endif
}
1720 
// Emits code for a %Foo(...) runtime call. JS runtime functions go through
// EmitCallJSRuntimeFunction; intrinsics in the FOR_EACH_FULL_CODE_INTRINSIC
// list get dedicated inline code via their Emit##Name generators; all other
// intrinsics fall back to a call into the C++ runtime.
void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
  ZoneList<Expression*>* args = expr->arguments();
  int arg_count = args->length();

  if (expr->is_jsruntime()) {
    Comment cmnt(masm_, "[ CallRuntime");
    EmitLoadJSRuntimeFunction(expr);

    // Push the arguments ("left-to-right").
    for (int i = 0; i < arg_count; i++) {
      VisitForStackValue(args->at(i));
    }

    PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
    EmitCallJSRuntimeFunction(expr);
    context()->DropAndPlug(1, result_register());

  } else {
    const Runtime::Function* function = expr->function();
    switch (function->function_id) {
#define CALL_INTRINSIC_GENERATOR(Name)     \
  case Runtime::kInline##Name: {           \
    Comment cmnt(masm_, "[ Inline" #Name); \
    return Emit##Name(expr);               \
  }
      FOR_EACH_FULL_CODE_INTRINSIC(CALL_INTRINSIC_GENERATOR)
#undef CALL_INTRINSIC_GENERATOR
      default: {
        Comment cmnt(masm_, "[ CallRuntime for unhandled intrinsic");
        // Push the arguments ("left-to-right").
        for (int i = 0; i < arg_count; i++) {
          VisitForStackValue(args->at(i));
        }

        // Call the C runtime function.
        PrepareForBailoutForId(expr->CallId(), BailoutState::NO_REGISTERS);
        __ CallRuntime(expr->function(), arg_count);
        OperandStackDepthDecrement(arg_count);
        context()->Plug(result_register());
      }
    }
  }
}
1764 
VisitSpread(Spread * expr)1765 void FullCodeGenerator::VisitSpread(Spread* expr) { UNREACHABLE(); }
1766 
1767 
// EmptyParentheses nodes are never visited directly by full codegen.
void FullCodeGenerator::VisitEmptyParentheses(EmptyParentheses* expr) {
  UNREACHABLE();
}
1771 
1772 
// A RewritableExpression is a transparent wrapper: code generation simply
// delegates to the wrapped expression.
void FullCodeGenerator::VisitRewritableExpression(RewritableExpression* expr) {
  Visit(expr->expression());
}
1776 
// Unwinds the operand stack down to this try-finally's handler block when
// control leaves the protected region. If *context_length > 0 the context
// saved in the handler block is restored to its register and frame slot;
// otherwise it is simply dropped with the rest of the stack. Returns the
// next enclosing nested statement.
FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
    int* context_length) {
  // The macros used here must preserve the result register.

  // Calculate how many operands to drop to get down to handler block.
  int stack_drop = codegen_->operand_stack_depth_ - GetStackDepthAtTarget();
  DCHECK_GE(stack_drop, 0);

  // Because the handler block contains the context of the finally
  // code, we can restore it directly from there for the finally code
  // rather than iteratively unwinding contexts via their previous
  // links.
  if (*context_length > 0) {
    __ Drop(stack_drop);  // Down to the handler block.
    // Restore the context to its dedicated register and the stack.
    STATIC_ASSERT(TryBlockConstant::kElementCount == 1);
    __ Pop(codegen_->context_register());
    codegen_->StoreToFrameField(StandardFrameConstants::kContextOffset,
                                codegen_->context_register());
  } else {
    // Down to the handler block and also drop context.
    __ Drop(stack_drop + TryBlockConstant::kElementCount);
  }

  // The caller will ignore outputs.
  *context_length = -1;
  return previous_;
}
1805 
RecordBreak(Statement * target)1806 void FullCodeGenerator::DeferredCommands::RecordBreak(Statement* target) {
1807   TokenId token = dispenser_.GetBreakContinueToken();
1808   commands_.push_back({kBreak, token, target});
1809   EmitJumpToFinally(token);
1810 }
1811 
RecordContinue(Statement * target)1812 void FullCodeGenerator::DeferredCommands::RecordContinue(Statement* target) {
1813   TokenId token = dispenser_.GetBreakContinueToken();
1814   commands_.push_back({kContinue, token, target});
1815   EmitJumpToFinally(token);
1816 }
1817 
RecordReturn()1818 void FullCodeGenerator::DeferredCommands::RecordReturn() {
1819   if (return_token_ == TokenDispenserForFinally::kInvalidToken) {
1820     return_token_ = TokenDispenserForFinally::kReturnToken;
1821     commands_.push_back({kReturn, return_token_, nullptr});
1822   }
1823   EmitJumpToFinally(return_token_);
1824 }
1825 
RecordThrow()1826 void FullCodeGenerator::DeferredCommands::RecordThrow() {
1827   if (throw_token_ == TokenDispenserForFinally::kInvalidToken) {
1828     throw_token_ = TokenDispenserForFinally::kThrowToken;
1829     commands_.push_back({kThrow, throw_token_, nullptr});
1830   }
1831   EmitJumpToFinally(throw_token_);
1832 }
1833 
// Pushes the fall-through token and the current result for normal entry
// into the finally block (no deferred command pending).
void FullCodeGenerator::DeferredCommands::EmitFallThrough() {
  __ Push(Smi::FromInt(TokenDispenserForFinally::kFallThroughToken));
  __ Push(result_register());
}
1838 
// Pushes the command token and the current result, then jumps to the
// finally entry label.
void FullCodeGenerator::DeferredCommands::EmitJumpToFinally(TokenId token) {
  __ Push(Smi::FromInt(token));
  __ Push(result_register());
  __ jmp(finally_entry_);
}
1844 
TryLiteralCompare(CompareOperation * expr)1845 bool FullCodeGenerator::TryLiteralCompare(CompareOperation* expr) {
1846   Expression* sub_expr;
1847   Handle<String> check;
1848   if (expr->IsLiteralCompareTypeof(&sub_expr, &check)) {
1849     SetExpressionPosition(expr);
1850     EmitLiteralCompareTypeof(expr, sub_expr, check);
1851     return true;
1852   }
1853 
1854   if (expr->IsLiteralCompareUndefined(&sub_expr)) {
1855     SetExpressionPosition(expr);
1856     EmitLiteralCompareNil(expr, sub_expr, kUndefinedValue);
1857     return true;
1858   }
1859 
1860   if (expr->IsLiteralCompareNull(&sub_expr)) {
1861     SetExpressionPosition(expr);
1862     EmitLiteralCompareNil(expr, sub_expr, kNullValue);
1863     return true;
1864   }
1865 
1866   return false;
1867 }
1868 
1869 
// Patches the back-edge interrupt calls of loops at the next nesting level
// into unconditional calls to the OnStackReplacement builtin, then records
// the new level on the code object.
void BackEdgeTable::Patch(Isolate* isolate, Code* unoptimized) {
  DisallowHeapAllocation no_gc;
  Code* patch = isolate->builtins()->builtin(Builtins::kOnStackReplacement);

  // Increment loop nesting level by one and iterate over the back edge table
  // to find the matching loops to patch the interrupt
  // call to an unconditional call to the replacement code.
  int loop_nesting_level = unoptimized->allow_osr_at_loop_nesting_level() + 1;
  if (loop_nesting_level > AbstractCode::kMaxLoopNestingMarker) return;

  BackEdgeTable back_edges(unoptimized, &no_gc);
  for (uint32_t i = 0; i < back_edges.length(); i++) {
    // Only edges at exactly the new level are patched here; shallower ones
    // were handled by earlier calls (see the DCHECK in Verify).
    if (static_cast<int>(back_edges.loop_depth(i)) == loop_nesting_level) {
      DCHECK_EQ(INTERRUPT, GetBackEdgeState(isolate,
                                            unoptimized,
                                            back_edges.pc(i)));
      PatchAt(unoptimized, back_edges.pc(i), ON_STACK_REPLACEMENT, patch);
    }
  }

  unoptimized->set_allow_osr_at_loop_nesting_level(loop_nesting_level);
  DCHECK(Verify(isolate, unoptimized));
}
1893 
1894 
// Reverts all previously patched back edges (loops at or below the recorded
// nesting level) back to interrupt checks and resets the level to zero.
void BackEdgeTable::Revert(Isolate* isolate, Code* unoptimized) {
  DisallowHeapAllocation no_gc;
  Code* patch = isolate->builtins()->builtin(Builtins::kInterruptCheck);

  // Iterate over the back edge table and revert the patched interrupt calls.
  int loop_nesting_level = unoptimized->allow_osr_at_loop_nesting_level();

  BackEdgeTable back_edges(unoptimized, &no_gc);
  for (uint32_t i = 0; i < back_edges.length(); i++) {
    if (static_cast<int>(back_edges.loop_depth(i)) <= loop_nesting_level) {
      DCHECK_NE(INTERRUPT, GetBackEdgeState(isolate,
                                            unoptimized,
                                            back_edges.pc(i)));
      PatchAt(unoptimized, back_edges.pc(i), INTERRUPT, patch);
    }
  }

  unoptimized->set_allow_osr_at_loop_nesting_level(0);
  // Assert that none of the back edges are patched anymore.
  DCHECK(Verify(isolate, unoptimized));
}
1916 
1917 
1918 #ifdef DEBUG
// Debug-only consistency check: every back edge at or below the recorded
// OSR nesting level must be patched (non-INTERRUPT), and every deeper one
// must still be an interrupt check.
bool BackEdgeTable::Verify(Isolate* isolate, Code* unoptimized) {
  DisallowHeapAllocation no_gc;
  int loop_nesting_level = unoptimized->allow_osr_at_loop_nesting_level();
  BackEdgeTable back_edges(unoptimized, &no_gc);
  for (uint32_t i = 0; i < back_edges.length(); i++) {
    uint32_t loop_depth = back_edges.loop_depth(i);
    CHECK_LE(static_cast<int>(loop_depth), AbstractCode::kMaxLoopNestingMarker);
    // Assert that all back edges for shallower loops (and only those)
    // have already been patched.
    CHECK_EQ((static_cast<int>(loop_depth) <= loop_nesting_level),
             GetBackEdgeState(isolate,
                              unoptimized,
                              back_edges.pc(i)) != INTERRUPT);
  }
  return true;
}
1935 #endif  // DEBUG
1936 
1937 
// Enters |scope| for code generation: extends the context chain with a block
// context when the scope needs one, and visits the scope's declarations.
// Records bailout points at scope entry and after declarations. |scope| may
// be NULL, in which case only the entry bailout is recorded. The previous
// scope is restored by the destructor.
FullCodeGenerator::EnterBlockScopeIfNeeded::EnterBlockScopeIfNeeded(
    FullCodeGenerator* codegen, Scope* scope, BailoutId entry_id,
    BailoutId declarations_id, BailoutId exit_id)
    : codegen_(codegen), exit_id_(exit_id) {
  saved_scope_ = codegen_->scope();

  if (scope == NULL) {
    codegen_->PrepareForBailoutForId(entry_id, BailoutState::NO_REGISTERS);
    needs_block_context_ = false;
  } else {
    needs_block_context_ = scope->NeedsContext();
    codegen_->scope_ = scope;
    {
      if (needs_block_context_) {
        Comment cmnt(masm(), "[ Extend block context");
        codegen_->PushOperand(scope->scope_info());
        codegen_->PushFunctionArgumentForContextAllocation();
        codegen_->CallRuntimeWithOperands(Runtime::kPushBlockContext);

        // Replace the context stored in the frame.
        codegen_->StoreToFrameField(StandardFrameConstants::kContextOffset,
                                    codegen_->context_register());
      }
      // Block scopes entered this way must not allocate stack slots.
      CHECK_EQ(0, scope->num_stack_slots());
      codegen_->PrepareForBailoutForId(entry_id, BailoutState::NO_REGISTERS);
    }
    {
      Comment cmnt(masm(), "[ Declarations");
      codegen_->VisitDeclarations(scope->declarations());
      codegen_->PrepareForBailoutForId(declarations_id,
                                       BailoutState::NO_REGISTERS);
    }
  }
}
1972 
1973 
// Leaves the block scope: if a block context was pushed, pops back to the
// previous context (both in the context register and in the frame's context
// slot), records the exit bailout, and restores the saved scope.
FullCodeGenerator::EnterBlockScopeIfNeeded::~EnterBlockScopeIfNeeded() {
  if (needs_block_context_) {
    codegen_->LoadContextField(codegen_->context_register(),
                               Context::PREVIOUS_INDEX);
    // Update local stack frame context field.
    codegen_->StoreToFrameField(StandardFrameConstants::kContextOffset,
                                codegen_->context_register());
  }
  codegen_->PrepareForBailoutForId(exit_id_, BailoutState::NO_REGISTERS);
  codegen_->scope_ = saved_scope_;
}
1985 
script()1986 Handle<Script> FullCodeGenerator::script() { return info_->script(); }
1987 
// The language mode of the scope currently being compiled.
LanguageMode FullCodeGenerator::language_mode() {
  return scope()->language_mode();
}
1991 
// Whether the function being compiled has a simple parameter list,
// as recorded on the compilation info.
bool FullCodeGenerator::has_simple_parameters() {
  return info_->has_simple_parameters();
}
1995 
literal() const1996 FunctionLiteral* FullCodeGenerator::literal() const { return info_->literal(); }
1997 
1998 #undef __
1999 
2000 
2001 }  // namespace internal
2002 }  // namespace v8
2003