// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_FULL_CODEGEN_FULL_CODEGEN_H_
#define V8_FULL_CODEGEN_FULL_CODEGEN_H_

#include "src/allocation.h"
#include "src/assert-scope.h"
#include "src/ast/ast.h"
#include "src/ast/scopes.h"
#include "src/bit-vector.h"
#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/deoptimizer.h"
#include "src/globals.h"
#include "src/objects.h"

namespace v8 {
namespace internal {

// Forward declarations.
class CompilationInfo;
class CompilationJob;
class JumpPatchSite;
class Scope;

// -----------------------------------------------------------------------------
// Full code generator.

class FullCodeGenerator final : public AstVisitor<FullCodeGenerator> {
 public:
  FullCodeGenerator(MacroAssembler* masm, CompilationInfo* info,
                    uintptr_t stack_limit);

  void Initialize(uintptr_t stack_limit);

  static CompilationJob* NewCompilationJob(CompilationInfo* info);

  static bool MakeCode(CompilationInfo* info, uintptr_t stack_limit);
  static bool MakeCode(CompilationInfo* info);

  // Encode bailout state and pc-offset as a BitField<type, start, size>.
  // Only use 30 bits because we encode the result as a smi.
  class BailoutStateField : public BitField<Deoptimizer::BailoutState, 0, 1> {};
  class PcField : public BitField<unsigned, 1, 30 - 1> {};
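
  // Illustrative sketch (not part of the interface): the two BitFields above
  // pack a bailout state and a pc offset into a single 30-bit payload that
  // can be stored as a smi. Assuming a pc offset small enough to fit in
  // PcField, encoding and decoding would look roughly like:
  //
  //   unsigned pc_and_state =
  //       BailoutStateField::encode(Deoptimizer::BailoutState::TOS_REGISTER) |
  //       PcField::encode(pc_offset);
  //   Deoptimizer::BailoutState state = BailoutStateField::decode(pc_and_state);
  //   unsigned decoded_pc_offset = PcField::decode(pc_and_state);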

  static const int kMaxBackEdgeWeight = 127;

  // Platform-specific code size multiplier.
#if V8_TARGET_ARCH_IA32 || V8_TARGET_ARCH_X87
  static const int kCodeSizeMultiplier = 105;
#elif V8_TARGET_ARCH_X64
  static const int kCodeSizeMultiplier = 165;
#elif V8_TARGET_ARCH_ARM
  static const int kCodeSizeMultiplier = 149;
#elif V8_TARGET_ARCH_ARM64
  static const int kCodeSizeMultiplier = 220;
#elif V8_TARGET_ARCH_PPC64
  static const int kCodeSizeMultiplier = 200;
#elif V8_TARGET_ARCH_PPC
  static const int kCodeSizeMultiplier = 200;
#elif V8_TARGET_ARCH_MIPS
  static const int kCodeSizeMultiplier = 149;
#elif V8_TARGET_ARCH_MIPS64
  static const int kCodeSizeMultiplier = 149;
#elif V8_TARGET_ARCH_S390
// TODO(joransiu): Copied PPC value. Check this is sensible for S390.
  static const int kCodeSizeMultiplier = 200;
#elif V8_TARGET_ARCH_S390X
// TODO(joransiu): Copied PPC value. Check this is sensible for S390X.
  static const int kCodeSizeMultiplier = 200;
#else
#error Unsupported target architecture.
#endif

  static Register result_register();

 private:
  typedef Deoptimizer::BailoutState BailoutState;

  class Breakable;
  class Iteration;
  class TryFinally;

  class TestContext;

  class NestedStatement BASE_EMBEDDED {
   public:
    explicit NestedStatement(FullCodeGenerator* codegen)
        : codegen_(codegen),
          stack_depth_at_target_(codegen->operand_stack_depth_) {
      // Link into codegen's nesting stack.
      previous_ = codegen->nesting_stack_;
      codegen->nesting_stack_ = this;
    }
    virtual ~NestedStatement() {
      // Unlink from codegen's nesting stack.
      DCHECK_EQ(this, codegen_->nesting_stack_);
      codegen_->nesting_stack_ = previous_;
    }

    virtual Breakable* AsBreakable() { return nullptr; }
    virtual Iteration* AsIteration() { return nullptr; }
    virtual TryFinally* AsTryFinally() { return nullptr; }

    virtual bool IsContinueTarget(Statement* target) { return false; }
    virtual bool IsBreakTarget(Statement* target) { return false; }
    virtual bool IsTryFinally() { return false; }

    // Notify the statement that we are exiting it via break, continue, or
    // return and give it a chance to generate cleanup code.  Return the
    // next outer statement in the nesting stack.  We accumulate in
    // {*context_length} the number of context chain links to unwind as we
    // traverse the nesting stack from an exit to its target.
    virtual NestedStatement* Exit(int* context_length) { return previous_; }

    // Determine the expected operand stack depth when this statement is being
    // used as the target of an exit. The caller will drop to this depth.
    int GetStackDepthAtTarget() { return stack_depth_at_target_; }

   protected:
    MacroAssembler* masm() { return codegen_->masm(); }

    FullCodeGenerator* codegen_;
    NestedStatement* previous_;
    int stack_depth_at_target_;

   private:
    DISALLOW_COPY_AND_ASSIGN(NestedStatement);
  };
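
  // Illustrative sketch (assumed traversal, not a declaration): an exit such
  // as 'break' walks the nesting stack outwards, letting every level emit its
  // cleanup code and counting the context chain links that must be unwound:
  //
  //   int context_length = 0;
  //   NestedStatement* current = nesting_stack_;
  //   while (current != nullptr && !current->IsBreakTarget(target)) {
  //     current = current->Exit(&context_length);
  //   }
  //   // Drop operands down to current->GetStackDepthAtTarget(), pop
  //   // {context_length} contexts, then jump to the break label.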

  // A breakable statement such as a block.
  class Breakable : public NestedStatement {
   public:
    Breakable(FullCodeGenerator* codegen, BreakableStatement* statement)
        : NestedStatement(codegen), statement_(statement) {
    }

    Breakable* AsBreakable() override { return this; }
    bool IsBreakTarget(Statement* target) override {
      return statement() == target;
    }

    BreakableStatement* statement() { return statement_; }
    Label* break_label() { return &break_label_; }

   private:
    BreakableStatement* statement_;
    Label break_label_;
  };

  // An iteration statement such as a while, for, or do loop.
  class Iteration : public Breakable {
   public:
    Iteration(FullCodeGenerator* codegen, IterationStatement* statement)
        : Breakable(codegen, statement) {
    }

    Iteration* AsIteration() override { return this; }
    bool IsContinueTarget(Statement* target) override {
      return statement() == target;
    }

    Label* continue_label() { return &continue_label_; }

   private:
    Label continue_label_;
  };

  // A nested block statement.
  class NestedBlock : public Breakable {
   public:
    NestedBlock(FullCodeGenerator* codegen, Block* block)
        : Breakable(codegen, block) {
    }

    NestedStatement* Exit(int* context_length) override {
      auto block_scope = statement()->AsBlock()->scope();
      if (block_scope != nullptr) {
        if (block_scope->ContextLocalCount() > 0) ++(*context_length);
      }
      return previous_;
    }
  };

  class DeferredCommands {
   public:
    enum Command { kReturn, kThrow, kBreak, kContinue };
    typedef int TokenId;
    struct DeferredCommand {
      Command command;
      TokenId token;
      Statement* target;
    };

    DeferredCommands(FullCodeGenerator* codegen, Label* finally_entry)
        : codegen_(codegen),
          commands_(codegen->zone()),
          return_token_(TokenDispenserForFinally::kInvalidToken),
          throw_token_(TokenDispenserForFinally::kInvalidToken),
          finally_entry_(finally_entry) {}

    void EmitCommands();

    void RecordBreak(Statement* target);
    void RecordContinue(Statement* target);
    void RecordReturn();
    void RecordThrow();
    void EmitFallThrough();

   private:
    MacroAssembler* masm() { return codegen_->masm(); }
    void EmitJumpToFinally(TokenId token);

    FullCodeGenerator* codegen_;
    ZoneVector<DeferredCommand> commands_;
    TokenDispenserForFinally dispenser_;
    TokenId return_token_;
    TokenId throw_token_;
    Label* finally_entry_;
  };
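
  // Illustrative sketch of the deferral protocol (assumed call sites, not
  // declarations): a 'return' inside try { ... } finally { ... } is not
  // performed directly. It records a token, jumps to the finally block, and
  // is re-dispatched after the finally body has run:
  //
  //   deferred_commands()->RecordReturn();  // remember the pending return and
  //                                         // jump to finally_entry_
  //   ...                                   // finally body executes here
  //   deferred_commands()->EmitCommands();  // switch on the recorded token and
  //                                         // perform the real return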

  // The try block of a try/finally statement.
  class TryFinally : public NestedStatement {
   public:
    TryFinally(FullCodeGenerator* codegen, DeferredCommands* commands)
        : NestedStatement(codegen), deferred_commands_(commands) {}

    NestedStatement* Exit(int* context_length) override;

    bool IsTryFinally() override { return true; }
    TryFinally* AsTryFinally() override { return this; }

    DeferredCommands* deferred_commands() { return deferred_commands_; }

   private:
    DeferredCommands* deferred_commands_;
  };

  // The body of a with or catch.
  class WithOrCatch : public NestedStatement {
   public:
    explicit WithOrCatch(FullCodeGenerator* codegen)
        : NestedStatement(codegen) {
    }

    NestedStatement* Exit(int* context_length) override {
      ++(*context_length);
      return previous_;
    }
  };

  // A platform-specific utility to overwrite the accumulator register
  // with a GC-safe value.
  void ClearAccumulator();

  // Determine whether or not to inline the smi case for the given
  // operation.
  bool ShouldInlineSmiCase(Token::Value op);

  // Helper function to convert a pure value into a test context.  The value
  // is expected on the stack or the accumulator, depending on the platform.
  // See the platform-specific implementation for details.
  void DoTest(Expression* condition,
              Label* if_true,
              Label* if_false,
              Label* fall_through);
  void DoTest(const TestContext* context);

  // Helper function to split control flow and avoid a branch to the
  // fall-through label if it is set up.
#if V8_TARGET_ARCH_MIPS
  void Split(Condition cc,
             Register lhs,
             const Operand&  rhs,
             Label* if_true,
             Label* if_false,
             Label* fall_through);
#elif V8_TARGET_ARCH_MIPS64
  void Split(Condition cc,
             Register lhs,
             const Operand&  rhs,
             Label* if_true,
             Label* if_false,
             Label* fall_through);
#elif V8_TARGET_ARCH_PPC
  void Split(Condition cc, Label* if_true, Label* if_false, Label* fall_through,
             CRegister cr = cr7);
#else  // All other arch.
  void Split(Condition cc,
             Label* if_true,
             Label* if_false,
             Label* fall_through);
#endif
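
  // Illustrative sketch of the common (non-MIPS, non-PPC) pattern; the exact
  // macro-assembler calls and condition names are platform-dependent and
  // assumed here: the caller sets the condition flags, then Split branches to
  // whichever of the three labels is not the fall-through:
  //
  //   __ CompareRoot(result_register(), Heap::kTrueValueRootIndex);
  //   Split(eq, if_true, if_false, fall_through);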

  // Load the value of a known (PARAMETER, LOCAL, or CONTEXT) variable into
  // a register.  Emits a context chain walk if necessary (so does SetVar),
  // so avoid calling both on the same variable.
  void GetVar(Register destination, Variable* var);

  // Assign to a known (PARAMETER, LOCAL, or CONTEXT) variable.  If it's in
  // the context, the write barrier will be emitted and source, scratch0,
  // scratch1 will be clobbered.  Emits a context chain walk if necessary
  // (so does GetVar), so avoid calling both on the same variable.
  void SetVar(Variable* var,
              Register source,
              Register scratch0,
              Register scratch1);

  // An operand used to read/write a stack-allocated (PARAMETER or LOCAL)
  // variable.  Writing does not need the write barrier.
  MemOperand StackOperand(Variable* var);

  // An operand used to read/write a known (PARAMETER, LOCAL, or CONTEXT)
  // variable.  May emit code to traverse the context chain, loading the
  // found context into the scratch register.  Writing to this operand will
  // need the write barrier if location is CONTEXT.
  MemOperand VarOperand(Variable* var, Register scratch);

  void VisitForEffect(Expression* expr) {
    if (FLAG_verify_operand_stack_depth) EmitOperandStackDepthCheck();
    EffectContext context(this);
    Visit(expr);
    PrepareForBailout(expr, BailoutState::NO_REGISTERS);
  }

  void VisitForAccumulatorValue(Expression* expr) {
    if (FLAG_verify_operand_stack_depth) EmitOperandStackDepthCheck();
    AccumulatorValueContext context(this);
    Visit(expr);
    PrepareForBailout(expr, BailoutState::TOS_REGISTER);
  }

  void VisitForStackValue(Expression* expr) {
    if (FLAG_verify_operand_stack_depth) EmitOperandStackDepthCheck();
    StackValueContext context(this);
    Visit(expr);
    PrepareForBailout(expr, BailoutState::NO_REGISTERS);
  }

  void VisitForControl(Expression* expr,
                       Label* if_true,
                       Label* if_false,
                       Label* fall_through) {
    if (FLAG_verify_operand_stack_depth) EmitOperandStackDepthCheck();
    TestContext context(this, expr, if_true, if_false, fall_through);
    Visit(expr);
    // For test contexts, we prepare for bailout before branching, not at
    // the end of the entire expression.  This happens as part of visiting
    // the expression.
  }
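
  // Illustrative sketch (assumed shape of an if-statement visitor, not part
  // of this interface): VisitForControl compiles a condition directly into
  // branches, so no boolean value needs to be materialized:
  //
  //   Label then_part, else_part, done;
  //   VisitForControl(stmt->condition(), &then_part, &else_part, &then_part);
  //   __ bind(&then_part);
  //   Visit(stmt->then_statement());
  //   __ jmp(&done);
  //   __ bind(&else_part);
  //   Visit(stmt->else_statement());
  //   __ bind(&done);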

  void VisitInDuplicateContext(Expression* expr);

  void VisitDeclarations(Declaration::List* declarations);
  void DeclareGlobals(Handle<FixedArray> pairs);
  int DeclareGlobalsFlags();

  // Push, pop or drop values onto/from the operand stack.
  void PushOperand(Register reg);
  void PopOperand(Register reg);
  void DropOperands(int count);

  // Convenience helpers for pushing onto the operand stack.
  void PushOperand(MemOperand operand);
  void PushOperand(Handle<Object> handle);
  void PushOperand(Smi* smi);

  // Convenience helpers for pushing/popping multiple operands.
  void PushOperands(Register reg1, Register reg2);
  void PushOperands(Register reg1, Register reg2, Register reg3);
  void PushOperands(Register reg1, Register reg2, Register reg3, Register reg4);
  void PopOperands(Register reg1, Register reg2);

  // Convenience helper for calling a runtime function that consumes arguments
  // from the operand stack (only usable for functions with known arity).
  void CallRuntimeWithOperands(Runtime::FunctionId function_id);
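
  // Illustrative sketch (the runtime function id is hypothetical, chosen only
  // for the example): arguments are pushed through the *Operand helpers so
  // the static depth tracking below stays in sync, then consumed in one call;
  // the helper is expected to decrement the tracked depth by the arity:
  //
  //   PushOperands(receiver, key);
  //   CallRuntimeWithOperands(Runtime::kSomeTwoArgumentFunction);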

  // Static tracking of the operand stack depth.
  void OperandStackDepthDecrement(int count);
  void OperandStackDepthIncrement(int count);

  // Generate debug code that verifies that our static tracking of the operand
  // stack depth is in sync with the actual operand stack during runtime.
  void EmitOperandStackDepthCheck();

  // Generate code to create an iterator result object.  The "value" property is
  // set to a value popped from the stack, and "done" is set according to the
  // argument.  The result object is left in the result register.
  void EmitCreateIteratorResult(bool done);

  // Try to perform a comparison as a fast inlined literal compare if
  // the operands allow it.  Returns true if the compare operation
  // has been matched and all code has been generated; false otherwise.
  bool TryLiteralCompare(CompareOperation* compare);

  // Platform-specific code for comparing the type of a value with
  // a given literal string.
  void EmitLiteralCompareTypeof(Expression* expr,
                                Expression* sub_expr,
                                Handle<String> check);

  // Platform-specific code for equality comparison with a nil-like value.
  void EmitLiteralCompareNil(CompareOperation* expr,
                             Expression* sub_expr,
                             NilValue nil);

  // Bailout support.
  void PrepareForBailout(Expression* node, Deoptimizer::BailoutState state);
  void PrepareForBailoutForId(BailoutId id, Deoptimizer::BailoutState state);

  // Returns a smi for the index into the FixedArray that backs the feedback
  // vector
  Smi* SmiFromSlot(FeedbackVectorSlot slot) const {
    return Smi::FromInt(TypeFeedbackVector::GetIndexFromSpec(
        literal()->feedback_vector_spec(), slot));
  }

  // Record a call's return site offset, used to rebuild the frame if the
  // called function was inlined at the site.
  void RecordJSReturnSite(Call* call);

  // Prepare for bailout before a test (or compare) and branch.  If
  // should_normalize, then the following comparison will not handle the
  // canonical JS true value so we will insert a (dead) test against true at
  // the actual bailout target from the optimized code. If not
  // should_normalize, the true and false labels are ignored.
  void PrepareForBailoutBeforeSplit(Expression* expr,
                                    bool should_normalize,
                                    Label* if_true,
                                    Label* if_false);

  // If enabled, emit debug code for checking that the current context is
  // neither a with nor a catch context.
  void EmitDebugCheckDeclarationContext(Variable* variable);

  // This is meant to be called at loop back edges. |back_edge_target| is
  // the jump target of the back edge and is used to approximate the amount
  // of code inside the loop.
  void EmitBackEdgeBookkeeping(IterationStatement* stmt,
                               Label* back_edge_target);
  // Record the OSR AST id corresponding to a back edge in the code.
  void RecordBackEdge(BailoutId osr_ast_id);
  // Emit a table of back edge ids, pcs and loop depths into the code stream.
  // Return the offset of the start of the table.
  unsigned EmitBackEdgeTable();

  void EmitProfilingCounterDecrement(int delta);
  void EmitProfilingCounterReset();

  // Emit code to pop values from the stack associated with nested statements
  // like try/catch and try/finally, running the finally blocks and unwinding
  // the handlers as needed. Also emits the return sequence if necessary (i.e.,
  // if the return is not delayed by a finally block).
  void EmitUnwindAndReturn();

  // Platform-specific return sequence
  void EmitReturnSequence();
  void EmitProfilingCounterHandlingForReturnSequence(bool is_tail_call);

  // Platform-specific code sequences for calls
  void EmitCall(Call* expr, ConvertReceiverMode = ConvertReceiverMode::kAny);
  void EmitSuperConstructorCall(Call* expr);
  void EmitCallWithLoadIC(Call* expr);
  void EmitSuperCallWithLoadIC(Call* expr);
  void EmitKeyedCallWithLoadIC(Call* expr, Expression* key);
  void EmitKeyedSuperCallWithLoadIC(Call* expr);
  void EmitPossiblyEvalCall(Call* expr);

#define FOR_EACH_FULL_CODE_INTRINSIC(F) \
  F(IsSmi)                              \
  F(IsArray)                            \
  F(IsTypedArray)                       \
  F(IsRegExp)                           \
  F(IsJSProxy)                          \
  F(Call)                               \
  F(NewObject)                          \
  F(IsJSReceiver)                       \
  F(GetSuperConstructor)                \
  F(DebugBreakInOptimizedCode)          \
  F(ClassOf)                            \
  F(StringCharCodeAt)                   \
  F(SubString)                          \
  F(RegExpExec)                         \
  F(ToInteger)                          \
  F(NumberToString)                     \
  F(ToString)                           \
  F(ToLength)                           \
  F(ToNumber)                           \
  F(ToObject)                           \
  F(DebugIsActive)                      \
  F(CreateIterResultObject)

#define GENERATOR_DECLARATION(Name) void Emit##Name(CallRuntime* call);
  FOR_EACH_FULL_CODE_INTRINSIC(GENERATOR_DECLARATION)
#undef GENERATOR_DECLARATION
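
  // The expansion above declares one emitter per intrinsic; for example, the
  // first few entries produce:
  //
  //   void EmitIsSmi(CallRuntime* call);
  //   void EmitIsArray(CallRuntime* call);
  //   void EmitIsTypedArray(CallRuntime* call);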

  void EmitIntrinsicAsStubCall(CallRuntime* expr, const Callable& callable);

  // Emits call to respective code stub.
  void EmitHasProperty();

  // Platform-specific code for restoring context from current JS frame.
  void RestoreContext();

  // Platform-specific code for loading variables.
  void EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                     TypeofMode typeof_mode, Label* slow);
  MemOperand ContextSlotOperandCheckExtensions(Variable* var, Label* slow);
  void EmitDynamicLookupFastCase(VariableProxy* proxy, TypeofMode typeof_mode,
                                 Label* slow, Label* done);
  void EmitGlobalVariableLoad(VariableProxy* proxy, TypeofMode typeof_mode);
  void EmitVariableLoad(VariableProxy* proxy,
                        TypeofMode typeof_mode = NOT_INSIDE_TYPEOF);

  void EmitAccessor(ObjectLiteralProperty* property);

  // Expects the arguments and the function already pushed.
  void EmitResolvePossiblyDirectEval(Call* expr);

  // Platform-specific support for allocating a new closure based on
  // the given function info.
  void EmitNewClosure(Handle<SharedFunctionInfo> info, bool pretenure);

  // Re-usable portions of CallRuntime
  void EmitLoadJSRuntimeFunction(CallRuntime* expr);
  void EmitCallJSRuntimeFunction(CallRuntime* expr);

  // Load a value from a named property.
  // The receiver is left on the stack by the IC.
  void EmitNamedPropertyLoad(Property* expr);

  // Load a value from super.named property.
  // Expect receiver ('this' value) and home_object on the stack.
  void EmitNamedSuperPropertyLoad(Property* expr);

  // Load a value from super[keyed] property.
  // Expect receiver ('this' value), home_object and key on the stack.
  void EmitKeyedSuperPropertyLoad(Property* expr);

  // Load a value from a keyed property.
  // The receiver and the key are left on the stack by the IC.
  void EmitKeyedPropertyLoad(Property* expr);

  // Adds the properties to the class (function) object and to its prototype.
  // Expects the class (function) in the accumulator. The class (function) is
  // in the accumulator after installing all the properties.
  void EmitClassDefineProperties(ClassLiteral* lit);

  // Pushes the property key as a Name on the stack.
  void EmitPropertyKey(LiteralProperty* property, BailoutId bailout_id);

  // Apply the compound assignment operator. Expects the left operand on top
  // of the stack and the right one in the accumulator.
  void EmitBinaryOp(BinaryOperation* expr, Token::Value op);

  // Helper functions for generating inlined smi code for certain
  // binary operations.
  void EmitInlineSmiBinaryOp(BinaryOperation* expr,
                             Token::Value op,
                             Expression* left,
                             Expression* right);

  // Assign to the given expression as if via '='. The right-hand-side value
  // is expected in the accumulator. slot is only used if FLAG_vector_stores
  // is true.
  void EmitAssignment(Expression* expr, FeedbackVectorSlot slot);

  // Complete a variable assignment.  The right-hand-side value is expected
  // in the accumulator.
  void EmitVariableAssignment(Variable* var, Token::Value op,
                              FeedbackVectorSlot slot,
                              HoleCheckMode hole_check_mode);

  // Helper functions to EmitVariableAssignment
  void EmitStoreToStackLocalOrContextSlot(Variable* var,
                                          MemOperand location);

  // Complete a named property assignment.  The receiver is expected on top
  // of the stack and the right-hand-side value in the accumulator.
  void EmitNamedPropertyAssignment(Assignment* expr);

  // Complete a super named property assignment. The right-hand-side value
  // is expected in the accumulator.
  void EmitNamedSuperPropertyStore(Property* prop);

  // Complete a super keyed property assignment. The right-hand-side value
  // is expected in the accumulator.
  void EmitKeyedSuperPropertyStore(Property* prop);

  // Complete a keyed property assignment.  The receiver and key are
  // expected on top of the stack and the right-hand-side value in the
  // accumulator.
  void EmitKeyedPropertyAssignment(Assignment* expr);

  static bool NeedsHomeObject(Expression* expr) {
    return FunctionLiteral::NeedsHomeObject(expr);
  }

  // Adds the [[HomeObject]] to |initializer| if it is a FunctionLiteral.
  // The value of the initializer is expected to be at the top of the stack.
  // |offset| is the offset in the stack where the home object can be found.
  void EmitSetHomeObject(Expression* initializer, int offset,
                         FeedbackVectorSlot slot);

  void EmitSetHomeObjectAccumulator(Expression* initializer, int offset,
                                    FeedbackVectorSlot slot);

  // Platform-specific code for loading a slot to a register.
  void EmitLoadSlot(Register destination, FeedbackVectorSlot slot);
  // Platform-specific code for pushing a slot to the stack.
  void EmitPushSlot(FeedbackVectorSlot slot);

  void CallIC(Handle<Code> code,
              TypeFeedbackId id = TypeFeedbackId::None());

  void CallLoadIC(FeedbackVectorSlot slot, Handle<Object> name);
  void CallStoreIC(FeedbackVectorSlot slot, Handle<Object> name);
  void CallKeyedStoreIC(FeedbackVectorSlot slot);

  void SetFunctionPosition(FunctionLiteral* fun);
  void SetReturnPosition(FunctionLiteral* fun);

  enum InsertBreak { INSERT_BREAK, SKIP_BREAK };

  // During stepping we want to be able to break at each statement, but not at
  // every (sub-)expression. That is why by default we insert breaks at every
  // statement position, but not at every expression position, unless stated
  // otherwise.
  void SetStatementPosition(Statement* stmt,
                            InsertBreak insert_break = INSERT_BREAK);
  void SetExpressionPosition(Expression* expr);

  // Consider an expression a statement. As such, we also insert a break.
  // This is used in loop headers where we want to break for each iteration.
  void SetExpressionAsStatementPosition(Expression* expr);

  void SetCallPosition(Expression* expr,
                       TailCallMode tail_call_mode = TailCallMode::kDisallow);

  void SetConstructCallPosition(Expression* expr) {
    // Currently call and construct calls are treated the same wrt debugging.
    SetCallPosition(expr);
  }

  void RecordStatementPosition(int pos);
  void RecordPosition(int pos);

  // Non-local control flow support.
  void EnterTryBlock(int handler_index, Label* handler,
                     HandlerTable::CatchPrediction catch_prediction);
  void ExitTryBlock(int handler_index);
  void EnterFinallyBlock();
  void ExitFinallyBlock();
  void ClearPendingMessage();

  void EmitContinue(Statement* target);
  void EmitBreak(Statement* target);

  // Loop nesting counter.
  int loop_depth() { return loop_depth_; }
  void increment_loop_depth() { loop_depth_++; }
  void decrement_loop_depth() {
    DCHECK(loop_depth_ > 0);
    loop_depth_--;
  }

  MacroAssembler* masm() const { return masm_; }

  class ExpressionContext;
  const ExpressionContext* context() { return context_; }
  void set_new_context(const ExpressionContext* context) { context_ = context; }

  Isolate* isolate() const { return isolate_; }
  Zone* zone() const { return zone_; }
  Handle<Script> script();
  LanguageMode language_mode();
  bool has_simple_parameters();
  FunctionLiteral* literal() const;
  Scope* scope() { return scope_; }

  static Register context_register();

  // Get fields from the stack frame. Offsets are the frame pointer relative
  // offsets defined in, e.g., StandardFrameConstants.
  void LoadFromFrameField(int frame_offset, Register value);
  // Set fields in the stack frame. Offsets are the frame pointer relative
  // offsets defined in, e.g., StandardFrameConstants.
  void StoreToFrameField(int frame_offset, Register value);

  // Load a value from the current context. Indices are defined as an enum
  // in v8::internal::Context.
  void LoadContextField(Register dst, int context_index);

  // Push the function argument for the runtime functions PushWithContext
  // and PushCatchContext.
  void PushFunctionArgumentForContextAllocation();

  void PushCalleeAndWithBaseObject(Call* expr);

  // AST node visit functions.
#define DECLARE_VISIT(type) void Visit##type(type* node);
  AST_NODE_LIST(DECLARE_VISIT)
#undef DECLARE_VISIT
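
  // The expansion above declares one visitor per AST node type, e.g.:
  //
  //   void VisitBlock(Block* node);
  //   void VisitIfStatement(IfStatement* node);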

  void VisitComma(BinaryOperation* expr);
  void VisitLogicalExpression(BinaryOperation* expr);
  void VisitArithmeticExpression(BinaryOperation* expr);

  void VisitForTypeofValue(Expression* expr);

  void Generate();
  void PopulateDeoptimizationData(Handle<Code> code);
  void PopulateTypeFeedbackInfo(Handle<Code> code);
  void PopulateHandlerTable(Handle<Code> code);

  bool MustCreateObjectLiteralWithRuntime(ObjectLiteral* expr) const;
  bool MustCreateArrayLiteralWithRuntime(ArrayLiteral* expr) const;

  int NewHandlerTableEntry();

  struct BailoutEntry {
    BailoutId id;
    unsigned pc_and_state;
  };

  struct BackEdgeEntry {
    BailoutId id;
    unsigned pc;
    uint32_t loop_depth;
  };

  struct HandlerTableEntry {
    unsigned range_start;
    unsigned range_end;
    unsigned handler_offset;
    int stack_depth;
    HandlerTable::CatchPrediction catch_prediction;
  };

  class ExpressionContext BASE_EMBEDDED {
   public:
    explicit ExpressionContext(FullCodeGenerator* codegen)
        : masm_(codegen->masm()), old_(codegen->context()), codegen_(codegen) {
      codegen->set_new_context(this);
    }

    virtual ~ExpressionContext() {
      codegen_->set_new_context(old_);
    }

    Isolate* isolate() const { return codegen_->isolate(); }

    // Convert constant control flow (true or false) to the result expected for
    // this expression context.
    virtual void Plug(bool flag) const = 0;

    // Emit code to convert a pure value (in a register, known variable
    // location, as a literal, or on top of the stack) into the result
    // expected according to this expression context.
    virtual void Plug(Register reg) const = 0;
    virtual void Plug(Variable* var) const = 0;
    virtual void Plug(Handle<Object> lit) const = 0;
    virtual void Plug(Heap::RootListIndex index) const = 0;
    virtual void PlugTOS() const = 0;

    // Emit code to convert pure control flow (a pair of unbound labels) into
    // the result expected according to this expression context.  The
    // implementation will bind both labels unless it's a TestContext, which
    // won't bind them at this point.
    virtual void Plug(Label* materialize_true,
                      Label* materialize_false) const = 0;

    // Emit code to discard count elements from the top of stack, then convert
    // a pure value into the result expected according to this expression
    // context.
    virtual void DropAndPlug(int count, Register reg) const = 0;

    // Set up branch labels for a test expression.  The three Label** parameters
    // are output parameters.
    virtual void PrepareTest(Label* materialize_true,
                             Label* materialize_false,
                             Label** if_true,
                             Label** if_false,
                             Label** fall_through) const = 0;

    // Returns true if we are evaluating only for side effects (i.e. if the
    // result will be discarded).
    virtual bool IsEffect() const { return false; }

    // Returns true if we are evaluating for the value (in the accumulator or
    // on the stack).
    virtual bool IsAccumulatorValue() const { return false; }
    virtual bool IsStackValue() const { return false; }

    // Returns true if we are branching on the value rather than materializing
    // it.  Only used for asserts.
    virtual bool IsTest() const { return false; }

   protected:
    FullCodeGenerator* codegen() const { return codegen_; }
    MacroAssembler* masm() const { return masm_; }
    MacroAssembler* masm_;

   private:
    const ExpressionContext* old_;
    FullCodeGenerator* codegen_;
  };

  class AccumulatorValueContext : public ExpressionContext {
   public:
    explicit AccumulatorValueContext(FullCodeGenerator* codegen)
        : ExpressionContext(codegen) { }

    void Plug(bool flag) const override;
    void Plug(Register reg) const override;
    void Plug(Label* materialize_true, Label* materialize_false) const override;
    void Plug(Variable* var) const override;
    void Plug(Handle<Object> lit) const override;
    void Plug(Heap::RootListIndex) const override;
    void PlugTOS() const override;
    void DropAndPlug(int count, Register reg) const override;
    void PrepareTest(Label* materialize_true, Label* materialize_false,
                     Label** if_true, Label** if_false,
                     Label** fall_through) const override;
    bool IsAccumulatorValue() const override { return true; }
  };

  class StackValueContext : public ExpressionContext {
   public:
    explicit StackValueContext(FullCodeGenerator* codegen)
        : ExpressionContext(codegen) { }

    void Plug(bool flag) const override;
    void Plug(Register reg) const override;
    void Plug(Label* materialize_true, Label* materialize_false) const override;
    void Plug(Variable* var) const override;
    void Plug(Handle<Object> lit) const override;
    void Plug(Heap::RootListIndex) const override;
    void PlugTOS() const override;
    void DropAndPlug(int count, Register reg) const override;
    void PrepareTest(Label* materialize_true, Label* materialize_false,
                     Label** if_true, Label** if_false,
                     Label** fall_through) const override;
    bool IsStackValue() const override { return true; }
  };

  class TestContext : public ExpressionContext {
   public:
    TestContext(FullCodeGenerator* codegen,
                Expression* condition,
                Label* true_label,
                Label* false_label,
                Label* fall_through)
        : ExpressionContext(codegen),
          condition_(condition),
          true_label_(true_label),
          false_label_(false_label),
          fall_through_(fall_through) { }

    static const TestContext* cast(const ExpressionContext* context) {
      DCHECK(context->IsTest());
      return reinterpret_cast<const TestContext*>(context);
    }

    Expression* condition() const { return condition_; }
    Label* true_label() const { return true_label_; }
    Label* false_label() const { return false_label_; }
    Label* fall_through() const { return fall_through_; }

    void Plug(bool flag) const override;
    void Plug(Register reg) const override;
    void Plug(Label* materialize_true, Label* materialize_false) const override;
    void Plug(Variable* var) const override;
    void Plug(Handle<Object> lit) const override;
    void Plug(Heap::RootListIndex) const override;
    void PlugTOS() const override;
    void DropAndPlug(int count, Register reg) const override;
    void PrepareTest(Label* materialize_true, Label* materialize_false,
                     Label** if_true, Label** if_false,
                     Label** fall_through) const override;
    bool IsTest() const override { return true; }

   private:
    Expression* condition_;
    Label* true_label_;
    Label* false_label_;
    Label* fall_through_;
  };

  class EffectContext : public ExpressionContext {
   public:
    explicit EffectContext(FullCodeGenerator* codegen)
        : ExpressionContext(codegen) { }

    void Plug(bool flag) const override;
    void Plug(Register reg) const override;
    void Plug(Label* materialize_true, Label* materialize_false) const override;
    void Plug(Variable* var) const override;
    void Plug(Handle<Object> lit) const override;
    void Plug(Heap::RootListIndex) const override;
    void PlugTOS() const override;
    void DropAndPlug(int count, Register reg) const override;
    void PrepareTest(Label* materialize_true, Label* materialize_false,
                     Label** if_true, Label** if_false,
                     Label** fall_through) const override;
    bool IsEffect() const override { return true; }
  };

  class EnterBlockScopeIfNeeded {
   public:
    EnterBlockScopeIfNeeded(FullCodeGenerator* codegen, Scope* scope,
                            BailoutId entry_id, BailoutId declarations_id,
                            BailoutId exit_id);
    ~EnterBlockScopeIfNeeded();

   private:
    MacroAssembler* masm() const { return codegen_->masm(); }

    FullCodeGenerator* codegen_;
    Scope* saved_scope_;
    BailoutId exit_id_;
    bool needs_block_context_;
  };

  MacroAssembler* masm_;
  CompilationInfo* info_;
  Isolate* isolate_;
  Zone* zone_;
  Scope* scope_;
  Label return_label_;
  NestedStatement* nesting_stack_;
  int loop_depth_;
  int operand_stack_depth_;
  ZoneList<Handle<Object> >* globals_;
  const ExpressionContext* context_;
  ZoneList<BailoutEntry> bailout_entries_;
  ZoneList<BackEdgeEntry> back_edges_;
  ZoneVector<HandlerTableEntry> handler_table_;
  SourcePositionTableBuilder source_position_table_builder_;
  int ic_total_count_;
  Handle<Cell> profiling_counter_;

  friend class NestedStatement;

  DEFINE_AST_VISITOR_SUBCLASS_MEMBERS();
  DISALLOW_COPY_AND_ASSIGN(FullCodeGenerator);
};


class BackEdgeTable {
 public:
  BackEdgeTable(Code* code, DisallowHeapAllocation* required) {
    DCHECK(code->kind() == Code::FUNCTION);
    instruction_start_ = code->instruction_start();
    Address table_address = instruction_start_ + code->back_edge_table_offset();
    length_ = Memory::uint32_at(table_address);
    start_ = table_address + kTableLengthSize;
  }

  uint32_t length() { return length_; }

  BailoutId ast_id(uint32_t index) {
    return BailoutId(static_cast<int>(
        Memory::uint32_at(entry_at(index) + kAstIdOffset)));
  }

  uint32_t loop_depth(uint32_t index) {
    return Memory::uint32_at(entry_at(index) + kLoopDepthOffset);
  }

  uint32_t pc_offset(uint32_t index) {
    return Memory::uint32_at(entry_at(index) + kPcOffsetOffset);
  }

  Address pc(uint32_t index) {
    return instruction_start_ + pc_offset(index);
  }

  enum BackEdgeState { INTERRUPT, ON_STACK_REPLACEMENT };

  // Increase allowed loop nesting level by one and patch those matching loops.
  static void Patch(Isolate* isolate, Code* unoptimized_code);

  // Patch the back edge to the target state, provided the correct callee.
  static void PatchAt(Code* unoptimized_code,
                      Address pc,
                      BackEdgeState target_state,
                      Code* replacement_code);

  // Change all patched back edges back to normal interrupts.
  static void Revert(Isolate* isolate,
                     Code* unoptimized_code);

  // Return the current patch state of the back edge.
  static BackEdgeState GetBackEdgeState(Isolate* isolate,
                                        Code* unoptimized_code,
                                        Address pc_after);

#ifdef DEBUG
  // Verify that all back edges of a certain loop depth are patched.
  static bool Verify(Isolate* isolate, Code* unoptimized_code);
#endif  // DEBUG

 private:
  Address entry_at(uint32_t index) {
    DCHECK(index < length_);
    return start_ + index * kEntrySize;
  }

  static const int kTableLengthSize = kIntSize;
  static const int kAstIdOffset = 0 * kIntSize;
  static const int kPcOffsetOffset = 1 * kIntSize;
  static const int kLoopDepthOffset = 2 * kIntSize;
  static const int kEntrySize = 3 * kIntSize;
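
  // Layout sketch of the table read above: a uint32 entry count followed by
  // {length_} fixed-size entries of three uint32 words each,
  //
  //   [ length | ast_id(0) pc_offset(0) loop_depth(0) | ast_id(1) ... ]
  //
  // so entry_at(i) is start_ + i * kEntrySize and the accessors read the
  // individual words at kAstIdOffset, kPcOffsetOffset and kLoopDepthOffset.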

  Address start_;
  Address instruction_start_;
  uint32_t length_;
};


}  // namespace internal
}  // namespace v8

#endif  // V8_FULL_CODEGEN_FULL_CODEGEN_H_