// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_X64_LITHIUM_CODEGEN_X64_H_
#define V8_X64_LITHIUM_CODEGEN_X64_H_

#include "src/x64/lithium-x64.h"

#include "src/base/logging.h"
#include "src/deoptimizer.h"
#include "src/lithium-codegen.h"
#include "src/safepoint-table.h"
#include "src/scopes.h"
#include "src/utils.h"
#include "src/x64/lithium-gap-resolver-x64.h"

namespace v8 {
namespace internal {

// Forward declarations.
class LDeferredCode;
class SafepointGenerator;

class LCodeGen: public LCodeGenBase {
 public:
  LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
      : LCodeGenBase(chunk, assembler, info),
        deoptimizations_(4, info->zone()),
        jump_table_(4, info->zone()),
        deoptimization_literals_(8, info->zone()),
        inlined_function_count_(0),
        scope_(info->scope()),
        translations_(info->zone()),
        deferred_(8, info->zone()),
        osr_pc_offset_(-1),
        frame_is_built_(false),
        safepoints_(info->zone()),
        resolver_(this),
        expected_safepoint_kind_(Safepoint::kSimple) {
    PopulateDeoptimizationLiteralsWithInlinedFunctions();
  }

  int LookupDestination(int block_id) const {
    return chunk()->LookupDestination(block_id);
  }

  bool IsNextEmittedBlock(int block_id) const {
    return LookupDestination(block_id) == GetNextEmittedBlock();
  }

  bool NeedsEagerFrame() const {
    return GetStackSlotCount() > 0 ||
        info()->is_non_deferred_calling() ||
        !info()->IsStub() ||
        info()->requires_frame();
  }
  bool NeedsDeferredFrame() const {
    return !NeedsEagerFrame() && info()->is_deferred_calling();
  }

  // Support for converting LOperands to assembler types.
  Register ToRegister(LOperand* op) const;
  XMMRegister ToDoubleRegister(LOperand* op) const;
  bool IsInteger32Constant(LConstantOperand* op) const;
  bool IsDehoistedKeyConstant(LConstantOperand* op) const;
  bool IsSmiConstant(LConstantOperand* op) const;
  int32_t ToRepresentation(LConstantOperand* op, const Representation& r) const;
  int32_t ToInteger32(LConstantOperand* op) const;
  Smi* ToSmi(LConstantOperand* op) const;
  double ToDouble(LConstantOperand* op) const;
  ExternalReference ToExternalReference(LConstantOperand* op) const;
  Handle<Object> ToHandle(LConstantOperand* op) const;
  Operand ToOperand(LOperand* op) const;

  // Try to generate code for the entire chunk, but it may fail if the
  // chunk contains constructs we cannot handle. Returns true if the
  // code generation attempt succeeded.
  bool GenerateCode();

  // Finish the code by setting stack height, safepoint, and bailout
  // information on it.
  void FinishCode(Handle<Code> code);

  // Deferred code support.
  void DoDeferredNumberTagD(LNumberTagD* instr);

  enum IntegerSignedness { SIGNED_INT32, UNSIGNED_INT32 };
  void DoDeferredNumberTagIU(LInstruction* instr,
                             LOperand* value,
                             LOperand* temp1,
                             LOperand* temp2,
                             IntegerSignedness signedness);

  void DoDeferredTaggedToI(LTaggedToI* instr, Label* done);
  void DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr);
  void DoDeferredStackCheck(LStackCheck* instr);
  void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
  void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
  void DoDeferredAllocate(LAllocate* instr);
  void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
                                       Label* map_check);
  void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
  void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr,
                                   Register object,
                                   Register index);

  // Parallel move support.
  void DoParallelMove(LParallelMove* move);
  void DoGap(LGap* instr);

  // Emit frame translation commands for an environment.
  void WriteTranslation(LEnvironment* environment, Translation* translation);

  // Declare methods that deal with the individual node types.
#define DECLARE_DO(type) void Do##type(L##type* node);
  LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO)
#undef DECLARE_DO
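
  // To make the expansion above concrete: for a list entry such as AddI,
  // DECLARE_DO would declare
  //
  //   void DoAddI(LAddI* node);
  //
  // i.e. one DoXxx method for each concrete Lithium instruction in
  // LITHIUM_CONCRETE_INSTRUCTION_LIST.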

 private:
  StrictMode strict_mode() const { return info()->strict_mode(); }

  LPlatformChunk* chunk() const { return chunk_; }
  Scope* scope() const { return scope_; }
  HGraph* graph() const { return chunk()->graph(); }

  XMMRegister double_scratch0() const { return xmm0; }

  void EmitClassOfTest(Label* if_true,
                       Label* if_false,
                       Handle<String> class_name,
                       Register input,
                       Register temporary,
                       Register scratch);

  int GetStackSlotCount() const { return chunk()->spill_slot_count(); }

  void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }


  void SaveCallerDoubles();
  void RestoreCallerDoubles();

  // Code generation passes.  Returns true if code generation should
  // continue.
  void GenerateBodyInstructionPre(LInstruction* instr) OVERRIDE;
  void GenerateBodyInstructionPost(LInstruction* instr) OVERRIDE;
  bool GeneratePrologue();
  bool GenerateDeferredCode();
  bool GenerateJumpTable();
  bool GenerateSafepointTable();

  // Generates the custom OSR entrypoint and sets the osr_pc_offset.
  void GenerateOsrPrologue();

  enum SafepointMode {
    RECORD_SIMPLE_SAFEPOINT,
    RECORD_SAFEPOINT_WITH_REGISTERS
  };

  void CallCodeGeneric(Handle<Code> code,
                       RelocInfo::Mode mode,
                       LInstruction* instr,
                       SafepointMode safepoint_mode,
                       int argc);


  void CallCode(Handle<Code> code,
                RelocInfo::Mode mode,
                LInstruction* instr);

  void CallRuntime(const Runtime::Function* function,
                   int num_arguments,
                   LInstruction* instr,
                   SaveFPRegsMode save_doubles = kDontSaveFPRegs);

  void CallRuntime(Runtime::FunctionId id,
                   int num_arguments,
                   LInstruction* instr) {
    const Runtime::Function* function = Runtime::FunctionForId(id);
    CallRuntime(function, num_arguments, instr);
  }

  void CallRuntimeFromDeferred(Runtime::FunctionId id,
                               int argc,
                               LInstruction* instr,
                               LOperand* context);

  void LoadContextFromDeferred(LOperand* context);

  enum RDIState {
    RDI_UNINITIALIZED,
    RDI_CONTAINS_TARGET
  };

  // Generate a direct call to a known function.  Expects the function
  // to be in rdi.
  void CallKnownFunction(Handle<JSFunction> function,
                         int formal_parameter_count,
                         int arity,
                         LInstruction* instr,
                         RDIState rdi_state);

  void RecordSafepointWithLazyDeopt(LInstruction* instr,
                                    SafepointMode safepoint_mode,
                                    int argc);
  void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
                                            Safepoint::DeoptMode mode);
  void DeoptimizeIf(Condition cc, LInstruction* instr, const char* detail,
                    Deoptimizer::BailoutType bailout_type);
  void DeoptimizeIf(Condition cc, LInstruction* instr, const char* detail);

  bool DeoptEveryNTimes() {
    return FLAG_deopt_every_n_times != 0 && !info()->IsStub();
  }

  void AddToTranslation(LEnvironment* environment,
                        Translation* translation,
                        LOperand* op,
                        bool is_tagged,
                        bool is_uint32,
                        int* object_index_pointer,
                        int* dematerialized_index_pointer);
  void PopulateDeoptimizationData(Handle<Code> code);
  int DefineDeoptimizationLiteral(Handle<Object> literal);

  void PopulateDeoptimizationLiteralsWithInlinedFunctions();

  Register ToRegister(int index) const;
  XMMRegister ToDoubleRegister(int index) const;
  Operand BuildFastArrayOperand(
      LOperand* elements_pointer,
      LOperand* key,
      Representation key_representation,
      ElementsKind elements_kind,
      uint32_t base_offset);

  Operand BuildSeqStringOperand(Register string,
                                LOperand* index,
                                String::Encoding encoding);

  void EmitIntegerMathAbs(LMathAbs* instr);
  void EmitSmiMathAbs(LMathAbs* instr);

  // Support for recording safepoint and position information.
  void RecordSafepoint(LPointerMap* pointers,
                       Safepoint::Kind kind,
                       int arguments,
                       Safepoint::DeoptMode mode);
  void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode);
  void RecordSafepoint(Safepoint::DeoptMode mode);
  void RecordSafepointWithRegisters(LPointerMap* pointers,
                                    int arguments,
                                    Safepoint::DeoptMode mode);
  void RecordAndWritePosition(int position) OVERRIDE;

  static Condition TokenToCondition(Token::Value op, bool is_unsigned);
  void EmitGoto(int block);

  // EmitBranch expects to be the last instruction of a block.
  template<class InstrType>
  void EmitBranch(InstrType instr, Condition cc);
  template<class InstrType>
  void EmitFalseBranch(InstrType instr, Condition cc);
  void EmitNumberUntagD(LNumberUntagD* instr, Register input,
                        XMMRegister result, NumberUntagDMode mode);

  // Emits optimized code for typeof x == "y".  Modifies input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitTypeofIs(LTypeofIsAndBranch* instr, Register input);

  // Emits optimized code for %_IsObject(x).  Preserves input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitIsObject(Register input,
                         Label* is_not_object,
                         Label* is_object);

  // Emits optimized code for %_IsString(x).  Preserves input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitIsString(Register input,
                         Register temp1,
                         Label* is_not_string,
                         SmiCheck check_needed);

  // Emits optimized code for %_IsConstructCall().
  // Caller should branch on equal condition.
  void EmitIsConstructCall(Register temp);

  // Emits code for pushing either a tagged constant, a (non-double)
  // register, or a stack slot operand.
  void EmitPushTaggedOperand(LOperand* operand);

  // Emits optimized code to deep-copy the contents of statically known
  // object graphs (e.g. object literal boilerplate).
  void EmitDeepCopy(Handle<JSObject> object,
                    Register result,
                    Register source,
                    int* offset,
                    AllocationSiteMode mode);

  void EnsureSpaceForLazyDeopt(int space_needed) OVERRIDE;
  void DoLoadKeyedExternalArray(LLoadKeyed* instr);
  void DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr);
  void DoLoadKeyedFixedArray(LLoadKeyed* instr);
  void DoStoreKeyedExternalArray(LStoreKeyed* instr);
  void DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr);
  void DoStoreKeyedFixedArray(LStoreKeyed* instr);

  template <class T>
  void EmitVectorLoadICRegisters(T* instr);

#ifdef _MSC_VER
  // On Windows, you may not access the stack more than one page below
  // the most recently mapped page. To make the allocated area randomly
  // accessible, we write an arbitrary value to each page in range
  // rsp + offset - page_size .. rsp in turn.
  void MakeSureStackPagesMapped(int offset);
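  // A rough sketch of the probing loop this implies (the real body lives in
  // lithium-codegen-x64.cc; the exact page size and scratch register used
  // here are assumptions):
  //
  //   for (offset -= kPageSize; offset > 0; offset -= kPageSize) {
  //     __ movp(Operand(rsp, offset), rax);
  //   }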
#endif

  ZoneList<LEnvironment*> deoptimizations_;
  ZoneList<Deoptimizer::JumpTableEntry> jump_table_;
  ZoneList<Handle<Object> > deoptimization_literals_;
  int inlined_function_count_;
  Scope* const scope_;
  TranslationBuffer translations_;
  ZoneList<LDeferredCode*> deferred_;
  int osr_pc_offset_;
  bool frame_is_built_;

  // Builder that keeps track of safepoints in the code. The table
  // itself is emitted at the end of the generated code.
  SafepointTableBuilder safepoints_;

  // Compiles a set of parallel moves into a sequential list of moves.
  LGapResolver resolver_;

  Safepoint::Kind expected_safepoint_kind_;

  class PushSafepointRegistersScope FINAL BASE_EMBEDDED {
   public:
    explicit PushSafepointRegistersScope(LCodeGen* codegen)
        : codegen_(codegen) {
      DCHECK(codegen_->info()->is_calling());
      DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kSimple);
      codegen_->masm_->PushSafepointRegisters();
      codegen_->expected_safepoint_kind_ = Safepoint::kWithRegisters;
    }

    ~PushSafepointRegistersScope() {
      DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kWithRegisters);
      codegen_->masm_->PopSafepointRegisters();
      codegen_->expected_safepoint_kind_ = Safepoint::kSimple;
    }

   private:
    LCodeGen* codegen_;
  };
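
  // A minimal sketch of how this scope is typically used from deferred code;
  // the protected call and the argument count are illustrative only:
  //
  //   {
  //     PushSafepointRegistersScope scope(this);
  //     // ... emit a call that can trigger GC or lazy deoptimization ...
  //     RecordSafepointWithRegisters(instr->pointer_map(), 0,
  //                                  Safepoint::kNoLazyDeopt);
  //   }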

  friend class LDeferredCode;
  friend class LEnvironment;
  friend class SafepointGenerator;
  DISALLOW_COPY_AND_ASSIGN(LCodeGen);
};


class LDeferredCode: public ZoneObject {
 public:
  explicit LDeferredCode(LCodeGen* codegen)
      : codegen_(codegen),
        external_exit_(NULL),
        instruction_index_(codegen->current_instruction_) {
    codegen->AddDeferredCode(this);
  }

  virtual ~LDeferredCode() {}
  virtual void Generate() = 0;
  virtual LInstruction* instr() = 0;

  void SetExit(Label* exit) { external_exit_ = exit; }
  Label* entry() { return &entry_; }
  Label* exit() { return external_exit_ != NULL ? external_exit_ : &exit_; }
  Label* done() { return codegen_->NeedsDeferredFrame() ? &done_ : exit(); }
  int instruction_index() const { return instruction_index_; }

 protected:
  LCodeGen* codegen() const { return codegen_; }
  MacroAssembler* masm() const { return codegen_->masm(); }

 private:
  LCodeGen* codegen_;
  Label entry_;
  Label exit_;
  Label done_;
  Label* external_exit_;
  int instruction_index_;
};
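
// A minimal sketch of a concrete deferred-code helper, modeled on the local
// classes defined in lithium-codegen-x64.cc; pairing it with LNumberTagD and
// DoDeferredNumberTagD is just one representative choice:
//
//   class DeferredNumberTagD FINAL : public LDeferredCode {
//    public:
//     DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr)
//         : LDeferredCode(codegen), instr_(instr) {}
//     void Generate() OVERRIDE { codegen()->DoDeferredNumberTagD(instr_); }
//     LInstruction* instr() OVERRIDE { return instr_; }
//    private:
//     LNumberTagD* instr_;
//   };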

} }  // namespace v8::internal

#endif  // V8_X64_LITHIUM_CODEGEN_X64_H_