// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_ARM_LITHIUM_CODEGEN_ARM_H_
#define V8_ARM_LITHIUM_CODEGEN_ARM_H_

#include "src/arm/lithium-arm.h"

#include "src/arm/lithium-gap-resolver-arm.h"
#include "src/deoptimizer.h"
#include "src/lithium-codegen.h"
#include "src/safepoint-table.h"
#include "src/scopes.h"
#include "src/utils.h"

namespace v8 {
namespace internal {

// Forward declarations.
class LDeferredCode;
class SafepointGenerator;

class LCodeGen: public LCodeGenBase {
 public:
  LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
      : LCodeGenBase(chunk, assembler, info),
        deoptimizations_(4, info->zone()),
        jump_table_(4, info->zone()),
        deoptimization_literals_(8, info->zone()),
        inlined_function_count_(0),
        scope_(info->scope()),
        translations_(info->zone()),
        deferred_(8, info->zone()),
        osr_pc_offset_(-1),
        frame_is_built_(false),
        safepoints_(info->zone()),
        resolver_(this),
        expected_safepoint_kind_(Safepoint::kSimple) {
    PopulateDeoptimizationLiteralsWithInlinedFunctions();
  }


  int LookupDestination(int block_id) const {
    return chunk()->LookupDestination(block_id);
  }

  bool IsNextEmittedBlock(int block_id) const {
    return LookupDestination(block_id) == GetNextEmittedBlock();
  }

  bool NeedsEagerFrame() const {
    return GetStackSlotCount() > 0 ||
        info()->is_non_deferred_calling() ||
        !info()->IsStub() ||
        info()->requires_frame();
  }
  bool NeedsDeferredFrame() const {
    return !NeedsEagerFrame() && info()->is_deferred_calling();
  }

  LinkRegisterStatus GetLinkRegisterState() const {
    return frame_is_built_ ? kLRHasBeenSaved : kLRHasNotBeenSaved;
  }

  // Support for converting LOperands to assembler types.
  // LOperand must be a register.
  Register ToRegister(LOperand* op) const;

  // LOperand is loaded into scratch, unless already a register.
  Register EmitLoadRegister(LOperand* op, Register scratch);

  // LOperand must be a double register.
  DwVfpRegister ToDoubleRegister(LOperand* op) const;

  // LOperand is loaded into dbl_scratch, unless already a double register.
  DwVfpRegister EmitLoadDoubleRegister(LOperand* op,
                                       SwVfpRegister flt_scratch,
                                       DwVfpRegister dbl_scratch);
  int32_t ToRepresentation(LConstantOperand* op, const Representation& r) const;
  int32_t ToInteger32(LConstantOperand* op) const;
  Smi* ToSmi(LConstantOperand* op) const;
  double ToDouble(LConstantOperand* op) const;
  Operand ToOperand(LOperand* op);
  MemOperand ToMemOperand(LOperand* op) const;
  // Returns a MemOperand pointing to the high word of a DoubleStackSlot.
  MemOperand ToHighMemOperand(LOperand* op) const;

  bool IsInteger32(LConstantOperand* op) const;
  bool IsSmi(LConstantOperand* op) const;
  Handle<Object> ToHandle(LConstantOperand* op) const;
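
  // Illustrative sketch (not part of the original header): instruction
  // handlers in lithium-codegen-arm.cc convert their LOperands with the
  // helpers above before emitting code. The handler and LHypotheticalAdd
  // instruction below are hypothetical names; "__" is the .cc file's usual
  // masm() macro.
  //
  //   void LCodeGen::DoHypotheticalAdd(LHypotheticalAdd* instr) {
  //     Register result = ToRegister(instr->result());
  //     Register left = ToRegister(instr->left());
  //     __ add(result, left, ToOperand(instr->right()));
  //   }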

  // Try to generate code for the entire chunk, but it may fail if the
  // chunk contains constructs we cannot handle. Returns true if the
  // code generation attempt succeeded.
  bool GenerateCode();

  // Finish the code by setting stack height, safepoint, and bailout
  // information on it.
  void FinishCode(Handle<Code> code);

  // Deferred code support.
  void DoDeferredNumberTagD(LNumberTagD* instr);

  enum IntegerSignedness { SIGNED_INT32, UNSIGNED_INT32 };
  void DoDeferredNumberTagIU(LInstruction* instr,
                             LOperand* value,
                             LOperand* temp1,
                             LOperand* temp2,
                             IntegerSignedness signedness);

  void DoDeferredTaggedToI(LTaggedToI* instr);
  void DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr);
  void DoDeferredStackCheck(LStackCheck* instr);
  void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
  void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
  void DoDeferredAllocate(LAllocate* instr);
  void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
                                       Label* map_check, Label* bool_load);
  void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
  void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr,
                                   Register result,
                                   Register object,
                                   Register index);

  // Parallel move support.
  void DoParallelMove(LParallelMove* move);
  void DoGap(LGap* instr);

  MemOperand PrepareKeyedOperand(Register key,
                                 Register base,
                                 bool key_is_constant,
                                 int constant_key,
                                 int element_size,
                                 int shift_size,
                                 int base_offset);

  // Emit frame translation commands for an environment.
  void WriteTranslation(LEnvironment* environment, Translation* translation);

  // Declare methods that deal with the individual node types.
#define DECLARE_DO(type) void Do##type(L##type* node);
  LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO)
#undef DECLARE_DO

 private:
  StrictMode strict_mode() const { return info()->strict_mode(); }

  Scope* scope() const { return scope_; }

  Register scratch0() { return r9; }
  LowDwVfpRegister double_scratch0() { return kScratchDoubleReg; }

  LInstruction* GetNextInstruction();

  void EmitClassOfTest(Label* if_true,
                       Label* if_false,
                       Handle<String> class_name,
                       Register input,
                       Register temporary,
                       Register temporary2);

  int GetStackSlotCount() const { return chunk()->spill_slot_count(); }

  void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }

  void SaveCallerDoubles();
  void RestoreCallerDoubles();

  // Code generation passes.  Returns true if code generation should
  // continue.
  void GenerateBodyInstructionPre(LInstruction* instr) OVERRIDE;
  bool GeneratePrologue();
  bool GenerateDeferredCode();
  bool GenerateJumpTable();
  bool GenerateSafepointTable();

  // Generates the custom OSR entrypoint and sets the osr_pc_offset.
  void GenerateOsrPrologue();

  enum SafepointMode {
    RECORD_SIMPLE_SAFEPOINT,
    RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS
  };

  int CallCodeSize(Handle<Code> code, RelocInfo::Mode mode);

  void CallCode(
      Handle<Code> code,
      RelocInfo::Mode mode,
      LInstruction* instr,
      TargetAddressStorageMode storage_mode = CAN_INLINE_TARGET_ADDRESS);

  void CallCodeGeneric(
      Handle<Code> code,
      RelocInfo::Mode mode,
      LInstruction* instr,
      SafepointMode safepoint_mode,
      TargetAddressStorageMode storage_mode = CAN_INLINE_TARGET_ADDRESS);

  void CallRuntime(const Runtime::Function* function,
                   int num_arguments,
                   LInstruction* instr,
                   SaveFPRegsMode save_doubles = kDontSaveFPRegs);

  void CallRuntime(Runtime::FunctionId id,
                   int num_arguments,
                   LInstruction* instr) {
    const Runtime::Function* function = Runtime::FunctionForId(id);
    CallRuntime(function, num_arguments, instr);
  }
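
  // Illustrative sketch only: handlers usually pass a Runtime::FunctionId
  // directly through this overload, e.g.
  //
  //   CallRuntime(Runtime::kStackGuard, 0, instr);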

  void LoadContextFromDeferred(LOperand* context);
  void CallRuntimeFromDeferred(Runtime::FunctionId id,
                               int argc,
                               LInstruction* instr,
                               LOperand* context);

  enum R1State {
    R1_UNINITIALIZED,
    R1_CONTAINS_TARGET
  };

  // Generate a direct call to a known function.  Expects the function
  // to be in r1.
  void CallKnownFunction(Handle<JSFunction> function,
                         int formal_parameter_count,
                         int arity,
                         LInstruction* instr,
                         R1State r1_state);

  void RecordSafepointWithLazyDeopt(LInstruction* instr,
                                    SafepointMode safepoint_mode);

  void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
                                            Safepoint::DeoptMode mode);
  void DeoptimizeIf(Condition condition, LInstruction* instr,
                    const char* detail, Deoptimizer::BailoutType bailout_type);
  void DeoptimizeIf(Condition condition, LInstruction* instr,
                    const char* detail = NULL);

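  // Illustrative sketch only: the usual pattern in the .cc file is to emit a
  // comparison and then deoptimize on the failing condition, e.g. a smi
  // check ("__" is the .cc file's masm() macro, the detail string is just an
  // example reason):
  //
  //   __ SmiTst(ToRegister(instr->value()));
  //   DeoptimizeIf(ne, instr, "not a Smi");
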
  void AddToTranslation(LEnvironment* environment,
                        Translation* translation,
                        LOperand* op,
                        bool is_tagged,
                        bool is_uint32,
                        int* object_index_pointer,
                        int* dematerialized_index_pointer);
  void PopulateDeoptimizationData(Handle<Code> code);
  int DefineDeoptimizationLiteral(Handle<Object> literal);

  void PopulateDeoptimizationLiteralsWithInlinedFunctions();

  Register ToRegister(int index) const;
  DwVfpRegister ToDoubleRegister(int index) const;

  MemOperand BuildSeqStringOperand(Register string,
                                   LOperand* index,
                                   String::Encoding encoding);

  void EmitIntegerMathAbs(LMathAbs* instr);

  // Support for recording safepoint and position information.
  void RecordSafepoint(LPointerMap* pointers,
                       Safepoint::Kind kind,
                       int arguments,
                       Safepoint::DeoptMode mode);
  void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode);
  void RecordSafepoint(Safepoint::DeoptMode mode);
  void RecordSafepointWithRegisters(LPointerMap* pointers,
                                    int arguments,
                                    Safepoint::DeoptMode mode);

  void RecordAndWritePosition(int position) OVERRIDE;

  static Condition TokenToCondition(Token::Value op, bool is_unsigned);
  void EmitGoto(int block);

  // EmitBranch expects to be the last instruction of a block.
  template<class InstrType>
  void EmitBranch(InstrType instr, Condition condition);
  template<class InstrType>
  void EmitFalseBranch(InstrType instr, Condition condition);
  void EmitNumberUntagD(LNumberUntagD* instr, Register input,
                        DwVfpRegister result, NumberUntagDMode mode);

  // Emits optimized code for typeof x == "y".  Modifies input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitTypeofIs(Label* true_label,
                         Label* false_label,
                         Register input,
                         Handle<String> type_name);
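
  // Illustrative sketch only: a branching handler typically feeds the
  // returned condition into EmitBranch and falls through when no split is
  // needed. Roughly (labels and operands elided):
  //
  //   Condition final_branch_condition =
  //       EmitTypeofIs(true_label, false_label, input, type_name);
  //   if (final_branch_condition != kNoCondition) {
  //     EmitBranch(instr, final_branch_condition);
  //   }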

  // Emits optimized code for %_IsObject(x).  Preserves input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitIsObject(Register input,
                         Register temp1,
                         Label* is_not_object,
                         Label* is_object);

  // Emits optimized code for %_IsString(x).  Preserves input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitIsString(Register input,
                         Register temp1,
                         Label* is_not_string,
                         SmiCheck check_needed);

  // Emits optimized code for %_IsConstructCall().
  // Caller should branch on equal condition.
  void EmitIsConstructCall(Register temp1, Register temp2);

  // Emits optimized code to deep-copy the contents of statically known
  // object graphs (e.g. object literal boilerplate).
  void EmitDeepCopy(Handle<JSObject> object,
                    Register result,
                    Register source,
                    int* offset,
                    AllocationSiteMode mode);

  void EnsureSpaceForLazyDeopt(int space_needed) OVERRIDE;
  void DoLoadKeyedExternalArray(LLoadKeyed* instr);
  void DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr);
  void DoLoadKeyedFixedArray(LLoadKeyed* instr);
  void DoStoreKeyedExternalArray(LStoreKeyed* instr);
  void DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr);
  void DoStoreKeyedFixedArray(LStoreKeyed* instr);

  template <class T>
  void EmitVectorLoadICRegisters(T* instr);

  ZoneList<LEnvironment*> deoptimizations_;
  ZoneList<Deoptimizer::JumpTableEntry> jump_table_;
  ZoneList<Handle<Object> > deoptimization_literals_;
  int inlined_function_count_;
  Scope* const scope_;
  TranslationBuffer translations_;
  ZoneList<LDeferredCode*> deferred_;
  int osr_pc_offset_;
  bool frame_is_built_;

  // Builder that keeps track of safepoints in the code. The table
  // itself is emitted at the end of the generated code.
  SafepointTableBuilder safepoints_;

  // Compiles a set of parallel moves into a sequential list of moves.
  LGapResolver resolver_;

  Safepoint::Kind expected_safepoint_kind_;

  class PushSafepointRegistersScope FINAL BASE_EMBEDDED {
   public:
    explicit PushSafepointRegistersScope(LCodeGen* codegen)
        : codegen_(codegen) {
      DCHECK(codegen_->info()->is_calling());
      DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kSimple);
      codegen_->expected_safepoint_kind_ = Safepoint::kWithRegisters;
      codegen_->masm_->PushSafepointRegisters();
    }

    ~PushSafepointRegistersScope() {
      DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kWithRegisters);
      codegen_->masm_->PopSafepointRegisters();
      codegen_->expected_safepoint_kind_ = Safepoint::kSimple;
    }

   private:
    LCodeGen* codegen_;
  };
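
  // Illustrative sketch only: deferred-code helpers open this scope around
  // runtime calls so that every register is recorded in the safepoint. The
  // runtime function and instruction below are placeholders:
  //
  //   {
  //     PushSafepointRegistersScope scope(this);
  //     CallRuntimeFromDeferred(Runtime::kHypotheticalHelper, 0, instr,
  //                             instr->context());
  //   }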

  friend class LDeferredCode;
  friend class LEnvironment;
  friend class SafepointGenerator;
  DISALLOW_COPY_AND_ASSIGN(LCodeGen);
};


class LDeferredCode : public ZoneObject {
 public:
  explicit LDeferredCode(LCodeGen* codegen)
      : codegen_(codegen),
        external_exit_(NULL),
        instruction_index_(codegen->current_instruction_) {
    codegen->AddDeferredCode(this);
  }

  virtual ~LDeferredCode() {}
  virtual void Generate() = 0;
  virtual LInstruction* instr() = 0;

  void SetExit(Label* exit) { external_exit_ = exit; }
  Label* entry() { return &entry_; }
  Label* exit() { return external_exit_ != NULL ? external_exit_ : &exit_; }
  int instruction_index() const { return instruction_index_; }

 protected:
  LCodeGen* codegen() const { return codegen_; }
  MacroAssembler* masm() const { return codegen_->masm(); }

 private:
  LCodeGen* codegen_;
  Label entry_;
  Label exit_;
  Label* external_exit_;
  int instruction_index_;
};

} }  // namespace v8::internal

#endif  // V8_ARM_LITHIUM_CODEGEN_ARM_H_