// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_CRANKSHAFT_MIPS_LITHIUM_CODEGEN_MIPS_H_
#define V8_CRANKSHAFT_MIPS_LITHIUM_CODEGEN_MIPS_H_

#include "src/ast/scopes.h"
#include "src/crankshaft/lithium-codegen.h"
#include "src/crankshaft/mips/lithium-gap-resolver-mips.h"
#include "src/crankshaft/mips/lithium-mips.h"
#include "src/deoptimizer.h"
#include "src/safepoint-table.h"
#include "src/utils.h"

namespace v8 {
namespace internal {

// Forward declarations.
class LDeferredCode;
class SafepointGenerator;

class LCodeGen: public LCodeGenBase {
 public:
  LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
      : LCodeGenBase(chunk, assembler, info),
        jump_table_(4, info->zone()),
        scope_(info->scope()),
        deferred_(8, info->zone()),
        frame_is_built_(false),
        safepoints_(info->zone()),
        resolver_(this),
        expected_safepoint_kind_(Safepoint::kSimple) {
    PopulateDeoptimizationLiteralsWithInlinedFunctions();
  }


  int LookupDestination(int block_id) const {
    return chunk()->LookupDestination(block_id);
  }

  bool IsNextEmittedBlock(int block_id) const {
    return LookupDestination(block_id) == GetNextEmittedBlock();
  }

  bool NeedsEagerFrame() const {
    return HasAllocatedStackSlots() || info()->is_non_deferred_calling() ||
           !info()->IsStub() || info()->requires_frame();
  }
  bool NeedsDeferredFrame() const {
    return !NeedsEagerFrame() && info()->is_deferred_calling();
  }

  RAStatus GetRAState() const {
    return frame_is_built_ ? kRAHasBeenSaved : kRAHasNotBeenSaved;
  }

  // Support for converting LOperands to assembler types.
  // LOperand must be a register.
  Register ToRegister(LOperand* op) const;

  // LOperand is loaded into scratch, unless already a register.
  Register EmitLoadRegister(LOperand* op, Register scratch);

  // LOperand must be a double register.
  DoubleRegister ToDoubleRegister(LOperand* op) const;

  // LOperand is loaded into dbl_scratch, unless already a double register.
  DoubleRegister EmitLoadDoubleRegister(LOperand* op,
                                        FloatRegister flt_scratch,
                                        DoubleRegister dbl_scratch);
  int32_t ToRepresentation(LConstantOperand* op, const Representation& r) const;
  int32_t ToInteger32(LConstantOperand* op) const;
  Smi* ToSmi(LConstantOperand* op) const;
  double ToDouble(LConstantOperand* op) const;
  Operand ToOperand(LOperand* op);
  MemOperand ToMemOperand(LOperand* op) const;
  // Returns a MemOperand pointing to the high word of a DoubleStackSlot.
  MemOperand ToHighMemOperand(LOperand* op) const;

  bool IsInteger32(LConstantOperand* op) const;
  bool IsSmi(LConstantOperand* op) const;
  Handle<Object> ToHandle(LConstantOperand* op) const;

  // Try to generate code for the entire chunk, but it may fail if the
  // chunk contains constructs we cannot handle. Returns true if the
  // code generation attempt succeeded.
  bool GenerateCode();
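
  // A rough sketch of how a driver is expected to use GenerateCode() above
  // and FinishCode() below; the surrounding calls and variable names are
  // illustrative assumptions, not part of this class:
  //
  //   LCodeGen generator(chunk, assembler, info);
  //   if (generator.GenerateCode()) {
  //     // ... assemble the MacroAssembler buffer into a Code object ...
  //     generator.FinishCode(code);
  //   }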

  // Finish the code by setting stack height, safepoint, and bailout
  // information on it.
  void FinishCode(Handle<Code> code);

  void DoDeferredNumberTagD(LNumberTagD* instr);

  enum IntegerSignedness { SIGNED_INT32, UNSIGNED_INT32 };
  void DoDeferredNumberTagIU(LInstruction* instr,
                             LOperand* value,
                             LOperand* temp1,
                             LOperand* temp2,
                             IntegerSignedness signedness);

  void DoDeferredTaggedToI(LTaggedToI* instr);
  void DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr);
  void DoDeferredStackCheck(LStackCheck* instr);
  void DoDeferredMaybeGrowElements(LMaybeGrowElements* instr);
  void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
  void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
  void DoDeferredAllocate(LAllocate* instr);
  void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
  void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr,
                                   Register result,
                                   Register object,
                                   Register index);

  // Parallel move support.
  void DoParallelMove(LParallelMove* move);
  void DoGap(LGap* instr);

  MemOperand PrepareKeyedOperand(Register key,
                                 Register base,
                                 bool key_is_constant,
                                 int constant_key,
                                 int element_size,
                                 int shift_size,
                                 int base_offset);

  // Emit frame translation commands for an environment.
  void WriteTranslation(LEnvironment* environment, Translation* translation);

  // Declare methods that deal with the individual node types.
#define DECLARE_DO(type) void Do##type(L##type* node);
  LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO)
#undef DECLARE_DO

 private:
  Scope* scope() const { return scope_; }

  Register scratch0() { return kLithiumScratchReg; }
  Register scratch1() { return kLithiumScratchReg2; }
  DoubleRegister double_scratch0() { return kLithiumScratchDouble; }

  LInstruction* GetNextInstruction();

  void EmitClassOfTest(Label* if_true,
                       Label* if_false,
                       Handle<String> class_name,
                       Register input,
                       Register temporary,
                       Register temporary2);

  bool HasAllocatedStackSlots() const {
    return chunk()->HasAllocatedStackSlots();
  }
  int GetStackSlotCount() const { return chunk()->GetSpillSlotCount(); }
  int GetTotalFrameSlotCount() const {
    return chunk()->GetTotalFrameSlotCount();
  }

  void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }

  void SaveCallerDoubles();
  void RestoreCallerDoubles();

  // Code generation passes. Returns true if code generation should
  // continue.
  void GenerateBodyInstructionPre(LInstruction* instr) override;
  bool GeneratePrologue();
  bool GenerateDeferredCode();
  bool GenerateJumpTable();
  bool GenerateSafepointTable();
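
  // For orientation: GenerateCode() (defined in the .cc file) is expected to
  // chain these passes in roughly this order, with GenerateBody() coming from
  // LCodeGenBase. Sketch only, not a binding contract:
  //
  //   return GeneratePrologue() && GenerateBody() && GenerateDeferredCode() &&
  //          GenerateJumpTable() && GenerateSafepointTable();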

  // Generates the custom OSR entrypoint and sets the osr_pc_offset.
  void GenerateOsrPrologue();

  enum SafepointMode {
    RECORD_SIMPLE_SAFEPOINT,
    RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS
  };

  void CallCode(Handle<Code> code,
                RelocInfo::Mode mode,
                LInstruction* instr);

  void CallCodeGeneric(Handle<Code> code,
                       RelocInfo::Mode mode,
                       LInstruction* instr,
                       SafepointMode safepoint_mode);

  void CallRuntime(const Runtime::Function* function,
                   int num_arguments,
                   LInstruction* instr,
                   SaveFPRegsMode save_doubles = kDontSaveFPRegs);

  void CallRuntime(Runtime::FunctionId id,
                   int num_arguments,
                   LInstruction* instr) {
    const Runtime::Function* function = Runtime::FunctionForId(id);
    CallRuntime(function, num_arguments, instr);
  }

  void CallRuntime(Runtime::FunctionId id, LInstruction* instr) {
    const Runtime::Function* function = Runtime::FunctionForId(id);
    CallRuntime(function, function->nargs, instr);
  }

  void LoadContextFromDeferred(LOperand* context);
  void CallRuntimeFromDeferred(Runtime::FunctionId id,
                               int argc,
                               LInstruction* instr,
                               LOperand* context);

  void PrepareForTailCall(const ParameterCount& actual, Register scratch1,
                          Register scratch2, Register scratch3);

  // Generate a direct call to a known function. Expects the function
  // to be in a1.
  void CallKnownFunction(Handle<JSFunction> function,
                         int formal_parameter_count, int arity,
                         bool is_tail_call, LInstruction* instr);

  void RecordSafepointWithLazyDeopt(LInstruction* instr,
                                    SafepointMode safepoint_mode);

  void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
                                            Safepoint::DeoptMode mode);
  void DeoptimizeIf(Condition condition, LInstruction* instr,
                    DeoptimizeReason deopt_reason,
                    Deoptimizer::BailoutType bailout_type,
                    Register src1 = zero_reg,
                    const Operand& src2 = Operand(zero_reg));
  void DeoptimizeIf(Condition condition, LInstruction* instr,
                    DeoptimizeReason deopt_reason = DeoptimizeReason::kNoReason,
                    Register src1 = zero_reg,
                    const Operand& src2 = Operand(zero_reg));

  void AddToTranslation(LEnvironment* environment,
                        Translation* translation,
                        LOperand* op,
                        bool is_tagged,
                        bool is_uint32,
                        int* object_index_pointer,
                        int* dematerialized_index_pointer);

  Register ToRegister(int index) const;
  DoubleRegister ToDoubleRegister(int index) const;

  MemOperand BuildSeqStringOperand(Register string,
                                   LOperand* index,
                                   String::Encoding encoding);

  void EmitIntegerMathAbs(LMathAbs* instr);

  // Support for recording safepoint information.
  void RecordSafepoint(LPointerMap* pointers,
                       Safepoint::Kind kind,
                       int arguments,
                       Safepoint::DeoptMode mode);
  void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode);
  void RecordSafepoint(Safepoint::DeoptMode mode);
  void RecordSafepointWithRegisters(LPointerMap* pointers,
                                    int arguments,
                                    Safepoint::DeoptMode mode);

  static Condition TokenToCondition(Token::Value op, bool is_unsigned);
  void EmitGoto(int block);
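
  // Note on the DeoptimizeIf() overloads above: the bailout is taken when
  // 'condition' holds for the (src1, src2) pair, mirroring the MIPS
  // macro-assembler's branch operand convention. Illustrative sketch only;
  // the masking instruction and register choice are assumptions:
  //
  //   __ And(scratch0(), value, Operand(kSmiTagMask));
  //   DeoptimizeIf(eq, instr, DeoptimizeReason::kSmi, scratch0(),
  //                Operand(zero_reg));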

  // EmitBranch expects to be the last instruction of a block.
  template<class InstrType>
  void EmitBranch(InstrType instr,
                  Condition condition,
                  Register src1,
                  const Operand& src2);
  template<class InstrType>
  void EmitBranchF(InstrType instr,
                   Condition condition,
                   FPURegister src1,
                   FPURegister src2);
  template <class InstrType>
  void EmitTrueBranch(InstrType instr, Condition condition, Register src1,
                      const Operand& src2);
  template <class InstrType>
  void EmitFalseBranch(InstrType instr, Condition condition, Register src1,
                       const Operand& src2);
  template<class InstrType>
  void EmitFalseBranchF(InstrType instr,
                        Condition condition,
                        FPURegister src1,
                        FPURegister src2);
  void EmitCmpI(LOperand* left, LOperand* right);
  void EmitNumberUntagD(LNumberUntagD* instr, Register input,
                        DoubleRegister result, NumberUntagDMode mode);

  // Emits optimized code for typeof x == "y". Modifies input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  // Returns two registers in cmp1 and cmp2 that can be used in the
  // Branch instruction after EmitTypeofIs.
  Condition EmitTypeofIs(Label* true_label,
                         Label* false_label,
                         Register input,
                         Handle<String> type_name,
                         Register* cmp1,
                         Operand* cmp2);

  // Emits optimized code for %_IsString(x). Preserves input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitIsString(Register input,
                         Register temp1,
                         Label* is_not_string,
                         SmiCheck check_needed);

  // Emits optimized code to deep-copy the contents of statically known
  // object graphs (e.g. object literal boilerplate).
  void EmitDeepCopy(Handle<JSObject> object,
                    Register result,
                    Register source,
                    int* offset,
                    AllocationSiteMode mode);
  // Emit optimized code for integer division.
  // Inputs are signed.
  // All registers are clobbered.
  // If 'remainder' is no_reg, it is not computed.
  void EmitSignedIntegerDivisionByConstant(Register result,
                                           Register dividend,
                                           int32_t divisor,
                                           Register remainder,
                                           Register scratch,
                                           LEnvironment* environment);


  void EnsureSpaceForLazyDeopt(int space_needed) override;
  void DoLoadKeyedExternalArray(LLoadKeyed* instr);
  void DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr);
  void DoLoadKeyedFixedArray(LLoadKeyed* instr);
  void DoStoreKeyedExternalArray(LStoreKeyed* instr);
  void DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr);
  void DoStoreKeyedFixedArray(LStoreKeyed* instr);

  template <class T>
  void EmitVectorLoadICRegisters(T* instr);

  ZoneList<Deoptimizer::JumpTableEntry> jump_table_;
  Scope* const scope_;
  ZoneList<LDeferredCode*> deferred_;
  bool frame_is_built_;

  // Builder that keeps track of safepoints in the code. The table
  // itself is emitted at the end of the generated code.
  SafepointTableBuilder safepoints_;

  // Compiles a set of parallel moves into a sequential list of moves.
  LGapResolver resolver_;

  Safepoint::Kind expected_safepoint_kind_;

  class PushSafepointRegistersScope final BASE_EMBEDDED {
   public:
    explicit PushSafepointRegistersScope(LCodeGen* codegen);

    ~PushSafepointRegistersScope();

   private:
    LCodeGen* codegen_;
  };

  friend class LDeferredCode;
  friend class LEnvironment;
  friend class SafepointGenerator;
  DISALLOW_COPY_AND_ASSIGN(LCodeGen);
};


class LDeferredCode : public ZoneObject {
 public:
  explicit LDeferredCode(LCodeGen* codegen)
      : codegen_(codegen),
        external_exit_(NULL),
        instruction_index_(codegen->current_instruction_) {
    codegen->AddDeferredCode(this);
  }

  virtual ~LDeferredCode() {}
  virtual void Generate() = 0;
  virtual LInstruction* instr() = 0;

  void SetExit(Label* exit) { external_exit_ = exit; }
  Label* entry() { return &entry_; }
  Label* exit() { return external_exit_ != NULL ? external_exit_ : &exit_; }
  int instruction_index() const { return instruction_index_; }

 protected:
  LCodeGen* codegen() const { return codegen_; }
  MacroAssembler* masm() const { return codegen_->masm(); }

 private:
  LCodeGen* codegen_;
  Label entry_;
  Label exit_;
  Label* external_exit_;
  int instruction_index_;
};

}  // namespace internal
}  // namespace v8

#endif  // V8_CRANKSHAFT_MIPS_LITHIUM_CODEGEN_MIPS_H_
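
// Usage note: deferred code paths are emitted by subclassing LDeferredCode in
// the .cc file and forwarding Generate() to the matching LCodeGen::DoDeferred*
// method. A minimal sketch; the concrete class shown here is illustrative:
//
//   class DeferredNumberTagD final : public LDeferredCode {
//    public:
//     DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr)
//         : LDeferredCode(codegen), instr_(instr) {}
//     void Generate() override { codegen()->DoDeferredNumberTagD(instr_); }
//     LInstruction* instr() override { return instr_; }
//
//    private:
//     LNumberTagD* instr_;
//   };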