// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_MIPS_LITHIUM_CODEGEN_MIPS_H_
#define V8_MIPS_LITHIUM_CODEGEN_MIPS_H_

#include "src/deoptimizer.h"
#include "src/lithium-codegen.h"
#include "src/mips/lithium-gap-resolver-mips.h"
#include "src/mips/lithium-mips.h"
#include "src/safepoint-table.h"
#include "src/scopes.h"
#include "src/utils.h"

namespace v8 {
namespace internal {

// Forward declarations.
class LDeferredCode;
class SafepointGenerator;

class LCodeGen: public LCodeGenBase {
 public:
  LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
      : LCodeGenBase(chunk, assembler, info),
        deoptimizations_(4, info->zone()),
        jump_table_(4, info->zone()),
        deoptimization_literals_(8, info->zone()),
        inlined_function_count_(0),
        scope_(info->scope()),
        translations_(info->zone()),
        deferred_(8, info->zone()),
        osr_pc_offset_(-1),
        frame_is_built_(false),
        safepoints_(info->zone()),
        resolver_(this),
        expected_safepoint_kind_(Safepoint::kSimple) {
    PopulateDeoptimizationLiteralsWithInlinedFunctions();
  }

  int LookupDestination(int block_id) const {
    return chunk()->LookupDestination(block_id);
  }

  bool IsNextEmittedBlock(int block_id) const {
    return LookupDestination(block_id) == GetNextEmittedBlock();
  }

  bool NeedsEagerFrame() const {
    return GetStackSlotCount() > 0 ||
        info()->is_non_deferred_calling() ||
        !info()->IsStub() ||
        info()->requires_frame();
  }
  bool NeedsDeferredFrame() const {
    return !NeedsEagerFrame() && info()->is_deferred_calling();
  }

  RAStatus GetRAState() const {
    return frame_is_built_ ? kRAHasBeenSaved : kRAHasNotBeenSaved;
  }

  // Support for converting LOperands to assembler types.
  // LOperand must be a register.
  Register ToRegister(LOperand* op) const;

  // LOperand is loaded into scratch, unless already a register.
  Register EmitLoadRegister(LOperand* op, Register scratch);

  // LOperand must be a double register.
  DoubleRegister ToDoubleRegister(LOperand* op) const;

  // LOperand is loaded into dbl_scratch, unless already a double register.
  DoubleRegister EmitLoadDoubleRegister(LOperand* op,
                                        FloatRegister flt_scratch,
                                        DoubleRegister dbl_scratch);
  int32_t ToRepresentation(LConstantOperand* op, const Representation& r) const;
  int32_t ToInteger32(LConstantOperand* op) const;
  Smi* ToSmi(LConstantOperand* op) const;
  double ToDouble(LConstantOperand* op) const;
  Operand ToOperand(LOperand* op);
  MemOperand ToMemOperand(LOperand* op) const;
  // Returns a MemOperand pointing to the high word of a DoubleStackSlot.
  MemOperand ToHighMemOperand(LOperand* op) const;

  bool IsInteger32(LConstantOperand* op) const;
  bool IsSmi(LConstantOperand* op) const;
  Handle<Object> ToHandle(LConstantOperand* op) const;

  // Try to generate code for the entire chunk, but it may fail if the
  // chunk contains constructs we cannot handle. Returns true if the
  // code generation attempt succeeded.
  bool GenerateCode();

  // Finish the code by setting stack height, safepoint, and bailout
  // information on it.
  void FinishCode(Handle<Code> code);
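  // A typical driver for the two entry points above (a sketch, assuming the
  // generic LChunk::Codegen() flow; `chunk`, `assembler` and `info` are
  // illustrative locals, not declared in this header):
  //
  //   LCodeGen generator(chunk, &assembler, info);
  //   if (generator.GenerateCode()) {
  //     CodeDesc desc;
  //     assembler.GetCode(&desc);
  //     Handle<Code> code = ...;  // allocate a Code object from desc
  //     generator.FinishCode(code);  // attach safepoint and deopt info
  //   }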

  void DoDeferredNumberTagD(LNumberTagD* instr);

  enum IntegerSignedness { SIGNED_INT32, UNSIGNED_INT32 };
  void DoDeferredNumberTagIU(LInstruction* instr,
                             LOperand* value,
                             LOperand* temp1,
                             LOperand* temp2,
                             IntegerSignedness signedness);

  void DoDeferredTaggedToI(LTaggedToI* instr);
  void DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr);
  void DoDeferredStackCheck(LStackCheck* instr);
  void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
  void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
  void DoDeferredAllocate(LAllocate* instr);
  void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
                                       Label* map_check);

  void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
  void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr,
                                   Register result,
                                   Register object,
                                   Register index);

  // Parallel move support.
  void DoParallelMove(LParallelMove* move);
  void DoGap(LGap* instr);

  MemOperand PrepareKeyedOperand(Register key,
                                 Register base,
                                 bool key_is_constant,
                                 int constant_key,
                                 int element_size,
                                 int shift_size,
                                 int base_offset);

  // Emit frame translation commands for an environment.
  void WriteTranslation(LEnvironment* environment, Translation* translation);

  // Declare methods that deal with the individual node types.
#define DECLARE_DO(type) void Do##type(L##type* node);
  LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO)
#undef DECLARE_DO

 private:
  StrictMode strict_mode() const { return info()->strict_mode(); }

  Scope* scope() const { return scope_; }

  Register scratch0() { return kLithiumScratchReg; }
  Register scratch1() { return kLithiumScratchReg2; }
  DoubleRegister double_scratch0() { return kLithiumScratchDouble; }

  LInstruction* GetNextInstruction();

  void EmitClassOfTest(Label* if_true,
                       Label* if_false,
                       Handle<String> class_name,
                       Register input,
                       Register temporary,
                       Register temporary2);

  int GetStackSlotCount() const { return chunk()->spill_slot_count(); }

  void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }

  void SaveCallerDoubles();
  void RestoreCallerDoubles();

  // Code generation passes. Returns true if code generation should
  // continue.
  void GenerateBodyInstructionPre(LInstruction* instr) OVERRIDE;
  bool GeneratePrologue();
  bool GenerateDeferredCode();
  bool GenerateJumpTable();
  bool GenerateSafepointTable();

  // Generates the custom OSR entrypoint and sets the osr_pc_offset.
  void GenerateOsrPrologue();

  enum SafepointMode {
    RECORD_SIMPLE_SAFEPOINT,
    RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS
  };

  void CallCode(Handle<Code> code,
                RelocInfo::Mode mode,
                LInstruction* instr);

  void CallCodeGeneric(Handle<Code> code,
                       RelocInfo::Mode mode,
                       LInstruction* instr,
                       SafepointMode safepoint_mode);

  void CallRuntime(const Runtime::Function* function,
                   int num_arguments,
                   LInstruction* instr,
                   SaveFPRegsMode save_doubles = kDontSaveFPRegs);

  void CallRuntime(Runtime::FunctionId id,
                   int num_arguments,
                   LInstruction* instr) {
    const Runtime::Function* function = Runtime::FunctionForId(id);
    CallRuntime(function, num_arguments, instr);
  }

  void LoadContextFromDeferred(LOperand* context);
  void CallRuntimeFromDeferred(Runtime::FunctionId id,
                               int argc,
                               LInstruction* instr,
                               LOperand* context);

  enum A1State {
    A1_UNINITIALIZED,
    A1_CONTAINS_TARGET
  };

  // Generate a direct call to a known function. Expects the function
  // to be in a1.
  void CallKnownFunction(Handle<JSFunction> function,
                         int formal_parameter_count,
                         int arity,
                         LInstruction* instr,
                         A1State a1_state);

  void RecordSafepointWithLazyDeopt(LInstruction* instr,
                                    SafepointMode safepoint_mode);

  void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
                                            Safepoint::DeoptMode mode);
  void DeoptimizeIf(Condition condition, LInstruction* instr,
                    Deoptimizer::BailoutType bailout_type,
                    Register src1 = zero_reg,
                    const Operand& src2 = Operand(zero_reg),
                    const char* detail = NULL);
  void DeoptimizeIf(Condition condition, LInstruction* instr,
                    Register src1 = zero_reg,
                    const Operand& src2 = Operand(zero_reg),
                    const char* detail = NULL);

  void AddToTranslation(LEnvironment* environment,
                        Translation* translation,
                        LOperand* op,
                        bool is_tagged,
                        bool is_uint32,
                        int* object_index_pointer,
                        int* dematerialized_index_pointer);
  void PopulateDeoptimizationData(Handle<Code> code);
  int DefineDeoptimizationLiteral(Handle<Object> literal);

  void PopulateDeoptimizationLiteralsWithInlinedFunctions();

  Register ToRegister(int index) const;
  DoubleRegister ToDoubleRegister(int index) const;

  MemOperand BuildSeqStringOperand(Register string,
                                   LOperand* index,
                                   String::Encoding encoding);

  void EmitIntegerMathAbs(LMathAbs* instr);

  // Support for recording safepoint and position information.
  void RecordSafepoint(LPointerMap* pointers,
                       Safepoint::Kind kind,
                       int arguments,
                       Safepoint::DeoptMode mode);
  void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode);
  void RecordSafepoint(Safepoint::DeoptMode mode);
  void RecordSafepointWithRegisters(LPointerMap* pointers,
                                    int arguments,
                                    Safepoint::DeoptMode mode);

  void RecordAndWritePosition(int position) OVERRIDE;

  static Condition TokenToCondition(Token::Value op, bool is_unsigned);
  void EmitGoto(int block);

  // EmitBranch expects to be the last instruction of a block.
  template<class InstrType>
  void EmitBranch(InstrType instr,
                  Condition condition,
                  Register src1,
                  const Operand& src2);
  template<class InstrType>
  void EmitBranchF(InstrType instr,
                   Condition condition,
                   FPURegister src1,
                   FPURegister src2);
  template<class InstrType>
  void EmitFalseBranch(InstrType instr,
                       Condition condition,
                       Register src1,
                       const Operand& src2);
  template<class InstrType>
  void EmitFalseBranchF(InstrType instr,
                        Condition condition,
                        FPURegister src1,
                        FPURegister src2);
  void EmitCmpI(LOperand* left, LOperand* right);
  void EmitNumberUntagD(LNumberUntagD* instr, Register input,
                        DoubleRegister result, NumberUntagDMode mode);

  // Emits optimized code for typeof x == "y". Modifies input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  // Returns two registers in cmp1 and cmp2 that can be used in the
  // Branch instruction after EmitTypeofIs.
  Condition EmitTypeofIs(Label* true_label,
                         Label* false_label,
                         Register input,
                         Handle<String> type_name,
                         Register* cmp1,
                         Operand* cmp2);

  // Emits optimized code for %_IsObject(x). Preserves input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitIsObject(Register input,
                         Register temp1,
                         Register temp2,
                         Label* is_not_object,
                         Label* is_object);

  // Emits optimized code for %_IsString(x). Preserves input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitIsString(Register input,
                         Register temp1,
                         Label* is_not_string,
                         SmiCheck check_needed);

  // Emits optimized code for %_IsConstructCall().
  // Caller should branch on equal condition.
  void EmitIsConstructCall(Register temp1, Register temp2);

  // Emits optimized code to deep-copy the contents of statically known
  // object graphs (e.g. object literal boilerplate).
  void EmitDeepCopy(Handle<JSObject> object,
                    Register result,
                    Register source,
                    int* offset,
                    AllocationSiteMode mode);
  // Emit optimized code for integer division.
  // Inputs are signed.
  // All registers are clobbered.
  // If 'remainder' is no_reg, it is not computed.
  void EmitSignedIntegerDivisionByConstant(Register result,
                                           Register dividend,
                                           int32_t divisor,
                                           Register remainder,
                                           Register scratch,
                                           LEnvironment* environment);


  void EnsureSpaceForLazyDeopt(int space_needed) OVERRIDE;
  void DoLoadKeyedExternalArray(LLoadKeyed* instr);
  void DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr);
  void DoLoadKeyedFixedArray(LLoadKeyed* instr);
  void DoStoreKeyedExternalArray(LStoreKeyed* instr);
  void DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr);
  void DoStoreKeyedFixedArray(LStoreKeyed* instr);

  template <class T>
  void EmitVectorLoadICRegisters(T* instr);

  ZoneList<LEnvironment*> deoptimizations_;
  ZoneList<Deoptimizer::JumpTableEntry> jump_table_;
  ZoneList<Handle<Object> > deoptimization_literals_;
  int inlined_function_count_;
  Scope* const scope_;
  TranslationBuffer translations_;
  ZoneList<LDeferredCode*> deferred_;
  int osr_pc_offset_;
  bool frame_is_built_;

  // Builder that keeps track of safepoints in the code. The table
  // itself is emitted at the end of the generated code.
  SafepointTableBuilder safepoints_;

  // Compiles a set of parallel moves into a sequential list of moves.
  LGapResolver resolver_;

  Safepoint::Kind expected_safepoint_kind_;

  class PushSafepointRegistersScope FINAL BASE_EMBEDDED {
   public:
    explicit PushSafepointRegistersScope(LCodeGen* codegen)
        : codegen_(codegen) {
      DCHECK(codegen_->info()->is_calling());
      DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kSimple);
      codegen_->expected_safepoint_kind_ = Safepoint::kWithRegisters;

      StoreRegistersStateStub stub(codegen_->isolate());
      codegen_->masm_->push(ra);
      codegen_->masm_->CallStub(&stub);
    }

    ~PushSafepointRegistersScope() {
      DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kWithRegisters);
      RestoreRegistersStateStub stub(codegen_->isolate());
      codegen_->masm_->push(ra);
      codegen_->masm_->CallStub(&stub);
      codegen_->expected_safepoint_kind_ = Safepoint::kSimple;
    }

   private:
    LCodeGen* codegen_;
  };

  friend class LDeferredCode;
  friend class LEnvironment;
  friend class SafepointGenerator;
  DISALLOW_COPY_AND_ASSIGN(LCodeGen);
};


class LDeferredCode : public ZoneObject {
 public:
  explicit LDeferredCode(LCodeGen* codegen)
      : codegen_(codegen),
        external_exit_(NULL),
        instruction_index_(codegen->current_instruction_) {
    codegen->AddDeferredCode(this);
  }

  virtual ~LDeferredCode() {}
  virtual void Generate() = 0;
  virtual LInstruction* instr() = 0;

  void SetExit(Label* exit) { external_exit_ = exit; }
  Label* entry() { return &entry_; }
  Label* exit() { return external_exit_ != NULL ? external_exit_ : &exit_; }
  int instruction_index() const { return instruction_index_; }

 protected:
  LCodeGen* codegen() const { return codegen_; }
  MacroAssembler* masm() const { return codegen_->masm(); }

 private:
  LCodeGen* codegen_;
  Label entry_;
  Label exit_;
  Label* external_exit_;
  int instruction_index_;
};

} }  // namespace v8::internal

#endif  // V8_MIPS_LITHIUM_CODEGEN_MIPS_H_