// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_CRANKSHAFT_IA32_LITHIUM_CODEGEN_IA32_H_
#define V8_CRANKSHAFT_IA32_LITHIUM_CODEGEN_IA32_H_

#include "src/ast/scopes.h"
#include "src/base/logging.h"
#include "src/crankshaft/ia32/lithium-gap-resolver-ia32.h"
#include "src/crankshaft/ia32/lithium-ia32.h"
#include "src/crankshaft/lithium-codegen.h"
#include "src/deoptimizer.h"
#include "src/safepoint-table.h"
#include "src/utils.h"

namespace v8 {
namespace internal {

// Forward declarations.
class LDeferredCode;
class LGapNode;
class SafepointGenerator;

class LCodeGen : public LCodeGenBase {
 public:
  LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
      : LCodeGenBase(chunk, assembler, info),
        jump_table_(4, info->zone()),
        scope_(info->scope()),
        deferred_(8, info->zone()),
        frame_is_built_(false),
        safepoints_(info->zone()),
        resolver_(this),
        expected_safepoint_kind_(Safepoint::kSimple) {
    PopulateDeoptimizationLiteralsWithInlinedFunctions();
  }

  int LookupDestination(int block_id) const {
    return chunk()->LookupDestination(block_id);
  }

  bool IsNextEmittedBlock(int block_id) const {
    return LookupDestination(block_id) == GetNextEmittedBlock();
  }

  bool NeedsEagerFrame() const {
    return HasAllocatedStackSlots() || info()->is_non_deferred_calling() ||
           !info()->IsStub() || info()->requires_frame();
  }
  bool NeedsDeferredFrame() const {
    return !NeedsEagerFrame() && info()->is_deferred_calling();
  }

  // Support for converting LOperands to assembler types.
  Operand ToOperand(LOperand* op) const;
  Register ToRegister(LOperand* op) const;
  XMMRegister ToDoubleRegister(LOperand* op) const;

  bool IsInteger32(LConstantOperand* op) const;
  bool IsSmi(LConstantOperand* op) const;
  Immediate ToImmediate(LOperand* op, const Representation& r) const {
    return Immediate(ToRepresentation(LConstantOperand::cast(op), r));
  }
  double ToDouble(LConstantOperand* op) const;

  Handle<Object> ToHandle(LConstantOperand* op) const;

  // The operand denoting the second word (the one with a higher address) of
  // a double stack slot.
  Operand HighOperand(LOperand* op);

  // Try to generate code for the entire chunk, but it may fail if the
  // chunk contains constructs we cannot handle. Returns true if the
  // code generation attempt succeeded.
  bool GenerateCode();

  // Finish the code by setting stack height, safepoint, and bailout
  // information on it.
  void FinishCode(Handle<Code> code);

  // Deferred code support.
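  // The DoDeferred* helpers below are called from small LDeferredCode
  // subclasses defined in lithium-codegen-ia32.cc. A hedged sketch of that
  // pattern (the concrete subclass shown is illustrative and is not
  // declared in this header):
  //
  //   class DeferredNumberTagD final : public LDeferredCode {
  //    public:
  //     DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr)
  //         : LDeferredCode(codegen), instr_(instr) {}
  //     void Generate() override { codegen()->DoDeferredNumberTagD(instr_); }
  //     LInstruction* instr() override { return instr_; }
  //
  //    private:
  //     LNumberTagD* instr_;
  //   };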
  void DoDeferredNumberTagD(LNumberTagD* instr);

  enum IntegerSignedness { SIGNED_INT32, UNSIGNED_INT32 };
  void DoDeferredNumberTagIU(LInstruction* instr,
                             LOperand* value,
                             LOperand* temp,
                             IntegerSignedness signedness);

  void DoDeferredTaggedToI(LTaggedToI* instr, Label* done);
  void DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr);
  void DoDeferredStackCheck(LStackCheck* instr);
  void DoDeferredMaybeGrowElements(LMaybeGrowElements* instr);
  void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
  void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
  void DoDeferredAllocate(LAllocate* instr);
  void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
  void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr,
                                   Register object,
                                   Register index);

  // Parallel move support.
  void DoParallelMove(LParallelMove* move);
  void DoGap(LGap* instr);

  // Emit frame translation commands for an environment.
  void WriteTranslation(LEnvironment* environment, Translation* translation);

  void EnsureRelocSpaceForDeoptimization();

  // Declare methods that deal with the individual node types.
#define DECLARE_DO(type) void Do##type(L##type* node);
  LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO)
#undef DECLARE_DO

 private:
  Scope* scope() const { return scope_; }

  XMMRegister double_scratch0() const { return xmm0; }

  void EmitClassOfTest(Label* if_true,
                       Label* if_false,
                       Handle<String> class_name,
                       Register input,
                       Register temporary,
                       Register temporary2);

  bool HasAllocatedStackSlots() const {
    return chunk()->HasAllocatedStackSlots();
  }
  int GetStackSlotCount() const { return chunk()->GetSpillSlotCount(); }
  int GetTotalFrameSlotCount() const {
    return chunk()->GetTotalFrameSlotCount();
  }

  void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }

  void SaveCallerDoubles();
  void RestoreCallerDoubles();

  // Code generation passes. The bool-returning passes return true if code
  // generation should continue.
  void GenerateBodyInstructionPre(LInstruction* instr) override;
  void GenerateBodyInstructionPost(LInstruction* instr) override;
  bool GeneratePrologue();
  bool GenerateDeferredCode();
  bool GenerateJumpTable();
  bool GenerateSafepointTable();
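  // For orientation: GenerateCode() chains these passes. A simplified
  // sketch of its core, with status bookkeeping and phase tracing omitted:
  //
  //   return GeneratePrologue() && GenerateBody() && GenerateDeferredCode() &&
  //          GenerateJumpTable() && GenerateSafepointTable();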
  // Generates the custom OSR entrypoint and sets the osr_pc_offset.
  void GenerateOsrPrologue();

  enum SafepointMode {
    RECORD_SIMPLE_SAFEPOINT,
    RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS
  };

  void CallCode(Handle<Code> code,
                RelocInfo::Mode mode,
                LInstruction* instr);

  void CallCodeGeneric(Handle<Code> code,
                       RelocInfo::Mode mode,
                       LInstruction* instr,
                       SafepointMode safepoint_mode);

  void CallRuntime(const Runtime::Function* fun,
                   int argc,
                   LInstruction* instr,
                   SaveFPRegsMode save_doubles = kDontSaveFPRegs);

  void CallRuntime(Runtime::FunctionId id,
                   int argc,
                   LInstruction* instr) {
    const Runtime::Function* function = Runtime::FunctionForId(id);
    CallRuntime(function, argc, instr);
  }

  void CallRuntime(Runtime::FunctionId id, LInstruction* instr) {
    const Runtime::Function* function = Runtime::FunctionForId(id);
    CallRuntime(function, function->nargs, instr);
  }

  void CallRuntimeFromDeferred(Runtime::FunctionId id,
                               int argc,
                               LInstruction* instr,
                               LOperand* context);

  void LoadContextFromDeferred(LOperand* context);

  void PrepareForTailCall(const ParameterCount& actual, Register scratch1,
                          Register scratch2, Register scratch3);

  // Generate a direct call to a known function. Expects the function
  // to be in edi.
  void CallKnownFunction(Handle<JSFunction> function,
                         int formal_parameter_count, int arity,
                         bool is_tail_call, LInstruction* instr);

  void RecordSafepointWithLazyDeopt(LInstruction* instr,
                                    SafepointMode safepoint_mode);

  void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
                                            Safepoint::DeoptMode mode);
  void DeoptimizeIf(Condition cc, LInstruction* instr,
                    DeoptimizeReason deopt_reason,
                    Deoptimizer::BailoutType bailout_type);
  void DeoptimizeIf(Condition cc, LInstruction* instr,
                    DeoptimizeReason deopt_reason);

  bool DeoptEveryNTimes() {
    return FLAG_deopt_every_n_times != 0 && !info()->IsStub();
  }

  void AddToTranslation(LEnvironment* environment,
                        Translation* translation,
                        LOperand* op,
                        bool is_tagged,
                        bool is_uint32,
                        int* object_index_pointer,
                        int* dematerialized_index_pointer);

  Register ToRegister(int index) const;
  XMMRegister ToDoubleRegister(int index) const;
  int32_t ToRepresentation(LConstantOperand* op,
                           const Representation& r) const;
  int32_t ToInteger32(LConstantOperand* op) const;
  ExternalReference ToExternalReference(LConstantOperand* op) const;

  Operand BuildFastArrayOperand(LOperand* elements_pointer,
                                LOperand* key,
                                Representation key_representation,
                                ElementsKind elements_kind,
                                uint32_t base_offset);

  Operand BuildSeqStringOperand(Register string,
                                LOperand* index,
                                String::Encoding encoding);

  void EmitIntegerMathAbs(LMathAbs* instr);
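  // Deferred code that calls into the runtime records a safepoint with the
  // registers pushed, using the helpers declared below. A hedged sketch of
  // the pattern used in lithium-codegen-ia32.cc (__ is that file's
  // masm()-> shorthand; the runtime function shown is illustrative):
  //
  //   PushSafepointRegistersScope scope(this);
  //   __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
  //   RecordSafepointWithRegisters(
  //       instr->pointer_map(), 0, Safepoint::kNoLazyDeopt);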
  // Support for recording safepoint information.
  void RecordSafepoint(LPointerMap* pointers,
                       Safepoint::Kind kind,
                       int arguments,
                       Safepoint::DeoptMode mode);
  void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode);
  void RecordSafepoint(Safepoint::DeoptMode mode);
  void RecordSafepointWithRegisters(LPointerMap* pointers,
                                    int arguments,
                                    Safepoint::DeoptMode mode);

  static Condition TokenToCondition(Token::Value op, bool is_unsigned);
  void EmitGoto(int block);

  // EmitBranch expects to be the last instruction of a block.
  template <class InstrType>
  void EmitBranch(InstrType instr, Condition cc);
  template <class InstrType>
  void EmitTrueBranch(InstrType instr, Condition cc);
  template <class InstrType>
  void EmitFalseBranch(InstrType instr, Condition cc);
  void EmitNumberUntagD(LNumberUntagD* instr, Register input, Register temp,
                        XMMRegister result, NumberUntagDMode mode);

  // Emits optimized code for typeof x == "y". Modifies the input register.
  // Returns the condition on which a final split to the true and false
  // labels should be made, to optimize fallthrough.
  Condition EmitTypeofIs(LTypeofIsAndBranch* instr, Register input);

  // Emits optimized code for %_IsString(x). Preserves the input register.
  // Returns the condition on which a final split to the true and false
  // labels should be made, to optimize fallthrough.
  Condition EmitIsString(Register input,
                         Register temp1,
                         Label* is_not_string,
                         SmiCheck check_needed);

  // Emits optimized code to deep-copy the contents of statically known
  // object graphs (e.g. object literal boilerplate).
  void EmitDeepCopy(Handle<JSObject> object,
                    Register result,
                    Register source,
                    int* offset,
                    AllocationSiteMode mode);

  void EnsureSpaceForLazyDeopt(int space_needed) override;
  void DoLoadKeyedExternalArray(LLoadKeyed* instr);
  void DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr);
  void DoLoadKeyedFixedArray(LLoadKeyed* instr);
  void DoStoreKeyedExternalArray(LStoreKeyed* instr);
  void DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr);
  void DoStoreKeyedFixedArray(LStoreKeyed* instr);

  template <class T>
  void EmitVectorLoadICRegisters(T* instr);

  void EmitReturn(LReturn* instr);

  // Emits code for pushing either a tagged constant, a (non-double)
  // register, or a stack slot operand.
  void EmitPushTaggedOperand(LOperand* operand);

  friend class LGapResolver;

#ifdef _MSC_VER
  // On Windows, you may not access the stack more than one page below
  // the most recently mapped page. To make the whole allocated area
  // accessible, we write an arbitrary value to each page in the range
  // esp + offset - page_size .. esp, in turn.
  void MakeSureStackPagesMapped(int offset);
#endif

  ZoneList<Deoptimizer::JumpTableEntry> jump_table_;
  Scope* const scope_;
  ZoneList<LDeferredCode*> deferred_;
  bool frame_is_built_;

  // Builder that keeps track of safepoints in the code. The table itself
  // is emitted at the end of the generated code.
  SafepointTableBuilder safepoints_;
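  // A gap between instructions may contain parallel moves such as
  // [eax := ebx, ebx := eax] that cannot be emitted naively in sequence;
  // the resolver below orders the moves and breaks such cycles (e.g. with a
  // swap or a scratch location) so each source is read before it is
  // overwritten.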
  // Compiles a set of parallel moves into a sequential list of moves.
  LGapResolver resolver_;

  Safepoint::Kind expected_safepoint_kind_;

  class PushSafepointRegistersScope final BASE_EMBEDDED {
   public:
    explicit PushSafepointRegistersScope(LCodeGen* codegen)
        : codegen_(codegen) {
      DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kSimple);
      codegen_->masm_->PushSafepointRegisters();
      codegen_->expected_safepoint_kind_ = Safepoint::kWithRegisters;
      DCHECK(codegen_->info()->is_calling());
    }

    ~PushSafepointRegistersScope() {
      DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kWithRegisters);
      codegen_->masm_->PopSafepointRegisters();
      codegen_->expected_safepoint_kind_ = Safepoint::kSimple;
    }

   private:
    LCodeGen* codegen_;
  };

  friend class LDeferredCode;
  friend class LEnvironment;
  friend class SafepointGenerator;
  DISALLOW_COPY_AND_ASSIGN(LCodeGen);
};


class LDeferredCode : public ZoneObject {
 public:
  explicit LDeferredCode(LCodeGen* codegen)
      : codegen_(codegen),
        external_exit_(NULL),
        instruction_index_(codegen->current_instruction_) {
    codegen->AddDeferredCode(this);
  }

  virtual ~LDeferredCode() {}
  virtual void Generate() = 0;
  virtual LInstruction* instr() = 0;

  void SetExit(Label* exit) { external_exit_ = exit; }
  Label* entry() { return &entry_; }
  Label* exit() { return external_exit_ != NULL ? external_exit_ : &exit_; }
  Label* done() { return codegen_->NeedsDeferredFrame() ? &done_ : exit(); }
  int instruction_index() const { return instruction_index_; }

 protected:
  LCodeGen* codegen() const { return codegen_; }
  MacroAssembler* masm() const { return codegen_->masm(); }

 private:
  LCodeGen* codegen_;
  Label entry_;
  Label exit_;
  Label* external_exit_;
  Label done_;
  int instruction_index_;
};

}  // namespace internal
}  // namespace v8

#endif  // V8_CRANKSHAFT_IA32_LITHIUM_CODEGEN_IA32_H_