// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_CRANKSHAFT_X64_LITHIUM_CODEGEN_X64_H_
#define V8_CRANKSHAFT_X64_LITHIUM_CODEGEN_X64_H_

#include "src/ast/scopes.h"
#include "src/base/logging.h"
#include "src/crankshaft/lithium-codegen.h"
#include "src/crankshaft/x64/lithium-gap-resolver-x64.h"
#include "src/crankshaft/x64/lithium-x64.h"
#include "src/deoptimizer.h"
#include "src/safepoint-table.h"
#include "src/utils.h"

namespace v8 {
namespace internal {

// Forward declarations.
class LDeferredCode;
class SafepointGenerator;

class LCodeGen: public LCodeGenBase {
 public:
  LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
      : LCodeGenBase(chunk, assembler, info),
        jump_table_(4, info->zone()),
        scope_(info->scope()),
        deferred_(8, info->zone()),
        frame_is_built_(false),
        safepoints_(info->zone()),
        resolver_(this),
        expected_safepoint_kind_(Safepoint::kSimple) {
    PopulateDeoptimizationLiteralsWithInlinedFunctions();
  }

  int LookupDestination(int block_id) const {
    return chunk()->LookupDestination(block_id);
  }

  bool IsNextEmittedBlock(int block_id) const {
    return LookupDestination(block_id) == GetNextEmittedBlock();
  }

  bool NeedsEagerFrame() const {
    return HasAllocatedStackSlots() || info()->is_non_deferred_calling() ||
           !info()->IsStub() || info()->requires_frame();
  }
  bool NeedsDeferredFrame() const {
    return !NeedsEagerFrame() && info()->is_deferred_calling();
  }

  // Support for converting LOperands to assembler types.
  Register ToRegister(LOperand* op) const;
  XMMRegister ToDoubleRegister(LOperand* op) const;
  bool IsInteger32Constant(LConstantOperand* op) const;
  bool IsExternalConstant(LConstantOperand* op) const;
  bool IsDehoistedKeyConstant(LConstantOperand* op) const;
  bool IsSmiConstant(LConstantOperand* op) const;
  int32_t ToRepresentation(LConstantOperand* op, const Representation& r) const;
  int32_t ToInteger32(LConstantOperand* op) const;
  Smi* ToSmi(LConstantOperand* op) const;
  double ToDouble(LConstantOperand* op) const;
  ExternalReference ToExternalReference(LConstantOperand* op) const;
  Handle<Object> ToHandle(LConstantOperand* op) const;
  Operand ToOperand(LOperand* op) const;
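
  // The Do<Instr> implementations in the .cc file use these helpers to map
  // Lithium operands onto machine locations; a sketch (the operand accessors
  // such as value()/result() vary per instruction):
  //   Register input = ToRegister(instr->value());
  //   XMMRegister result = ToDoubleRegister(instr->result());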

  // Try to generate code for the entire chunk, but it may fail if the
  // chunk contains constructs we cannot handle. Returns true if the
  // code generation attempt succeeded.
  bool GenerateCode();

  // Finish the code by setting stack height, safepoint, and bailout
  // information on it.
  void FinishCode(Handle<Code> code);

  // Deferred code support.
  void DoDeferredNumberTagD(LNumberTagD* instr);

  enum IntegerSignedness { SIGNED_INT32, UNSIGNED_INT32 };
  void DoDeferredNumberTagIU(LInstruction* instr,
                             LOperand* value,
                             LOperand* temp1,
                             LOperand* temp2,
                             IntegerSignedness signedness);

  void DoDeferredTaggedToI(LTaggedToI* instr, Label* done);
  void DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr);
  void DoDeferredStackCheck(LStackCheck* instr);
  void DoDeferredMaybeGrowElements(LMaybeGrowElements* instr);
  void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
  void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
  void DoDeferredAllocate(LAllocate* instr);
  void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
  void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr,
                                   Register object,
                                   Register index);

  // Parallel move support.
  void DoParallelMove(LParallelMove* move);
  void DoGap(LGap* instr);

  // Emit frame translation commands for an environment.
  void WriteTranslation(LEnvironment* environment, Translation* translation);

  // Declare methods that deal with the individual node types.
#define DECLARE_DO(type) void Do##type(L##type* node);
  LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO)
#undef DECLARE_DO
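
  // For instance, the list entry for allocation expands DECLARE_DO(Allocate)
  // into:
  //   void DoAllocate(LAllocate* node);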

 private:
  LPlatformChunk* chunk() const { return chunk_; }
  Scope* scope() const { return scope_; }
  HGraph* graph() const { return chunk()->graph(); }

  XMMRegister double_scratch0() const { return kScratchDoubleReg; }

  void EmitClassOfTest(Label* if_true,
                       Label* if_false,
                       Handle<String> class_name,
                       Register input,
                       Register temporary,
                       Register scratch);

  bool HasAllocatedStackSlots() const {
    return chunk()->HasAllocatedStackSlots();
  }
  int GetStackSlotCount() const { return chunk()->GetSpillSlotCount(); }
  int GetTotalFrameSlotCount() const {
    return chunk()->GetTotalFrameSlotCount();
  }

  void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }

  void SaveCallerDoubles();
  void RestoreCallerDoubles();

  // Code generation passes. Returns true if code generation should
  // continue.
  void GenerateBodyInstructionPre(LInstruction* instr) override;
  void GenerateBodyInstructionPost(LInstruction* instr) override;
  bool GeneratePrologue();
  bool GenerateDeferredCode();
  bool GenerateJumpTable();
  bool GenerateSafepointTable();

  // Generates the custom OSR entrypoint and sets the osr_pc_offset.
  void GenerateOsrPrologue();

  enum SafepointMode {
    RECORD_SIMPLE_SAFEPOINT,
    RECORD_SAFEPOINT_WITH_REGISTERS
  };

  void CallCodeGeneric(Handle<Code> code,
                       RelocInfo::Mode mode,
                       LInstruction* instr,
                       SafepointMode safepoint_mode,
                       int argc);

  void CallCode(Handle<Code> code,
                RelocInfo::Mode mode,
                LInstruction* instr);

  void CallRuntime(const Runtime::Function* function,
                   int num_arguments,
                   LInstruction* instr,
                   SaveFPRegsMode save_doubles = kDontSaveFPRegs);

  void CallRuntime(Runtime::FunctionId id,
                   int num_arguments,
                   LInstruction* instr) {
    const Runtime::Function* function = Runtime::FunctionForId(id);
    CallRuntime(function, num_arguments, instr);
  }

  void CallRuntime(Runtime::FunctionId id, LInstruction* instr) {
    const Runtime::Function* function = Runtime::FunctionForId(id);
    CallRuntime(function, function->nargs, instr);
  }

  void CallRuntimeFromDeferred(Runtime::FunctionId id,
                               int argc,
                               LInstruction* instr,
                               LOperand* context);

  void LoadContextFromDeferred(LOperand* context);

  void PrepareForTailCall(const ParameterCount& actual, Register scratch1,
                          Register scratch2, Register scratch3);

  // Generate a direct call to a known function. Expects the function
  // to be in rdi.
  void CallKnownFunction(Handle<JSFunction> function,
                         int formal_parameter_count, int arity,
                         bool is_tail_call, LInstruction* instr);

  void RecordSafepointWithLazyDeopt(LInstruction* instr,
                                    SafepointMode safepoint_mode,
                                    int argc);
  void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
                                            Safepoint::DeoptMode mode);
  void DeoptimizeIf(Condition cc, LInstruction* instr,
                    DeoptimizeReason deopt_reason,
                    Deoptimizer::BailoutType bailout_type);
  void DeoptimizeIf(Condition cc, LInstruction* instr,
                    DeoptimizeReason deopt_reason);

  bool DeoptEveryNTimes() {
    return FLAG_deopt_every_n_times != 0 && !info()->IsStub();
  }

  void AddToTranslation(LEnvironment* environment,
                        Translation* translation,
                        LOperand* op,
                        bool is_tagged,
                        bool is_uint32,
                        int* object_index_pointer,
                        int* dematerialized_index_pointer);

  Register ToRegister(int index) const;
  XMMRegister ToDoubleRegister(int index) const;
  Operand BuildFastArrayOperand(
      LOperand* elements_pointer,
      LOperand* key,
      Representation key_representation,
      ElementsKind elements_kind,
      uint32_t base_offset);

  Operand BuildSeqStringOperand(Register string,
                                LOperand* index,
                                String::Encoding encoding);

  void EmitIntegerMathAbs(LMathAbs* instr);
  void EmitSmiMathAbs(LMathAbs* instr);

  // Support for recording safepoint information.
  void RecordSafepoint(LPointerMap* pointers,
                       Safepoint::Kind kind,
                       int arguments,
                       Safepoint::DeoptMode mode);
  void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode);
  void RecordSafepoint(Safepoint::DeoptMode mode);
  void RecordSafepointWithRegisters(LPointerMap* pointers,
                                    int arguments,
                                    Safepoint::DeoptMode mode);

  static Condition TokenToCondition(Token::Value op, bool is_unsigned);
  void EmitGoto(int block);

  // EmitBranch expects to be the last instruction of a block.
  template <class InstrType>
  void EmitBranch(InstrType instr, Condition cc);
  template <class InstrType>
  void EmitTrueBranch(InstrType instr, Condition cc);
  template <class InstrType>
  void EmitFalseBranch(InstrType instr, Condition cc);
  void EmitNumberUntagD(LNumberUntagD* instr, Register input,
                        XMMRegister result, NumberUntagDMode mode);

  // Emits optimized code for typeof x == "y". Modifies the input register.
  // Returns the condition on which a final split to true and false label
  // should be made, to optimize fallthrough.
  Condition EmitTypeofIs(LTypeofIsAndBranch* instr, Register input);

  // Emits optimized code for %_IsString(x). Preserves the input register.
  // Returns the condition on which a final split to true and false label
  // should be made, to optimize fallthrough.
  Condition EmitIsString(Register input,
                         Register temp1,
                         Label* is_not_string,
                         SmiCheck check_needed);

  // Emits code for pushing either a tagged constant, a (non-double)
  // register, or a stack slot operand.
  void EmitPushTaggedOperand(LOperand* operand);

  // Emits optimized code to deep-copy the contents of statically known
  // object graphs (e.g. object literal boilerplate).
  void EmitDeepCopy(Handle<JSObject> object,
                    Register result,
                    Register source,
                    int* offset,
                    AllocationSiteMode mode);

  void EnsureSpaceForLazyDeopt(int space_needed) override;
  void DoLoadKeyedExternalArray(LLoadKeyed* instr);
  void DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr);
  void DoLoadKeyedFixedArray(LLoadKeyed* instr);
  void DoStoreKeyedExternalArray(LStoreKeyed* instr);
  void DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr);
  void DoStoreKeyedFixedArray(LStoreKeyed* instr);

  template <class T>
  void EmitVectorLoadICRegisters(T* instr);

#ifdef _MSC_VER
  // On Windows, the stack may not be accessed more than one page below
  // the most recently mapped page. To make the allocated area accessible
  // at any offset, we write an arbitrary value to each page in the range
  // rsp + offset - page_size .. rsp, in turn.
  void MakeSureStackPagesMapped(int offset);
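  // A minimal sketch of the probe loop, assuming a kPageSize constant
  // (the real body lives in the .cc file):
  //   for (offset -= kPageSize; offset > 0; offset -= kPageSize) {
  //     __ movp(Operand(rsp, offset), rax);  // Touch one page.
  //   }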
#endif

  ZoneList<Deoptimizer::JumpTableEntry> jump_table_;
  Scope* const scope_;
  ZoneList<LDeferredCode*> deferred_;
  bool frame_is_built_;

  // Builder that keeps track of safepoints in the code. The table
  // itself is emitted at the end of the generated code.
  SafepointTableBuilder safepoints_;

  // Compiles a set of parallel moves into a sequential list of moves.
  LGapResolver resolver_;

  Safepoint::Kind expected_safepoint_kind_;

  class PushSafepointRegistersScope final BASE_EMBEDDED {
   public:
    explicit PushSafepointRegistersScope(LCodeGen* codegen)
        : codegen_(codegen) {
      DCHECK(codegen_->info()->is_calling());
      DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kSimple);
      codegen_->masm_->PushSafepointRegisters();
      codegen_->expected_safepoint_kind_ = Safepoint::kWithRegisters;
    }

    ~PushSafepointRegistersScope() {
      DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kWithRegisters);
      codegen_->masm_->PopSafepointRegisters();
      codegen_->expected_safepoint_kind_ = Safepoint::kSimple;
    }

   private:
    LCodeGen* codegen_;
  };

  friend class LDeferredCode;
  friend class LEnvironment;
  friend class SafepointGenerator;
  DISALLOW_COPY_AND_ASSIGN(LCodeGen);
};

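// Each deferred operation typically defines a small LDeferredCode subclass
// locally in the .cc file, whose Generate() forwards to the corresponding
// DoDeferred* hook above. A sketch of the usual pattern:
//
//   class DeferredNumberTagD final : public LDeferredCode {
//    public:
//     DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr)
//         : LDeferredCode(codegen), instr_(instr) {}
//     void Generate() override { codegen()->DoDeferredNumberTagD(instr_); }
//     LInstruction* instr() override { return instr_; }
//
//    private:
//     LNumberTagD* instr_;
//   };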
class LDeferredCode: public ZoneObject {
 public:
  explicit LDeferredCode(LCodeGen* codegen)
      : codegen_(codegen),
        external_exit_(NULL),
        instruction_index_(codegen->current_instruction_) {
    codegen->AddDeferredCode(this);
  }

  virtual ~LDeferredCode() {}
  virtual void Generate() = 0;
  virtual LInstruction* instr() = 0;

  void SetExit(Label* exit) { external_exit_ = exit; }
  Label* entry() { return &entry_; }
  Label* exit() { return external_exit_ != NULL ? external_exit_ : &exit_; }
  Label* done() { return codegen_->NeedsDeferredFrame() ? &done_ : exit(); }
  int instruction_index() const { return instruction_index_; }

 protected:
  LCodeGen* codegen() const { return codegen_; }
  MacroAssembler* masm() const { return codegen_->masm(); }

 private:
  LCodeGen* codegen_;
  Label entry_;
  Label exit_;
  Label done_;
  Label* external_exit_;
  int instruction_index_;
};

}  // namespace internal
}  // namespace v8

#endif  // V8_CRANKSHAFT_X64_LITHIUM_CODEGEN_X64_H_