/art/compiler/dex/quick/x86/
call_x86.cc
   98  DCHECK_EQ(start_of_method_reg.Is64Bit(), cu_->target64);  in GenLargePackedSwitch()
  117  OpRegReg(kOpAdd, start_of_method_reg, cu_->target64 ? As64BitReg(disp_reg) : disp_reg);  in GenLargePackedSwitch()
  176  int ex_offset = cu_->target64 ?  in GenMoveException()
  180  NewLIR2(cu_->target64 ? kX86Mov64RT : kX86Mov32RT, rl_result.reg.GetReg(), ex_offset);  in GenMoveException()
  181  NewLIR2(cu_->target64 ? kX86Mov64TI : kX86Mov32TI, ex_offset, 0);  in GenMoveException()
  189  DCHECK_EQ(tgt_addr_reg.Is64Bit(), cu_->target64);  in MarkGCCard()
  190  DCHECK_EQ(val_reg.Is64Bit(), cu_->target64);  in MarkGCCard()
  194  int ct_offset = cu_->target64 ?  in MarkGCCard()
  197  NewLIR2(cu_->target64 ? kX86Mov64RT : kX86Mov32RT, reg_card_base.GetReg(), ct_offset);  in MarkGCCard()
  221  InstructionSet isa = cu_->target64 ? kX86_64 : kX86;  in GenEntrySequence()
  [all …]
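The GenMoveException() hits show this file's recurring shape: the thread-relative opcode and the Thread field offset are both keyed off cu_->target64, because a 64-bit Thread object lays out its fields differently and needs 64-bit moves. A minimal sketch of that shape; the opcode names follow the snippets, while ExceptionOffset and its values are placeholders, not ART's real layout:

    #include <cstdint>

    // Hypothetical stand-ins for ART's LIR opcode enum and Thread offsets;
    // the real definitions live in x86_lir.h and thread.h.
    enum X86OpCode { kX86Mov32RT, kX86Mov64RT, kX86Mov32TI, kX86Mov64TI };

    template <int kPointerSize>
    int ExceptionOffset() { return kPointerSize == 8 ? 128 : 68; }  // placeholder values

    void NewLIR2(X86OpCode op, int32_t operand0, int32_t operand1) { /* append LIR */ }

    // The GenMoveException() shape from the hits above: opcode width and
    // Thread field offset are both selected by target64 in one place.
    void GenMoveExceptionSketch(bool target64, int result_reg) {
      int ex_offset = target64 ? ExceptionOffset<8>() : ExceptionOffset<4>();
      NewLIR2(target64 ? kX86Mov64RT : kX86Mov32RT, result_reg, ex_offset);  // load Thread::exception_
      NewLIR2(target64 ? kX86Mov64TI : kX86Mov32TI, ex_offset, 0);           // then clear the slot
    }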
target_x86.cc
  184  return cu_->target64 ? x86_64_loc_c_return_ref : x86_loc_c_return_ref;  in LocCReturnRef()
  188  return cu_->target64 ? x86_64_loc_c_return_wide : x86_loc_c_return_wide;  in LocCReturnWide()
  227  case kHiddenFpArg: DCHECK(!cu_->target64); res_reg = rs_fr0; break;  in TargetReg32()
  447  if (!cu_->target64) {  in AllocateByteRegister()
  458  return cu_->target64 || reg.GetRegNum() < rs_rX86_SP.GetRegNum();  in IsByteRegister()
  463  if (cu_->target64) {  in ClobberCallerSave()
  522  if (cu_->target64) {  in LockCallTemps()
  543  if (cu_->target64) {  in FreeCallTemps()
  615  if (cu_->target64) {  in CompilerInitializeRegAlloc()
  628  const ArrayRef<const RegStorage> *xp_regs = cu_->target64 ? &xp_regs_64 : &xp_regs_32;  in CompilerInitializeRegAlloc()
  [all …]
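The fully visible hit at line 458 is a self-contained rule: on 32-bit x86 only the first four GPRs have 8-bit sub-registers (AL/CL/DL/BL, i.e. encodings below ESP's), while x86-64 can byte-address any GPR via a REX prefix. Restated as a standalone predicate, with register numbering per the usual x86 encoding (EAX=0 .. EDI=7):

    constexpr int kX86RegSP = 4;  // ESP's encoding; 0..3 are EAX, ECX, EDX, EBX

    // Mirrors the line-458 return: every GPR is byte-addressable on x86-64,
    // but only the first four exist as byte registers on 32-bit x86.
    bool IsByteRegister(bool target64, int reg_num) {
      return target64 || reg_num < kX86RegSP;
    }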
int_x86.cc
   36  if (cu_->target64) {  in GenCmpLong()
  401  if (cu_->target64) {  in GenFusedLongCmpBranch()
  455  if (cu_->target64) {  in GenFusedLongCmpImmBranch()
  852  if (!cu_->target64) {  in GenInlinedPeek()
  874  if (!cu_->target64) {  in GenInlinedPoke()
  888  if (!cu_->target64 && size == kSignedByte) {  in GenInlinedPoke()
  928  if (!cu_->target64) {  in GenInlinedCas()
  936  if (is_long && cu_->target64) {  in GenInlinedCas()
 1037  if (cu_->target64) {  in GenInlinedCas()
 1121  if (cu_->target64) {  in GenDivZeroCheckWide()
  [all …]
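GenCmpLong() is the clearest example of why these functions branch: with 64-bit GPRs a long compare is a single native comparison, but on 32-bit x86 the long lives in a register pair, so the high words decide first and the low words break ties as unsigned values. A host-side model of the two paths (it computes the cmp-long result directly rather than emitting LIR):

    #include <cassert>
    #include <cstdint>

    int32_t CmpLong64(int64_t a, int64_t b) {            // target64 path: one compare
      return (a < b) ? -1 : (a > b) ? 1 : 0;
    }

    int32_t CmpLong32(int64_t a, int64_t b) {            // register-pair path
      int32_t a_hi = static_cast<int32_t>(a >> 32), b_hi = static_cast<int32_t>(b >> 32);
      uint32_t a_lo = static_cast<uint32_t>(a), b_lo = static_cast<uint32_t>(b);
      if (a_hi != b_hi) return (a_hi < b_hi) ? -1 : 1;   // signed high-word compare
      if (a_lo != b_lo) return (a_lo < b_lo) ? -1 : 1;   // unsigned low-word compare
      return 0;
    }

    int main() {
      const int64_t samples[] = {INT64_MIN, -1, 0, 1, INT64_C(0x100000000), INT64_MAX};
      for (int64_t a : samples)
        for (int64_t b : samples)
          assert(CmpLong64(a, b) == CmpLong32(a, b));
    }

The fused-branch variants at lines 401/455 make the same split; they just branch on the intermediate flags instead of materializing -1/0/1.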
utility_x86.cc
  498  if (op == kOpMul && !cu_->target64) {  in OpRegRegImm()
  501  } else if (op == kOpAnd && !cu_->target64) {  in OpRegRegImm()
  656  if (cu_->target64) {  in LoadBaseIndexedDisp()
  803  if (cu_->target64) {  in StoreBaseIndexedDisp()
  916  base_of_code_ = mir_graph_->GetNewCompilerTemp(kCompilerTempVR, cu_->target64 == true);  in AnalyzeMIR()
 1020  if (use.is_const && !cu_->target64) {  in AnalyzeDoubleUse()
 1055  if (cu_->target64) {  in AnalyzeInvokeStatic()
 1079  if (cu_->target64) {  in InvokeTrampoline()
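One pattern several of these hits share: values that hold native pointers (the method's code base set up at line 916, trampoline targets in InvokeTrampoline()) must occupy a wide virtual register exactly when the target is 64-bit. A toy model of that allocation decision; GetNewCompilerTemp's wide parameter is inferred from the snippet, not taken from ART's headers:

    // Hypothetical reduced compiler-temp record; ART's real temps carry more state.
    struct CompilerTemp { int id; bool wide; };

    CompilerTemp GetNewCompilerTemp(bool wide) {
      static int next_id = 0;
      return CompilerTemp{next_id++, wide};
    }

    CompilerTemp AllocBaseOfCodeTemp(bool target64) {
      // A pointer-sized temp is wide iff pointers are 64-bit, as at line 916.
      return GetNewCompilerTemp(/*wide=*/target64);
    }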
fp_x86.cc
  258  if (cu_->target64) {  in GenConversion()
  266  if (cu_->target64) {  in GenConversion()
  274  if (cu_->target64) {  in GenConversion()
  299  if (cu_->target64) {  in GenConversion()
  573  if (cu_->target64) {  in GenNegDouble()
  651  if (cu_->target64) {  in GenInlinedAbsDouble()
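GenNegDouble() and GenInlinedAbsDouble() come down to sign-bit manipulation on the double's bit pattern, and target64 decides whether that happens in one 64-bit GPR or only in the high word of a 32-bit register pair. A host-side model of the two negation strategies (computes the value, does not emit LIR; assumes little-endian, as on x86):

    #include <cassert>
    #include <cstdint>
    #include <cstring>

    double NegDouble64(double v) {                  // one 64-bit xor of the sign bit
      uint64_t bits; std::memcpy(&bits, &v, sizeof bits);
      bits ^= 0x8000000000000000ULL;
      std::memcpy(&v, &bits, sizeof v);
      return v;
    }

    double NegDouble32(double v) {                  // pair: touch only the high word
      uint32_t halves[2]; std::memcpy(halves, &v, sizeof halves);
      halves[1] ^= 0x80000000U;                     // little-endian: [1] is the high word
      std::memcpy(&v, halves, sizeof v);
      return v;
    }

    int main() {
      for (double d : {0.0, -1.5, 3.25, -1e300})
        assert(NegDouble64(d) == NegDouble32(d) && NegDouble64(d) == -d);
    }

Abs is the same idea with an AND mask that clears the sign bit instead of flipping it.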
codegen_x86.h
   89  if (cu_->target64) {  in TargetReg()
   99  } else if (wide_kind == kRef && cu_->target64) {  in TargetReg()
  106  return TargetReg(symbolic_reg, cu_->target64 ? kWide : kNotWide);  in TargetPtrReg()
  947  return cu_->target64;  // On 64b, we have 64b GPRs.  in WideGPRsAreAliases()
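Lines 99 and 106 together give the widening rule: a reference register is handed out as a 64-bit view on 64-bit targets, and TargetPtrReg() is just TargetReg() with the wideness chosen from target64. Reduced to a standalone sketch (RegStorage shrunk to a number plus a width flag):

    enum WideKind { kNotWide, kWide, kRef };
    struct RegStorage { int num; bool is_64bit; };

    RegStorage TargetReg(int symbolic_reg, WideKind wide_kind, bool target64) {
      // kRef widens to 64-bit exactly when the target has 64-bit GPRs.
      bool wide = (wide_kind == kWide) || (wide_kind == kRef && target64);
      return RegStorage{symbolic_reg, wide};
    }

    RegStorage TargetPtrReg(int symbolic_reg, bool target64) {
      return TargetReg(symbolic_reg, target64 ? kWide : kNotWide, target64);
    }

Line 947's WideGPRsAreAliases() is the flip side: on 64-bit, the 32- and 64-bit views of a GPR are the same physical register.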
assemble_x86.cc
  628  if (cu_->target64 || kIsDebugBuild) {  in ComputeSize()
  638  DCHECK(cu_->target64) << "Attempt to use a 64-bit only addressable register "  in ComputeSize()
  658  || (cu_->target64 && entry->skeleton.prefix1 == THREAD_PREFIX)) {  in ComputeSize()
  835  ComputeSize(&X86Mir2Lir::EncodingMap[cu_->target64 ? kX86Sub64RI : kX86Sub32RI],  in GetInsnSize()
  872  CHECK(cu_->target64 || !entry->skeleton.r8_form)  in CheckValidByteRegister()
  916  if (cu_->target64 && entry->skeleton.prefix1 == THREAD_PREFIX) {  in EmitPrefix()
  921  DCHECK(cu_->target64);  in EmitPrefix()
  931  DCHECK(cu_->target64);  in EmitPrefix()
  943  DCHECK(cu_->target64);  in EmitPrefix()
  984  if (cu_->target64) {  in EmitModrmThread()
  [all …]
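EmitPrefix() concentrates two encoding facts behind these DCHECKs: REX prefixes (0x40..0x4F) exist only in 64-bit mode, and the segment override for thread-relative operands differs per target. A sketch under the assumption that the 32-bit target addresses Thread through FS (0x64) and the 64-bit one through GS (0x65):

    #include <cstdint>
    #include <vector>

    constexpr uint8_t kThreadPrefix32 = 0x64;   // FS override (assumed)
    constexpr uint8_t kThreadPrefix64 = 0x65;   // GS override (assumed)

    // Emits an optional segment prefix and an optional REX.W. The REX byte
    // is only legal when target64, mirroring the DCHECK(cu_->target64) guards.
    void EmitPrefixSketch(std::vector<uint8_t>& code, bool target64,
                          uint8_t prefix1, bool rex_w) {
      if (target64 && prefix1 == kThreadPrefix32) {
        prefix1 = kThreadPrefix64;              // rewrite the thread prefix for 64-bit
      }
      if (prefix1 != 0) {
        code.push_back(prefix1);
      }
      if (rex_w && target64) {                  // REX.W: 64-bit operand size
        code.push_back(0x48);
      }
    }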
/art/runtime/
vmap_table.h
   68  bool target64 = (kRuntimeISA == kArm64) || (kRuntimeISA == kX86_64);  in IsInContext()  (local)
   69  if (target64 && high_reg) {  in IsInContext()
stack.cc
  174  bool target64 = Is64BitInstructionSet(kRuntimeISA);  in GetVReg()  (local)
  175  if (target64) {  in GetVReg()
  231  bool target64 = Is64BitInstructionSet(kRuntimeISA);  in GetVRegPair()  (local)
  232  if (target64) {  in GetVRegPair()
  270  bool target64 = Is64BitInstructionSet(kRuntimeISA);  in SetVReg()  (local)
  272  if (target64) {  in SetVReg()
  338  bool target64 = Is64BitInstructionSet(kRuntimeISA);  in SetVRegPair()  (local)
  340  if (target64) {  in SetVRegPair()
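The reason stack.cc keeps computing Is64BitInstructionSet(kRuntimeISA): on a 64-bit runtime, a 32-bit Dalvik vreg promoted to a register occupies only the low half of a 64-bit GPR, so reads must narrow and writes must not clobber the high half. A toy model of the write path under that reading (the real code goes through the stack frame's register context):

    #include <cassert>
    #include <cstdint>

    // Splice a 32-bit vreg value into a 64-bit GPR without disturbing the
    // high half; on a 32-bit runtime the register holds the vreg whole.
    uint64_t SetVRegInGPR(uint64_t old_gpr, uint32_t value, bool target64) {
      if (target64) {
        return (old_gpr & 0xFFFFFFFF00000000ULL) | value;
      }
      return value;
    }

    int main() {
      assert(SetVRegInGPR(0xDEADBEEF00000000ULL, 42, /*target64=*/true)
             == 0xDEADBEEF0000002AULL);
    }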
/art/compiler/dex/
compiler_ir.h
   67  bool target64;  (member)
frontend.cc
  495  target64(false),  in CompilationUnit()
  656  cu.target64 = Is64BitInstructionSet(cu.instruction_set);  in CompileMethod()
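The flag is seeded exactly once per CompilationUnit, derived from the target ISA. A sketch of the predicate, modeled on art::Is64BitInstructionSet (the enumerator list approximates the targets ART supported at the time):

    enum InstructionSet { kNone, kArm, kArm64, kThumb2, kX86, kX86_64, kMips, kMips64 };

    // True for every ISA whose GPRs and pointers are 64 bits wide.
    constexpr bool Is64BitInstructionSet(InstructionSet isa) {
      return isa == kArm64 || isa == kX86_64 || isa == kMips64;
    }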
/art/compiler/dex/quick/
codegen_util.cc
  437  static void PushPointer(std::vector<uint8_t>& buf, const void* pointer, bool target64) {  in PushPointer()  (argument)
  439  if (target64) {  in PushPointer()
  477  PushPointer(code_buffer_, &target_method_id, cu_->target64);  in InstallLiteralPools()
  495  PushPointer(code_buffer_, &target_method_id, cu_->target64);  in InstallLiteralPools()
  509  PushPointer(code_buffer_, &target_method_id, cu_->target64);  in InstallLiteralPools()
  523  PushPointer(code_buffer_, &target_string_id, cu_->target64);  in InstallLiteralPools()
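Line 439's branch is easy to reconstruct: literal pools embed pointers directly into the code buffer, 8 bytes on a 64-bit target and 4 on a 32-bit one. A plausible body for the helper (little-endian byte order, as on x86; a sketch, not ART's verbatim code):

    #include <cstdint>
    #include <vector>

    static void PushPointer(std::vector<uint8_t>& buf, const void* pointer, bool target64) {
      uint64_t value = reinterpret_cast<uintptr_t>(pointer);
      int width = target64 ? 8 : 4;            // pointer width in the emitted code
      for (int i = 0; i < width; ++i) {
        buf.push_back(static_cast<uint8_t>(value >> (i * 8)));  // LSB first
      }
    }

The InstallLiteralPools() call sites then patch these slots with real method/string addresses at link time.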
ralloc_util.cc
  496  if (cu_->target64) {  in AllocLiveReg()
  504  if (wide && !reg.IsFloat() && !cu_->target64) {  in AllocLiveReg()
 1319  bool wide = curr->wide || (cu_->target64 && curr->ref);  in DoPromotion()
 1334  if (wide && !cu_->target64) {  in DoPromotion()
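Line 1319 states the promotion rule outright, and line 1334 its 32-bit consequence: a value needs a wide register if it is a Dalvik wide value or a reference on a 64-bit target (references ride in pointer-sized GPRs there), and a wide value on a 32-bit target must take a register pair. The same rule as standalone predicates:

    struct RegLocationBits { bool wide; bool ref; };  // reduced from ART's RegLocation

    bool NeedsWideReg(const RegLocationBits& loc, bool target64) {
      return loc.wide || (target64 && loc.ref);       // line 1319's condition
    }

    bool NeedsRegisterPair(const RegLocationBits& loc, bool target64) {
      return NeedsWideReg(loc, target64) && !target64;  // line 1334's condition
    }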
mir_to_lir.cc
   89  if (cu_->target64) {  in LoadArg()
  190  if (cu_->target64) {  in LoadArgDirect()
 1279  if (cu_->target64 && !rs.Is64Bit()) {  in CheckRegStorageImpl()
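The CheckRegStorageImpl() hit is a debug guard: on a 64-bit target, a register expected to carry a pointer-sized value must be a 64-bit RegStorage view. A reduced sketch of that check (the real function also takes wideness/ref expectations as parameters):

    #include <cassert>

    struct RegStorage {
      bool is_64bit;
      bool Is64Bit() const { return is_64bit; }
    };

    // Fails fast when a 32-bit register view leaks into 64-bit codegen,
    // mirroring the line-1279 condition.
    void CheckRegStorageSketch(const RegStorage& rs, bool target64) {
      if (target64 && !rs.Is64Bit()) {
        assert(false && "32-bit register view on a 64-bit target");
      }
    }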
gen_invoke.cc
  245  DCHECK(!cu_->target64);  in CallRuntimeHelperRegLocationRegLocation()
  666  if (cu->target64) {  in NextInvokeInsnSP()