Searched refs:Is64Bit (Results 1 – 19 of 19) sorted by relevance

/art/compiler/dex/quick/arm64/
utility_arm64.cc
401 if (r_dest.Is64Bit()) { in LoadConstantNoClobber()
463 DCHECK(r_dest.Is64Bit()); in LoadConstantWide()
565 A64Opcode wide = (r_dest_src1.Is64Bit()) ? WIDE(0) : UNWIDE(0); in OpRegRegShift()
566 CHECK_EQ(r_dest_src1.Is64Bit(), r_src2.Is64Bit()); in OpRegRegShift()
632 A64Opcode wide = (r_dest_src1.Is64Bit()) ? WIDE(0) : UNWIDE(0); in OpRegRegExtend()
750 A64Opcode widened_opcode = r_dest.Is64Bit() ? WIDE(opcode) : opcode; in OpRegRegRegShift()
751 CHECK_EQ(r_dest.Is64Bit(), r_src1.Is64Bit()); in OpRegRegRegShift()
752 CHECK_EQ(r_dest.Is64Bit(), r_src2.Is64Bit()); in OpRegRegRegShift()
778 A64Opcode widened_opcode = r_dest.Is64Bit() ? WIDE(opcode) : opcode; in OpRegRegRegExtend()
780 if (r_dest.Is64Bit()) { in OpRegRegRegExtend()
[all …]
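
A recurring pattern in the utility_arm64.cc hits above is selecting the 64-bit ("wide") encoding of an instruction from the destination register's width while checking that the operand widths agree. Below is a minimal sketch of that pattern; the A64Opcode type, the marker bit, and the WIDE()/UNWIDE() helpers are hypothetical stand-ins, since their real definitions are not part of these results.

    #include <cassert>
    #include <cstdint>

    // Hypothetical stand-ins for ART's A64Opcode and WIDE()/UNWIDE() machinery;
    // the marker bit and types are assumptions made for this illustration.
    using A64Opcode = uint32_t;
    constexpr A64Opcode kWideBit = 0x1000;
    constexpr A64Opcode WIDE(A64Opcode op) { return op | kWideBit; }
    constexpr A64Opcode UNWIDE(A64Opcode op) { return op & ~kWideBit; }

    struct Reg {
      bool is64;
      bool Is64Bit() const { return is64; }
    };

    // Widen the opcode when the destination is a 64-bit register and require the
    // source to match, mirroring CHECK_EQ(r_dest.Is64Bit(), r_src2.Is64Bit()).
    A64Opcode SelectWidth(A64Opcode op, Reg r_dest, Reg r_src) {
      assert(r_dest.Is64Bit() == r_src.Is64Bit());
      return r_dest.Is64Bit() ? WIDE(op) : UNWIDE(op);
    }
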
int_arm64.cc
115 bool is_wide = rs_dest.Is64Bit(); in GenSelect()
153 if (t_reg2.Is64Bit()) { in GenSelect()
275 A64Opcode wide = reg.Is64Bit() ? WIDE(0) : UNWIDE(0); in OpCmpImmBranch()
281 A64Opcode wide = reg.Is64Bit() ? WIDE(0) : UNWIDE(0); in OpCmpImmBranch()
285 A64Opcode wide = reg.Is64Bit() ? WIDE(0) : UNWIDE(0); in OpCmpImmBranch()
286 int value = reg.Is64Bit() ? 63 : 31; in OpCmpImmBranch()
306 if (temp_reg.Is64Bit()) { in OpCmpMemImmBranch()
322 DCHECK_EQ(r_dest.Is64Bit(), r_src.Is64Bit()); in OpRegCopyNoInsert()
333 if (r_dest.Is64Bit() && r_src.Is64Bit()) { in OpRegCopyNoInsert()
638 CHECK_EQ(r_src1.Is64Bit(), r_src2.Is64Bit()); in GenDivRem()
[all …]
codegen_arm64.h
276 if ((kFailOnSizeError || kReportSizeError) && !reg.Is64Bit()) { in As32BitReg()
329 if ((kFailOnSizeError || kReportSizeError) && !reg.Is64Bit()) { in Check64BitReg()
assemble_arm64.cc
775 } else if (reg.Is64Bit() != want_64_bit) { in EncodeLIRs()
947 DCHECK_EQ(IS_WIDE(opcode), reg.Is64Bit()); in AssembleLIR()
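
The codegen_arm64.h hits (and the matching ones in codegen_mips.h and codegen_x86.h below) all guard register-narrowing helpers with the same check: if size errors are reported or fatal, complain when the incoming register is not 64-bit. A rough sketch of that guard follows; kFailOnSizeError/kReportSizeError are treated as assumed compile-time policy flags and Reg is a hypothetical register type.

    #include <cstdio>
    #include <cstdlib>

    constexpr bool kFailOnSizeError = false;   // assumed policy flags
    constexpr bool kReportSizeError = true;

    struct Reg {
      bool is64;
      bool Is64Bit() const { return is64; }
    };

    // Narrow a register to its 32-bit view, flagging callers that pass a
    // register which is not actually 64-bit.
    Reg As32BitReg(Reg reg) {
      if ((kFailOnSizeError || kReportSizeError) && !reg.Is64Bit()) {
        std::fprintf(stderr, "As32BitReg: expected 64-bit register\n");
        if (kFailOnSizeError) {
          std::abort();
        }
      }
      return Reg{/*is64=*/false};  // hypothetical narrowed view
    }
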
/art/compiler/dex/quick/mips/
utility_mips.cc
35 DCHECK_EQ(r_dest.Is64Bit(), r_src.Is64Bit()); in OpFpRegCopy()
36 if (r_dest.Is64Bit()) { in OpFpRegCopy()
165 DCHECK(r_dest.Is64Bit()); in LoadConstantWideNoClobber()
306 bool is64bit = cu_->target64 && (r_dest.Is64Bit() || r_src1.Is64Bit() || r_src2.Is64Bit()); in OpRegRegReg()
350 bool is64bit = cu_->target64 && (r_dest.Is64Bit() || r_src1.Is64Bit()); in OpRegRegImm()
476 if (cu_->target64 && r_dest_src1.Is64Bit()) { in OpRegReg()
574 bool is64bit = cu_->target64 && r_dest.Is64Bit(); in LoadBaseIndexed()
772 AnnotateDalvikRegAccess(load, displacement >> 2, true /* is_load */, r_dest.Is64Bit()); in LoadBaseDispBody()
943 AnnotateDalvikRegAccess(store, displacement >> 2, false /* is_load */, r_src.Is64Bit()); in StoreBaseDispBody()
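
The utility_mips.cc hits show a width decision that depends on both the compilation target and the registers: an operation is treated as 64-bit only when compiling for a 64-bit target and at least one operand is wide. A small sketch of that predicate follows, with target64 standing in for cu_->target64 and Reg as a hypothetical register type.

    struct Reg {
      bool is64;
      bool Is64Bit() const { return is64; }
    };

    // Mirrors the guard in OpRegRegReg(): 64-bit handling applies only on a
    // 64-bit target, and only if some operand register is actually wide.
    bool IsWideOp(bool target64, Reg r_dest, Reg r_src1, Reg r_src2) {
      return target64 && (r_dest.Is64Bit() || r_src1.Is64Bit() || r_src2.Is64Bit());
    }
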
codegen_mips.h
292 if ((kFailOnSizeError || kReportSizeError) && !reg.Is64Bit()) { in As32BitReg()
331 if ((kFailOnSizeError || kReportSizeError) && !reg.Is64Bit()) { in Check64BitReg()
int_mips.cc
201 if (r_dest.Is64Bit() || r_src.Is64Bit()) { in OpRegCopyNoInsert()
/art/compiler/dex/
reg_storage.h
150 constexpr bool Is64Bit() const { in Is64Bit() function
155 return Is64Bit() ? kWide : kNotWide; in GetWideKind()
200 static constexpr bool Is64Bit(uint16_t reg) { in Is64Bit() function
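
reg_storage.h is where Is64Bit() itself lives: an instance query, a static overload on the raw encoding, and GetWideKind() derived from it. Below is a compilable sketch of that shape; the flag bit chosen here is an assumption for illustration, not ART's actual RegStorage layout.

    #include <cstdint>

    enum WideKind { kNotWide, kWide };

    class RegStorageSketch {
     public:
      static constexpr uint16_t kWideBit = 0x8000;  // assumed width flag, not ART's layout

      explicit constexpr RegStorageSketch(uint16_t reg) : reg_(reg) {}

      // Instance query, as at reg_storage.h:150.
      constexpr bool Is64Bit() const { return (reg_ & kWideBit) != 0; }

      // Static overload on a raw encoding, as at reg_storage.h:200.
      static constexpr bool Is64Bit(uint16_t reg) { return (reg & kWideBit) != 0; }

      // Wide kind derived from the width, as at reg_storage.h:155.
      constexpr WideKind GetWideKind() const { return Is64Bit() ? kWide : kNotWide; }

     private:
      uint16_t reg_;
    };
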
/art/compiler/dex/quick/x86/
utility_x86.cc
129 case kOpNeg: opcode = r_dest_src.Is64Bit() ? kX86Neg64R : kX86Neg32R; break; in OpReg()
130 case kOpNot: opcode = r_dest_src.Is64Bit() ? kX86Not64R : kX86Not32R; break; in OpReg()
131 case kOpRev: opcode = r_dest_src.Is64Bit() ? kX86Bswap64R : kX86Bswap32R; break; in OpReg()
143 if (r_dest_src1.Is64Bit()) { in OpRegImm()
202 bool is64Bit = r_dest_src1.Is64Bit(); in OpRegReg()
365 DCHECK_EQ(r_dest.Is64Bit(), r_src.Is64Bit()); in OpCondRegReg()
366 return NewLIR3(r_dest.Is64Bit() ? kX86Cmov64RRC : kX86Cmov32RRC, r_dest.GetReg(), in OpCondRegReg()
371 bool is64Bit = r_dest.Is64Bit(); in OpRegMem()
428 bool is64Bit = r_dest.Is64Bit(); in OpRegMem()
453 bool is64Bit = r_dest.Is64Bit(); in OpRegRegReg()
[all …]
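
In utility_x86.cc the same register width drives mnemonic selection: each OpKind maps to either the 32-bit or the 64-bit x86 encoding depending on Is64Bit(). A sketch of that dispatch follows, using hypothetical enum definitions for the opcode names that appear in the hits above.

    enum OpKind { kOpNeg, kOpNot, kOpRev };
    enum X86OpCode { kX86Neg32R, kX86Neg64R, kX86Not32R, kX86Not64R,
                     kX86Bswap32R, kX86Bswap64R };

    struct Reg {
      bool is64;
      bool Is64Bit() const { return is64; }
    };

    // Pick the 64-bit form of the instruction for 64-bit registers, the 32-bit
    // form otherwise, as in OpReg() above.
    X86OpCode SelectOpcode(OpKind op, Reg r_dest_src) {
      switch (op) {
        case kOpNeg: return r_dest_src.Is64Bit() ? kX86Neg64R : kX86Neg32R;
        case kOpNot: return r_dest_src.Is64Bit() ? kX86Not64R : kX86Not32R;
        case kOpRev: return r_dest_src.Is64Bit() ? kX86Bswap64R : kX86Bswap32R;
      }
      return kX86Neg32R;  // unreachable for the OpKinds above
    }
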
call_x86.cc
135 DCHECK_EQ(tgt_addr_reg.Is64Bit(), cu_->target64); in UnconditionallyMarkGCCard()
365 if (target_reg.Is64Bit()) { in X86NextSDCallInsn()
int_x86.cc
100 NewLIR2(src1.Is64Bit() ? kX86Cmp64RR : kX86Cmp32RR, src1.GetReg(), src2.GetReg()); in OpCmpBranch()
112 NewLIR2(reg.Is64Bit() ? kX86Test64RR: kX86Test32RR, reg.GetReg(), reg.GetReg()); in OpCmpImmBranch()
114 if (reg.Is64Bit()) { in OpCmpImmBranch()
136 LIR* res = RawLIR(current_dalvik_offset_, r_dest.Is64Bit() ? kX86Mov64RR : kX86Mov32RR, in OpRegCopyNoInsert()
237 NewLIR2(rs_dest.Is64Bit() ? kX86Movzx8qRR : kX86Movzx8RR, rs_dest.GetReg(), rs_dest.GetReg()); in GenSelectConst32()
1479 DCHECK(reg.Is64Bit()); in GenDivZeroCheckWide()
3018 RegStorage object_32reg = object.reg.Is64Bit() ? As32BitReg(object.reg) : object.reg; in GenInstanceofFinal()
codegen_x86.h
395 if ((kFailOnSizeError || kReportSizeError) && !reg.Is64Bit()) { in As32BitReg()
/art/runtime/
elf_file.h
81 bool Is64Bit() const { in Is64Bit() function
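
ElfFile::Is64Bit() in the runtime answers a different question: whether the loaded ELF image itself is 64-bit. The search result only shows the declaration; the usual way to make that determination is from the class byte of the ELF identification header, sketched below as an assumption about what such a predicate checks rather than ART's actual implementation.

    #include <elf.h>

    // True when the ELF identification bytes mark a 64-bit (ELFCLASS64) image.
    bool IsElf64(const unsigned char* e_ident) {
      return e_ident[EI_CLASS] == ELFCLASS64;
    }
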
/art/compiler/dex/quick/
mir_to_lir.cc
1369 if (cu_->target64 && !rs.Is64Bit()) { in CheckRegStorageImpl()
1378 if (!rs.Is64Bit()) { in CheckRegStorageImpl()
1437 if (arg.IsWide() && !reg.Is64Bit()) { in Initialize()
gen_invoke.cc
448 if (t_loc->wide && reg.Valid() && !reg.Is64Bit()) { in FlushIns()
845 if (!reg.Is64Bit()) { in GenDalvikArgs()
codegen_util.cc
334 DCHECK_EQ(val_reg.Is64Bit(), cu_->target64); in MarkGCCard()
/art/patchoat/
patchoat.cc
567 if (elf_file->Is64Bit()) { in GetOatHeader()
718 if (oat_file_->Is64Bit()) in PatchElf()
/art/compiler/dex/quick/arm/
utility_arm.cc
1025 AnnotateDalvikRegAccess(load, displacement >> 2, true /* is_load */, r_dest.Is64Bit()); in LoadBaseDispBody()
1167 AnnotateDalvikRegAccess(store, displacement >> 2, false /* is_load */, r_src.Is64Bit()); in StoreBaseDispBody()
int_arm.cc
452 DCHECK(r_dest.Is64Bit()); in OpRegCopyWide()
453 DCHECK(r_src.Is64Bit()); in OpRegCopyWide()