/art/compiler/dex/quick/ |
D | mir_to_lir.cc |
      141   StoreBaseDisp(TargetPtrReg(kSp), offset, reg_arg, k32, kNotVolatile);  in LoadArg()
      147   LoadBaseDisp(TargetPtrReg(kSp), offset, reg_arg, wide ? k64 : k32, kNotVolatile);  in LoadArg()
      191   StoreBaseDisp(TargetPtrReg(kSp), offset, reg_arg, k32, kNotVolatile);  in LoadArgDirect()
      196   OpSize op_size = rl_dest.wide ? k64 : (rl_dest.ref ? kReference : k32);  in LoadArgDirect()
      214   (arg.IsWide() && reg_arg.GetWideKind() == kWide) ? k64 : k32;  in SpillArg()
      226   (arg.IsWide() && reg_arg.GetWideKind() == kWide) ? k64 : k32;  in UnspillArg()
      253   size = in_to_reg_storage_mapping_.GetShorty(data.src_arg).IsFP() ? kSingle : k32;  in GenSpecialIGet()
      325   size = in_to_reg_storage_mapping_.GetShorty(data.src_arg).IsFP() ? kSingle : k32;  in GenSpecialIPut()
      732   GenArrayGet(opt_flags, rl_dest.fp ? kSingle : k32, rl_src[0], rl_src[1], rl_dest, 2);  in CompileDalvikInstruction()
      750   … GenArrayPut(opt_flags, rl_src[0].fp ? kSingle : k32, rl_src[1], rl_src[2], rl_src[0], 2, false);  in CompileDalvikInstruction()
      [all …]
|
D | gen_invoke.cc |
      471   StoreBaseDisp(TargetPtrReg(kSp), offset, reg, t_loc->wide ? k64 : k32, kNotVolatile);  in FlushIns()
      481   LoadBaseDisp(TargetPtrReg(kSp), offset, t_loc->reg, t_loc->wide ? k64 : k32,  in FlushIns()
      715   StoreBaseDisp(TargetPtrReg(kSp), SRegOffset(loc.s_reg_low), loc.reg, k32,  in GenDalvikArgsFlushPromoted()
      815   StoreBaseDisp(TargetPtrReg(kSp), out_offset, rl_arg.reg, k32, kNotVolatile);  in GenDalvikArgs()
      823   StoreBaseDisp(TargetPtrReg(kSp), out_offset, regSingle, k32, kNotVolatile);  in GenDalvikArgs()
      851   LoadBaseDisp(TargetPtrReg(kSp), out_offset, reg, k32, kNotVolatile);  in GenDalvikArgs()
      855   LoadBaseDisp(TargetPtrReg(kSp), SRegOffset(rl_arg.s_reg_low + 1), reg, k32,  in GenDalvikArgs()
      857   StoreBaseDisp(TargetPtrReg(kSp), high_offset, reg, k32, kNotVolatile);  in GenDalvikArgs()
      859   LoadBaseDisp(TargetPtrReg(kSp), SRegOffset(rl_arg.s_reg_low), reg, k32, kNotVolatile);  in GenDalvikArgs()
      862   StoreBaseDisp(TargetPtrReg(kSp), low_offset, reg, k32, kNotVolatile);  in GenDalvikArgs()
      [all …]
|
D | dex_file_method_inliner.cc |
      365   INTRINSIC(JavaLangInteger, ReverseBytes, I_I, kIntrinsicReverseBytes, k32),
      368   INTRINSIC(JavaLangInteger, Reverse, I_I, kIntrinsicReverseBits, k32),
      423   INTRINSIC(LibcoreIoMemory, PeekIntNative, J_I, kIntrinsicPeek, k32),
      427   INTRINSIC(LibcoreIoMemory, PokeIntNative, JI_V, kIntrinsicPoke, k32),
|
D | gen_loadstore.cc | 62 op_size = k32; in LoadValueDirect()
|
D | gen_common.cc |
      614   LoadBaseIndexed(r_src, r_idx, r_val, 2, k32);  in GenFilledNewArray()
      618   StoreBaseIndexed(r_dst, r_idx, r_val, 2, k32);  in GenFilledNewArray()
      741   case k32:  in GenSput()
      986   case k32:  in GenIPut()
      2266  case k32:  in SizeMatchesTypeForEntrypoint()
|
D | mir_to_lir.h |
      981   return LoadBaseDisp(r_base, displacement, r_dest, k32, kNotVolatile);  in Load32Disp()
      1019  return StoreBaseDisp(r_base, displacement, r_src, k32, kNotVolatile);  in Store32Disp()
|
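The mir_to_lir.h hits show where the width gets pinned: Load32Disp() and Store32Disp() are thin wrappers that fix the size to k32 and delegate to the general base+displacement helpers. Below is a minimal, self-contained sketch of that wrapper shape; the RegStorage/LIR types and the printf bodies are placeholders for illustration, not the real Mir2Lir interfaces.

    // Sketch of width-fixing wrappers over generic load/store helpers.
    // RegStorage, LIR, and the function bodies are simplified placeholders.
    #include <cstdio>

    enum OpSize { k32, k64, kSingle, kReference };
    enum VolatileKind { kNotVolatile, kVolatile };
    struct RegStorage { int reg; };
    struct LIR { /* emitted instruction; empty in this sketch */ };

    LIR* LoadBaseDisp(RegStorage r_base, int displacement, RegStorage r_dest,
                      OpSize size, VolatileKind is_volatile) {
      std::printf("load  base=r%d disp=%d dest=r%d size=%d volatile=%d\n",
                  r_base.reg, displacement, r_dest.reg, size, is_volatile);
      return nullptr;
    }

    LIR* StoreBaseDisp(RegStorage r_base, int displacement, RegStorage r_src,
                       OpSize size, VolatileKind is_volatile) {
      std::printf("store base=r%d disp=%d src=r%d size=%d volatile=%d\n",
                  r_base.reg, displacement, r_src.reg, size, is_volatile);
      return nullptr;
    }

    // The convenience wrappers: always 32-bit, never volatile.
    LIR* Load32Disp(RegStorage r_base, int displacement, RegStorage r_dest) {
      return LoadBaseDisp(r_base, displacement, r_dest, k32, kNotVolatile);
    }
    LIR* Store32Disp(RegStorage r_base, int displacement, RegStorage r_src) {
      return StoreBaseDisp(r_base, displacement, r_src, k32, kNotVolatile);
    }

    int main() {
      RegStorage sp{13}, tmp{0};
      Load32Disp(sp, 8, tmp);   // e.g. reload a spilled 32-bit argument
      Store32Disp(sp, 8, tmp);  // ...and spill it back
      return 0;
    }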
/art/compiler/dex/quick/mips/ |
D | utility_mips.cc |
      579   DCHECK((size == k32) || (size == kSingle) || (size == kReference));  in LoadBaseIndexed()
      583   size = k32;  in LoadBaseIndexed()
      617   case k32:  in LoadBaseIndexed()
      651   DCHECK((size == k32) || (size == kSingle) || (size == kReference));  in StoreBaseIndexed()
      655   size = k32;  in StoreBaseIndexed()
      670   case k32:  in StoreBaseIndexed()
      727   case k32:  in LoadBaseDispBody()
      862   size = cu_->target64 ? k64 : k32;  in LoadBaseDisp()
      904   case k32:  in StoreBaseDispBody()
      1020  size = cu_->target64 ? k64 : k32;  in StoreBaseDisp()
|
D | call_mips.cc | 202 LoadBaseIndexed(r_base, r_key, r_disp, 2, k32); in GenLargePackedSwitch()
|
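The utility_mips.cc hits (and the utility_arm.cc hits below) suggest one recurring shape inside LoadBaseIndexed()/StoreBaseIndexed(): assert that the requested size is one of the 32-bit-wide kinds, fold kSingle and kReference into k32 for plain core-register accesses, then switch on the size to pick an opcode. The sketch below only illustrates that control flow; PickLoadOpcode(), the opcode strings, and the DCHECK macro are stand-ins, not the backend's real instruction selection.

    // Sketch of the "normalize to k32, then switch on OpSize" shape seen in
    // the LoadBaseIndexed()/StoreBaseIndexed() hits.  Everything here is
    // simplified: the opcode strings are placeholders and PickLoadOpcode()
    // is invented for the illustration.
    #include <cassert>
    #include <cstdio>

    #define DCHECK(cond) assert(cond)

    enum OpSize { k32, k64, kSingle, kReference, kSignedHalf, kSignedByte };

    const char* PickLoadOpcode(OpSize size, bool dest_is_fp) {
      if (dest_is_fp) {
        // An FP destination only makes sense for 32-bit-wide data here.
        DCHECK((size == k32) || (size == kSingle) || (size == kReference));
        return "load_fp_single";
      }
      if (size == kSingle || size == kReference) {
        size = k32;  // a plain 32-bit core-register load covers both cases
      }
      switch (size) {
        case k32:         return "load_word";
        case kSignedHalf: return "load_signed_half";
        case kSignedByte: return "load_signed_byte";
        default:
          DCHECK(false);  // k64 is handled by a separate wide path
          return "?";
      }
    }

    int main() {
      std::printf("%s\n", PickLoadOpcode(kReference, /*dest_is_fp=*/false));  // load_word
      std::printf("%s\n", PickLoadOpcode(kSingle, /*dest_is_fp=*/true));      // load_fp_single
      return 0;
    }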
/art/compiler/dex/quick/arm/ |
D | utility_arm.cc |
      758   DCHECK((size == k32) || (size == kSingle) || (size == kReference));  in LoadBaseIndexed()
      769   size = k32;  in LoadBaseIndexed()
      786   case k32:  in LoadBaseIndexed()
      824   DCHECK((size == k32) || (size == kSingle) || (size == kReference));  in StoreBaseIndexed()
      836   size = k32;  in StoreBaseIndexed()
      853   case k32:  in StoreBaseIndexed()
      936   case k32:  in LoadBaseDispBody()
      1034  size = k32;  in LoadBaseDisp()
      1090  case k32:  in StoreBaseDispBody()
      1211  size = k32;  in StoreBaseDisp()
|
D | int_arm.cc |
      802   DCHECK(size == kSignedByte || size == kSignedHalf || size == k32);  in GenInlinedPeek()
      818   StoreBaseDisp(rl_address.reg, 0, rl_value.reg.GetLow(), k32, kNotVolatile);  in GenInlinedPoke()
      819   StoreBaseDisp(rl_address.reg, 4, rl_value.reg.GetHigh(), k32, kNotVolatile);  in GenInlinedPoke()
      821   DCHECK(size == kSignedByte || size == kSignedHalf || size == k32);  in GenInlinedPoke()
      1064  LoadBaseIndexed(rs_src, rs_length, rs_tmp, 0, k32);  in GenInlinedArrayCopyCharArray()
      1065  StoreBaseIndexed(rs_dst, rs_length, rs_tmp, 0, k32);  in GenInlinedArrayCopyCharArray()
|
D | call_arm.cc | 132 LoadBaseIndexed(table_base, keyReg, disp_reg, 2, k32); in GenLargePackedSwitch()
|
/art/compiler/dex/quick/arm64/ |
D | utility_arm64.cc |
      1046  DCHECK(size == k32 || size == kSingle);  in LoadBaseIndexed()
      1068  case k32:  in LoadBaseIndexed()
      1131  DCHECK(size == k32 || size == kSingle);  in StoreBaseIndexed()
      1153  case k32:  in StoreBaseIndexed()
      1217  case k32:  in LoadBaseDispBody()
      1313  case k32:  in StoreBaseDispBody()
|
D | int_arm64.cc |
      718   DCHECK(size == kSignedByte || size == kSignedHalf || size == k32);  in GenInlinedPeek()
      733   DCHECK(size == kSignedByte || size == kSignedHalf || size == k32);  in GenInlinedPoke()
      902   LoadBaseIndexed(rs_src, rs_length, rs_tmp, 0, k32);  in GenInlinedArrayCopyCharArray()
      903   StoreBaseIndexed(rs_dst, rs_length, rs_tmp, 0, k32);  in GenInlinedArrayCopyCharArray()
|
D | call_arm64.cc | 135 LoadBaseIndexed(table_base, As64BitReg(key_reg), disp_reg, 2, k32); in GenLargePackedSwitch()
|
/art/compiler/dex/quick/x86/ |
D | target_x86.cc |
      743   OpSize size = cu_->target64 ? k64 : k32;  in SpillCoreRegs()
      763   OpSize size = cu_->target64 ? k64 : k32;  in UnSpillCoreRegs()
      1713  case k32:  in GenMultiplyVector()
      1747  case k32:  in GenAddVector()
      1782  case k32:  in GenSubtractVector()
      1861  case k32:  in GenShiftLeftVector()
      1890  case k32:  in GenSignedShiftRightVector()
      1918  case k32:  in GenUnsignedShiftRightVector()
      2090  } else if (opsize == k32) {  in GenAddReduceVector()
      2209  case k32:  in GenReduceVector()
      [all …]
|
D | fp_x86.cc |
      387   StoreBaseDisp(rs_rSP, src1_v_reg_offset, rl_src1.reg, is_double ? k64 : k32,  in GenRemFP()
      398   StoreBaseDisp(rs_rSP, src2_v_reg_offset, rl_src2.reg, is_double ? k64 : k32,  in GenRemFP()
|
D | int_x86.cc |
      1051  DCHECK(size == kSignedByte || size == kSignedHalf || size == k32);  in GenInlinedPeek()
      1072  DCHECK(size == kSignedByte || size == kSignedHalf || size == k32);  in GenInlinedPoke()
      1649  LoadBaseDisp(rs_rSP, displacement, dest, k32, kNotVolatile);  in GenImulMemImm()
      1867  LoadBaseDisp(rs_rSP, SRegOffset(rl_src1.s_reg_low) + HIWORD_OFFSET, rs_r1, k32,  in GenMulLong()
      1891  LoadBaseDisp(rs_rSP, SRegOffset(rl_src2.s_reg_low) + HIWORD_OFFSET, rs_r0, k32,  in GenMulLong()
      1925  LoadBaseDisp(rs_rSP, SRegOffset(rl_src2.s_reg_low) + LOWORD_OFFSET, rs_r0, k32,  in GenMulLong()
|
D | utility_x86.cc |
      672   case k32:  in LoadBaseIndexedDisp()
      826   case k32:  in StoreBaseIndexedDisp()
|
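The GenInlinedPeek()/GenInlinedPoke() hits carry the same width guard on ARM, ARM64, and x86: the inlined Memory peek/poke path only accepts kSignedByte, kSignedHalf, and k32, while the paired k32 stores at int_arm.cc:818-819 suggest a 64-bit poke is split into two 32-bit halves on 32-bit ARM. The sketch below mimics that dispatch with ordinary memory writes instead of emitted code; EmitInlinedPoke() and its little-endian split are illustrative assumptions, not the backends' codegen.

    // Sketch of the width guard shared by the inlined Memory poke paths.
    // EmitInlinedPoke() is a stand-in: a real backend emits load/store LIR
    // rather than writing memory, but the OpSize dispatch has the same shape.
    #include <cassert>
    #include <cstdint>
    #include <cstring>

    #define DCHECK(cond) assert(cond)

    enum OpSize { k32, k64, kSingle, kReference, kSignedHalf, kSignedByte };

    void EmitInlinedPoke(void* address, int64_t value, OpSize size) {
      if (size == k64) {
        // Assumed little-endian split of a 64-bit poke into two 32-bit stores
        // (low word at +0, high word at +4), as the paired k32 stores suggest.
        uint32_t lo = static_cast<uint32_t>(value);
        uint32_t hi = static_cast<uint32_t>(value >> 32);
        std::memcpy(address, &lo, sizeof(lo));
        std::memcpy(static_cast<char*>(address) + 4, &hi, sizeof(hi));
        return;
      }
      DCHECK(size == kSignedByte || size == kSignedHalf || size == k32);
      switch (size) {
        case kSignedByte: { int8_t v = static_cast<int8_t>(value);   std::memcpy(address, &v, sizeof(v)); break; }
        case kSignedHalf: { int16_t v = static_cast<int16_t>(value); std::memcpy(address, &v, sizeof(v)); break; }
        default:          { int32_t v = static_cast<int32_t>(value); std::memcpy(address, &v, sizeof(v)); break; }
      }
    }

    int main() {
      int64_t buffer = 0;
      EmitInlinedPoke(&buffer, 0x1122334455667788LL, k64);
      EmitInlinedPoke(&buffer, 0x7f, kSignedByte);
      return 0;
    }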
/art/compiler/optimizing/ |
D | intrinsics.cc | 54 case k32: in GetType()
|
/art/compiler/dex/ |
D | compiler_enums.h | 382 k32, enumerator
|
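The only definition site is compiler_enums.h:382, where k32 is an enumerator of OpSize, the operand-width type that every call above switches or branches on. A hedged reconstruction of that pairing follows; the enumerator list is restricted to names visible in these results (the real declaration has more members), and RegLocationLite and the helper functions are invented for the illustration.

    // Sketch pairing an OpSize-style width enum with the selection
    // expressions seen in the hits, e.g. "cu_->target64 ? k64 : k32" and
    // "rl_dest.wide ? k64 : (rl_dest.ref ? kReference : k32)".
    // Only enumerators that appear in the search results are listed here.
    #include <cstdio>

    enum OpSize { k32, k64, kSingle, kReference, kSignedHalf, kSignedByte };

    struct RegLocationLite {  // simplified stand-in for the compiler's RegLocation
      bool wide;              // value occupies two virtual registers (long/double)
      bool ref;               // value is an object reference
      bool fp;                // value lives in a floating-point register
    };

    // Core-register spills use the full GPR width: k64 on 64-bit targets, else k32.
    OpSize CoreSpillSize(bool target64) { return target64 ? k64 : k32; }

    // Everything that is not wide, a reference, or FP falls through to k32,
    // which is why k32 shows up as the default width throughout the listing.
    OpSize ValueSize(const RegLocationLite& loc) {
      if (loc.wide) return k64;
      if (loc.ref)  return kReference;
      return loc.fp ? kSingle : k32;
    }

    int main() {
      RegLocationLite int_val{/*wide=*/false, /*ref=*/false, /*fp=*/false};
      std::printf("spill=%d value=%d\n",
                  CoreSpillSize(/*target64=*/false), ValueSize(int_val));
      return 0;
    }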