/art/compiler/dex/quick/mips/
utility_mips.cc
    365   DCHECK((size == k32) || (size == kSingle) || (size == kReference));   in LoadBaseIndexed()
    369   size = k32;   in LoadBaseIndexed()
    383   case k32:   in LoadBaseIndexed()
    417   DCHECK((size == k32) || (size == kSingle) || (size == kReference));   in StoreBaseIndexed()
    421   size = k32;   in StoreBaseIndexed()
    435   case k32:   in StoreBaseIndexed()
    489   case k32:   in LoadBaseDispBody()
    561   size = k32;   in LoadBaseDisp()
    600   case k32:   in StoreBaseDispBody()
    669   size = k32;   in StoreBaseDisp()
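The MIPS references above follow one shape, which the ARM, x86 and ARM64 utility files below repeat: assert that the request is word-sized, fold the non-FP cases into k32, then switch on the size to pick the instruction. A minimal compilable sketch of that shape; the opcode names and the dest_is_fp flag are illustrative stand-ins, and the real LoadBaseIndexed() also encodes base, index and scale:

    #include <cassert>

    enum OpSize { k32, kReference, kSingle };
    enum MipsOpcode { kMipsLw, kMipsFlwc1 };    // illustrative opcode names

    MipsOpcode PickIndexedLoadOpcode(OpSize size, bool dest_is_fp) {
      assert(size == k32 || size == kSingle || size == kReference);  // cf. the DCHECK at line 365
      size = dest_is_fp ? kSingle : k32;    // cf. line 369: non-FP accesses are folded to k32
      switch (size) {
        case kSingle: return kMipsFlwc1;    // single-precision FP load
        case k32:     return kMipsLw;       // cf. line 383: plain 32-bit word load
        default:      assert(false); return kMipsLw;
      }
    }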
call_mips.cc
    202   LoadBaseIndexed(r_base, r_key, r_disp, 2, k32);   in GenLargePackedSwitch()
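In GenLargePackedSwitch() the last two arguments, scale 2 and k32, mean the key is shifted left by two and a 4-byte entry is loaded, i.e. the switch table is treated as an array of 32-bit displacements. A sketch of the address arithmetic, with an invented helper name:

    #include <cstdint>

    // Computes what LoadBaseIndexed(table_base, key, dest, /*scale=*/2, k32) loads:
    // the 32-bit entry at table_base + (key << 2).
    int32_t PackedSwitchDisp(const int32_t* table_base, uint32_t key) {
      return table_base[key];
    }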
/art/compiler/dex/quick/arm/
utility_arm.cc
    702   DCHECK((size == k32) || (size == kSingle) || (size == kReference));   in LoadBaseIndexed()
    713   size = k32;   in LoadBaseIndexed()
    730   case k32:   in LoadBaseIndexed()
    768   DCHECK((size == k32) || (size == kSingle) || (size == kReference));   in StoreBaseIndexed()
    780   size = k32;   in StoreBaseIndexed()
    797   case k32:   in StoreBaseIndexed()
    880   case k32:   in LoadBaseDispBody()
    969   size = k32;   in LoadBaseDisp()
    1019  case k32:   in StoreBaseDispBody()
    1126  size = k32;   in StoreBaseDisp()
int_arm.cc
    768   DCHECK(size == kSignedByte || size == kSignedHalf || size == k32);   in GenInlinedPeek()
    784   StoreBaseDisp(rl_address.reg, 0, rl_value.reg.GetLow(), k32, kNotVolatile);   in GenInlinedPoke()
    785   StoreBaseDisp(rl_address.reg, 4, rl_value.reg.GetHigh(), k32, kNotVolatile);   in GenInlinedPoke()
    787   DCHECK(size == kSignedByte || size == kSignedHalf || size == k32);   in GenInlinedPoke()
    1030  LoadBaseIndexed(rs_src, rs_length, rs_tmp, 0, k32);   in GenInlinedArrayCopyCharArray()
    1031  StoreBaseIndexed(rs_dst, rs_length, rs_tmp, 0, k32);   in GenInlinedArrayCopyCharArray()
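Lines 784-785 are the wide (64-bit) poke path: the value is written as two k32 stores, low word at offset 0 and high word at offset 4. A sketch of the resulting memory effect on a little-endian target (the helper name is invented):

    #include <cstdint>
    #include <cstring>

    void PokeLongAsTwoWords(uint8_t* address, int64_t value) {
      uint32_t lo = static_cast<uint32_t>(value);        // rl_value.reg.GetLow()
      uint32_t hi = static_cast<uint32_t>(value >> 32);  // rl_value.reg.GetHigh()
      std::memcpy(address + 0, &lo, sizeof(lo));         // StoreBaseDisp(..., 0, ..., k32, ...)
      std::memcpy(address + 4, &hi, sizeof(hi));         // StoreBaseDisp(..., 4, ..., k32, ...)
    }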
call_arm.cc
    129   LoadBaseIndexed(table_base, keyReg, disp_reg, 2, k32);   in GenLargePackedSwitch()
/art/compiler/dex/quick/x86/
target_x86.cc
    698   OpSize size = cu_->target64 ? k64 : k32;   in SpillCoreRegs()
    715   OpSize size = cu_->target64 ? k64 : k32;   in UnSpillCoreRegs()
    1885  case k32:   in GenMultiplyVector()
    1915  case k32:   in GenAddVector()
    1946  case k32:   in GenSubtractVector()
    2026  case k32:   in GenShiftLeftVector()
    2054  case k32:   in GenSignedShiftRightVector()
    2079  case k32:   in GenUnsignedShiftRightVector()
    2155  case k32:   in GenAddReduceVector()
    2245  case k32:   in GenReduceVector()
    [all …]
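SpillCoreRegs()/UnSpillCoreRegs() choose the spill width from the target word size: k64 (8-byte slots) on x86-64, k32 (4-byte slots) on 32-bit x86. A rough sketch of that sizing; the struct, the helper name and the slot-count arithmetic are assumptions, only the size selection comes from the lines above:

    #include <bitset>
    #include <cstdint>

    enum OpSize { k32, k64 };

    struct SpillPlan { OpSize size; uint32_t slot_bytes; uint32_t area_bytes; };

    SpillPlan PlanCoreSpills(bool target64, uint32_t core_spill_mask) {
      OpSize size = target64 ? k64 : k32;   // as at target_x86.cc:698 and 715
      uint32_t slot = target64 ? 8u : 4u;   // bytes moved per StoreBaseDisp / LoadBaseDisp
      uint32_t count = static_cast<uint32_t>(std::bitset<32>(core_spill_mask).count());
      return SpillPlan{size, slot, count * slot};
    }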
call_x86.cc
    295   cu_->target64 ? k64 : k32, kNotVolatile);   in GenEntrySequence()
fp_x86.cc
    367   StoreBaseDisp(rs_rX86_SP, src1_v_reg_offset, rl_src1.reg, is_double ? k64 : k32,   in GenRemFP()
    378   StoreBaseDisp(rs_rX86_SP, src2_v_reg_offset, rl_src2.reg, is_double ? k64 : k32,   in GenRemFP()
int_x86.cc
    865   DCHECK(size == kSignedByte || size == kSignedHalf || size == k32);   in GenInlinedPeek()
    886   DCHECK(size == kSignedByte || size == kSignedHalf || size == k32);   in GenInlinedPoke()
    1285  LoadBaseDisp(rs_rX86_SP, displacement, dest, k32, kNotVolatile);   in GenImulMemImm()
    1501  LoadBaseDisp(rs_rX86_SP, SRegOffset(rl_src1.s_reg_low) + HIWORD_OFFSET, rs_r1, k32,   in GenMulLong()
    1525  LoadBaseDisp(rs_rX86_SP, SRegOffset(rl_src2.s_reg_low) + HIWORD_OFFSET, rs_r0, k32,   in GenMulLong()
    1559  LoadBaseDisp(rs_rX86_SP, SRegOffset(rl_src2.s_reg_low) + LOWORD_OFFSET, rs_r0, k32,   in GenMulLong()
utility_x86.cc
    662   case k32:   in LoadBaseIndexedDisp()
    809   case k32:   in StoreBaseIndexedDisp()
/art/compiler/dex/quick/arm64/
target_arm64.cc
    882   *op_size = k32;   in GetArgPhysicalReg()
    956   StoreBaseDisp(TargetPtrReg(kSp), SRegOffset(start_vreg + i), reg, t_loc->wide ? k64 : k32,   in FlushIns()
    967   t_loc->wide ? k64 : k32, kNotVolatile);   in FlushIns()
    1073  StoreBaseDisp(TargetPtrReg(kSp), SRegOffset(loc.s_reg_low), loc.reg, k32,   in GenDalvikArgsRange()
    1142  StoreBaseDisp(TargetPtrReg(kSp), out_offset, rl_arg.reg, k32, kNotVolatile);   in GenDalvikArgsRange()
    1152  StoreBaseDisp(TargetPtrReg(kSp), out_offset, regSingle, k32, kNotVolatile);   in GenDalvikArgsRange()
utility_arm64.cc
    1046  DCHECK(size == k32 || size == kSingle);   in LoadBaseIndexed()
    1065  case k32:  // Intentional fall-through.   in LoadBaseIndexed()
    1134  DCHECK(size == k32 || size == kSingle);   in StoreBaseIndexed()
    1153  case k32:  // Intentional fall-trough.   in StoreBaseIndexed()
    1220  case k32:  // Intentional fall-trough.   in LoadBaseDispBody()
    1317  case k32:  // Intentional fall-trough.   in StoreBaseDispBody()
int_arm64.cc
    683   DCHECK(size == kSignedByte || size == kSignedHalf || size == k32);   in GenInlinedPeek()
    698   DCHECK(size == kSignedByte || size == kSignedHalf || size == k32);   in GenInlinedPoke()
    869   LoadBaseIndexed(rs_src, rs_length, rs_tmp, 0, k32);   in GenInlinedArrayCopyCharArray()
    870   StoreBaseIndexed(rs_dst, rs_length, rs_tmp, 0, k32);   in GenInlinedArrayCopyCharArray()
call_arm64.cc
    133   LoadBaseIndexed(table_base, As64BitReg(key_reg), disp_reg, 2, k32);   in GenLargePackedSwitch()
/art/compiler/dex/quick/
mir_to_lir.cc
    94    LoadBaseDisp(TargetPtrReg(kSp), offset, new_reg, wide ? k64 : k32, kNotVolatile);   in LoadArg()
    686   GenArrayGet(opt_flags, k32, rl_src[0], rl_src[1], rl_dest, 2);   in CompileDalvikInstruction()
    704   GenArrayPut(opt_flags, k32, rl_src[1], rl_src[2], rl_src[0], 2, false);   in CompileDalvikInstruction()
    741   GenIGet(mir, opt_flags, k32, rl_dest, rl_src[0], false, false);   in CompileDalvikInstruction()
    766   GenIPut(mir, opt_flags, k32, rl_src[0], rl_src[1], false, false);   in CompileDalvikInstruction()
dex_file_method_inliner.cc
    297   INTRINSIC(JavaLangInteger, ReverseBytes, I_I, kIntrinsicReverseBytes, k32),
    300   INTRINSIC(JavaLangInteger, Reverse, I_I, kIntrinsicReverseBits, k32),
    354   INTRINSIC(LibcoreIoMemory, PeekIntNative, J_I, kIntrinsicPeek, k32),
    358   INTRINSIC(LibcoreIoMemory, PokeIntNative, JI_V, kIntrinsicPoke, k32),
gen_invoke.cc
    1356  DCHECK(size == k32 || size == kSignedHalf);   in GenInlinedReverseBytes()
    1357  OpKind op = (size == k32) ? kOpRev : kOpRevsh;   in GenInlinedReverseBytes()
    1631  LoadBaseIndexed(rl_object.reg, rl_offset.reg, rl_result.reg, 0, k32);   in GenInlinedUnsafeGet()
    1680  StoreBaseIndexed(rl_object.reg, rl_offset.reg, rl_value.reg, 0, k32);   in GenInlinedUnsafePut()
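Lines 1356-1357 show the intrinsic choosing between a full 32-bit byte reversal (kOpRev, the k32 case used for Integer.reverseBytes per the inliner table above) and a halfword reversal with sign extension (kOpRevsh, the kSignedHalf case, i.e. a reversed short). A portable sketch of the two operations the back ends emit as single instructions (helper names invented):

    #include <cstdint>

    uint32_t RevWord(uint32_t x) {            // what kOpRev computes for k32
      return (x >> 24) | ((x >> 8) & 0x0000FF00u) |
             ((x << 8) & 0x00FF0000u) | (x << 24);
    }

    int32_t RevSignedHalf(int32_t x) {        // what kOpRevsh computes for kSignedHalf
      uint16_t half = static_cast<uint16_t>(x);
      uint16_t swapped = static_cast<uint16_t>((half >> 8) | (half << 8));
      return static_cast<int16_t>(swapped);   // reversed halfword, sign-extended
    }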
gen_loadstore.cc
    69    … StoreBaseDisp(TargetPtrReg(kSp), SRegOffset(rl_dest.s_reg_low), temp_reg, k32, kNotVolatile);   in Workaround7250540()
mir_to_lir.h
    1000  return LoadBaseDisp(r_base, displacement, r_dest, k32, kNotVolatile);   in Load32Disp()
    1042  return StoreBaseDisp(r_base, displacement, r_src, k32, kNotVolatile);   in Store32Disp()
    1505  return wide ? k64 : ref ? kReference : k32;   in LoadStoreOpSize()
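The three mir_to_lir.h hits are the convenience layer most of the k32 traffic goes through: Load32Disp()/Store32Disp() hard-code k32 and kNotVolatile, and LoadStoreOpSize() maps the wide/ref flags of a value to an OpSize. A paraphrase of those one-liners; the LIR*/RegStorage signatures are assumptions, the bodies are the ones quoted above:

    LIR* Load32Disp(RegStorage r_base, int displacement, RegStorage r_dest) {
      return LoadBaseDisp(r_base, displacement, r_dest, k32, kNotVolatile);
    }

    LIR* Store32Disp(RegStorage r_base, int displacement, RegStorage r_src) {
      return StoreBaseDisp(r_base, displacement, r_src, k32, kNotVolatile);
    }

    OpSize LoadStoreOpSize(bool wide, bool ref) {
      return wide ? k64 : ref ? kReference : k32;   // wide -> k64, reference -> kReference, else k32
    }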
gen_common.cc
    475   LoadBaseIndexed(r_src, r_idx, r_val, 2, k32);   in GenFilledNewArray()
    479   StoreBaseIndexed(r_dst, r_idx, r_val, 2, k32);   in GenFilledNewArray()
/art/compiler/dex/
compiler_enums.h
    264   k32,   enumerator
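For context, a hedged reconstruction of the enumeration this definition sits in: k32, k64, kReference, kSingle, kSignedHalf and kSignedByte all occur in the references above, while the remaining names, the comments and the ordering are assumptions.

    enum OpSize {
      kWord,          // natural word size of the target
      k32,            // 32-bit integer or plain word (compiler_enums.h:264)
      k64,            // 64-bit integer
      kReference,     // object reference
      kSingle,        // single-precision float
      kDouble,        // double-precision float
      kUnsignedHalf,
      kSignedHalf,
      kUnsignedByte,
      kSignedByte,
    };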