Searched refs: k32 (Results 1 – 21 of 21), sorted by relevance

/art/compiler/dex/quick/mips/
utility_mips.cc:365 DCHECK((size == k32) || (size == kSingle) || (size == kReference)); in LoadBaseIndexed()
369 size = k32; in LoadBaseIndexed()
383 case k32: in LoadBaseIndexed()
417 DCHECK((size == k32) || (size == kSingle) || (size == kReference)); in StoreBaseIndexed()
421 size = k32; in StoreBaseIndexed()
435 case k32: in StoreBaseIndexed()
489 case k32: in LoadBaseDispBody()
561 size = k32; in LoadBaseDisp()
600 case k32: in StoreBaseDispBody()
669 size = k32; in StoreBaseDisp()
call_mips.cc:202 LoadBaseIndexed(r_base, r_key, r_disp, 2, k32); in GenLargePackedSwitch()
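
The MIPS hits above share one pattern: LoadBaseIndexed() and StoreBaseIndexed() DCHECK that the requested size is k32, kSingle, or kReference, fold the latter two into k32, and only then switch on the size. A minimal sketch of that narrowing step, using simplified stand-in types rather than the real Mir2Lir interfaces:

    #include <cassert>

    // Simplified stand-ins for the OpSize enumerators visible in these results;
    // the real enum lives in art/compiler/dex/compiler_enums.h.
    enum OpSize { k32, k64, kSingle, kReference, kSignedByte, kSignedHalf };

    // Hypothetical result type; the real backend picks target-specific LIR opcodes.
    enum LoadOp { kLoadWord, kLoadOther };

    // Mirrors the narrowing in LoadBaseIndexed(): assert the size is one of the
    // 4-byte flavors, fold kSingle and kReference into k32, then dispatch on the
    // folded value.
    LoadOp PickScaledLoadOp(OpSize size) {
      assert(size == k32 || size == kSingle || size == kReference);
      if (size != k32) {
        size = k32;  // kSingle and kReference are also 4 bytes wide here
      }
      switch (size) {
        case k32:
          return kLoadWord;   // e.g. MIPS lw
        default:
          return kLoadOther;  // other sizes take separate paths in the real code
      }
    }

    int main() {
      return PickScaledLoadOp(kReference) == kLoadWord ? 0 : 1;
    }
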
/art/compiler/dex/quick/arm/
utility_arm.cc:702 DCHECK((size == k32) || (size == kSingle) || (size == kReference)); in LoadBaseIndexed()
713 size = k32; in LoadBaseIndexed()
730 case k32: in LoadBaseIndexed()
768 DCHECK((size == k32) || (size == kSingle) || (size == kReference)); in StoreBaseIndexed()
780 size = k32; in StoreBaseIndexed()
797 case k32: in StoreBaseIndexed()
880 case k32: in LoadBaseDispBody()
969 size = k32; in LoadBaseDisp()
1019 case k32: in StoreBaseDispBody()
1126 size = k32; in StoreBaseDisp()
int_arm.cc:768 DCHECK(size == kSignedByte || size == kSignedHalf || size == k32); in GenInlinedPeek()
784 StoreBaseDisp(rl_address.reg, 0, rl_value.reg.GetLow(), k32, kNotVolatile); in GenInlinedPoke()
785 StoreBaseDisp(rl_address.reg, 4, rl_value.reg.GetHigh(), k32, kNotVolatile); in GenInlinedPoke()
787 DCHECK(size == kSignedByte || size == kSignedHalf || size == k32); in GenInlinedPoke()
1030 LoadBaseIndexed(rs_src, rs_length, rs_tmp, 0, k32); in GenInlinedArrayCopyCharArray()
1031 StoreBaseIndexed(rs_dst, rs_length, rs_tmp, 0, k32); in GenInlinedArrayCopyCharArray()
call_arm.cc:129 LoadBaseIndexed(table_base, keyReg, disp_reg, 2, k32); in GenLargePackedSwitch()
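
The GenInlinedPoke() hits at int_arm.cc:784-785 show how a 64-bit poke is lowered on ARM: two k32 stores, the low register at displacement 0 and the high register at displacement 4. A host-side sketch of the equivalent memory effect, assuming little-endian layout as on ARM (the real code emits StoreBaseDisp LIR rather than writing memory directly):

    #include <cstdint>
    #include <cstring>

    // Host-side model of the wide poke at int_arm.cc:784-785: the 64-bit value is
    // written as two k32 stores, low word at displacement 0 and high word at
    // displacement 4 (little-endian layout, as on ARM). The real code emits
    // StoreBaseDisp(..., k32, kNotVolatile) LIR rather than writing memory itself.
    void PokeWideAsTwoK32Stores(void* address, uint64_t value) {
      uint32_t lo = static_cast<uint32_t>(value);        // rl_value.reg.GetLow()
      uint32_t hi = static_cast<uint32_t>(value >> 32);  // rl_value.reg.GetHigh()
      std::memcpy(static_cast<char*>(address) + 0, &lo, sizeof(lo));  // k32 store, disp 0
      std::memcpy(static_cast<char*>(address) + 4, &hi, sizeof(hi));  // k32 store, disp 4
    }

    int main() {
      uint64_t slot = 0;
      PokeWideAsTwoK32Stores(&slot, 0x1122334455667788ULL);
      // On a little-endian host the two 32-bit writes reassemble the original value.
      return slot == 0x1122334455667788ULL ? 0 : 1;
    }
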
/art/compiler/dex/quick/x86/
target_x86.cc:698 OpSize size = cu_->target64 ? k64 : k32; in SpillCoreRegs()
715 OpSize size = cu_->target64 ? k64 : k32; in UnSpillCoreRegs()
1885 case k32: in GenMultiplyVector()
1915 case k32: in GenAddVector()
1946 case k32: in GenSubtractVector()
2026 case k32: in GenShiftLeftVector()
2054 case k32: in GenSignedShiftRightVector()
2079 case k32: in GenUnsignedShiftRightVector()
2155 case k32: in GenAddReduceVector()
2245 case k32: in GenReduceVector()
[all …]
call_x86.cc:295 cu_->target64 ? k64 : k32, kNotVolatile); in GenEntrySequence()
fp_x86.cc:367 StoreBaseDisp(rs_rX86_SP, src1_v_reg_offset, rl_src1.reg, is_double ? k64 : k32, in GenRemFP()
378 StoreBaseDisp(rs_rX86_SP, src2_v_reg_offset, rl_src2.reg, is_double ? k64 : k32, in GenRemFP()
int_x86.cc:865 DCHECK(size == kSignedByte || size == kSignedHalf || size == k32); in GenInlinedPeek()
886 DCHECK(size == kSignedByte || size == kSignedHalf || size == k32); in GenInlinedPoke()
1285 LoadBaseDisp(rs_rX86_SP, displacement, dest, k32, kNotVolatile); in GenImulMemImm()
1501 LoadBaseDisp(rs_rX86_SP, SRegOffset(rl_src1.s_reg_low) + HIWORD_OFFSET, rs_r1, k32, in GenMulLong()
1525 LoadBaseDisp(rs_rX86_SP, SRegOffset(rl_src2.s_reg_low) + HIWORD_OFFSET, rs_r0, k32, in GenMulLong()
1559 LoadBaseDisp(rs_rX86_SP, SRegOffset(rl_src2.s_reg_low) + LOWORD_OFFSET, rs_r0, k32, in GenMulLong()
utility_x86.cc:662 case k32: in LoadBaseIndexedDisp()
809 case k32: in StoreBaseIndexedDisp()
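
The recurring x86 idiom is cu_->target64 ? k64 : k32 (target_x86.cc:698 and 715, call_x86.cc:295): the same spill and entry-sequence code serves both word sizes by picking the OpSize at runtime. A small stand-alone sketch of how a slot size would follow from that choice; the helper names here are hypothetical:

    // Mirrors the spill-size choice at target_x86.cc:698 and 715: core registers
    // are spilled with k64 stores on x86-64 and k32 stores on 32-bit x86. The
    // enum and helpers here are simplified stand-ins, not the Mir2Lir API.
    enum OpSize { k32, k64 };

    int SizeInBytes(OpSize size) {
      return size == k64 ? 8 : 4;
    }

    // cu_->target64 in the real code; a plain parameter here.
    int CoreSpillSlotBytes(bool target64) {
      OpSize size = target64 ? k64 : k32;
      return SizeInBytes(size);  // 8-byte slots on x86-64, 4-byte slots on x86
    }

    int main() {
      return (CoreSpillSlotBytes(true) == 8 && CoreSpillSlotBytes(false) == 4) ? 0 : 1;
    }
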
/art/compiler/dex/quick/arm64/
target_arm64.cc:882 *op_size = k32; in GetArgPhysicalReg()
956 StoreBaseDisp(TargetPtrReg(kSp), SRegOffset(start_vreg + i), reg, t_loc->wide ? k64 : k32, in FlushIns()
967 t_loc->wide ? k64 : k32, kNotVolatile); in FlushIns()
1073 StoreBaseDisp(TargetPtrReg(kSp), SRegOffset(loc.s_reg_low), loc.reg, k32, in GenDalvikArgsRange()
1142 StoreBaseDisp(TargetPtrReg(kSp), out_offset, rl_arg.reg, k32, kNotVolatile); in GenDalvikArgsRange()
1152 StoreBaseDisp(TargetPtrReg(kSp), out_offset, regSingle, k32, kNotVolatile); in GenDalvikArgsRange()
utility_arm64.cc:1046 DCHECK(size == k32 || size == kSingle); in LoadBaseIndexed()
1065 case k32: // Intentional fall-through. in LoadBaseIndexed()
1134 DCHECK(size == k32 || size == kSingle); in StoreBaseIndexed()
1153 case k32: // Intentional fall-trough. in StoreBaseIndexed()
1220 case k32: // Intentional fall-trough. in LoadBaseDispBody()
1317 case k32: // Intentional fall-trough. in StoreBaseDispBody()
int_arm64.cc:683 DCHECK(size == kSignedByte || size == kSignedHalf || size == k32); in GenInlinedPeek()
698 DCHECK(size == kSignedByte || size == kSignedHalf || size == k32); in GenInlinedPoke()
869 LoadBaseIndexed(rs_src, rs_length, rs_tmp, 0, k32); in GenInlinedArrayCopyCharArray()
870 StoreBaseIndexed(rs_dst, rs_length, rs_tmp, 0, k32); in GenInlinedArrayCopyCharArray()
call_arm64.cc:133 LoadBaseIndexed(table_base, As64BitReg(key_reg), disp_reg, 2, k32); in GenLargePackedSwitch()
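
GenLargePackedSwitch() looks the same on MIPS, ARM, and ARM64: LoadBaseIndexed(table_base, key, disp_reg, 2, k32), a scale of 2 because the switch table is made of 4-byte k32 entries. A host-side model of that indexed k32 load, with stand-in code rather than the Mir2Lir API:

    #include <cstdint>
    #include <cstring>

    // Host-side model of LoadBaseIndexed(table_base, key, dest, /*scale=*/2, k32)
    // as used by GenLargePackedSwitch() in call_mips.cc:202, call_arm.cc:129 and
    // call_arm64.cc:133: the switch table holds 4-byte (k32) entries, so the key
    // is scaled by 1 << 2 to form the byte offset of the entry to load.
    int32_t LoadSwitchEntry(const int32_t* table_base, int32_t key) {
      const int kScale = 2;  // log2(4): k32 entries are 4 bytes wide
      const char* addr = reinterpret_cast<const char*>(table_base) +
                         (static_cast<int64_t>(key) << kScale);
      int32_t entry;
      std::memcpy(&entry, addr, sizeof(entry));  // the k32 load
      return entry;
    }

    int main() {
      static const int32_t table[] = {8, 16, 24, 32};
      return LoadSwitchEntry(table, 2) == 24 ? 0 : 1;
    }
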
/art/compiler/dex/quick/
mir_to_lir.cc:94 LoadBaseDisp(TargetPtrReg(kSp), offset, new_reg, wide ? k64 : k32, kNotVolatile); in LoadArg()
686 GenArrayGet(opt_flags, k32, rl_src[0], rl_src[1], rl_dest, 2); in CompileDalvikInstruction()
704 GenArrayPut(opt_flags, k32, rl_src[1], rl_src[2], rl_src[0], 2, false); in CompileDalvikInstruction()
741 GenIGet(mir, opt_flags, k32, rl_dest, rl_src[0], false, false); in CompileDalvikInstruction()
766 GenIPut(mir, opt_flags, k32, rl_src[0], rl_src[1], false, false); in CompileDalvikInstruction()
dex_file_method_inliner.cc:297 INTRINSIC(JavaLangInteger, ReverseBytes, I_I, kIntrinsicReverseBytes, k32),
300 INTRINSIC(JavaLangInteger, Reverse, I_I, kIntrinsicReverseBits, k32),
354 INTRINSIC(LibcoreIoMemory, PeekIntNative, J_I, kIntrinsicPeek, k32),
358 INTRINSIC(LibcoreIoMemory, PokeIntNative, JI_V, kIntrinsicPoke, k32),
gen_invoke.cc:1356 DCHECK(size == k32 || size == kSignedHalf); in GenInlinedReverseBytes()
1357 OpKind op = (size == k32) ? kOpRev : kOpRevsh; in GenInlinedReverseBytes()
1631 LoadBaseIndexed(rl_object.reg, rl_offset.reg, rl_result.reg, 0, k32); in GenInlinedUnsafeGet()
1680 StoreBaseIndexed(rl_object.reg, rl_offset.reg, rl_value.reg, 0, k32); in GenInlinedUnsafePut()
gen_loadstore.cc:69 … StoreBaseDisp(TargetPtrReg(kSp), SRegOffset(rl_dest.s_reg_low), temp_reg, k32, kNotVolatile); in Workaround7250540()
mir_to_lir.h:1000 return LoadBaseDisp(r_base, displacement, r_dest, k32, kNotVolatile); in Load32Disp()
1042 return StoreBaseDisp(r_base, displacement, r_src, k32, kNotVolatile); in Store32Disp()
1505 return wide ? k64 : ref ? kReference : k32; in LoadStoreOpSize()
gen_common.cc:475 LoadBaseIndexed(r_src, r_idx, r_val, 2, k32); in GenFilledNewArray()
479 StoreBaseIndexed(r_dst, r_idx, r_val, 2, k32); in GenFilledNewArray()
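
The portable-layer hits tie the pattern together: Load32Disp() and Store32Disp() (mir_to_lir.h:1000 and 1042) hard-code k32, while LoadStoreOpSize() (mir_to_lir.h:1505) picks k64, kReference, or k32 from the value's shape. A sketch of that selection logic with simplified types:

    // Simplified stand-ins mirroring the mir_to_lir.h helpers above; the real
    // methods forward to LoadBaseDisp()/StoreBaseDisp() and return LIR*.
    enum OpSize { k32, k64, kReference };

    // mir_to_lir.h:1505 chooses the operand size from the value's shape: wide
    // (64-bit) values use k64, object references use kReference, and everything
    // else falls back to k32. Load32Disp() and Store32Disp() (lines 1000 and
    // 1042) are thin wrappers that hard-code k32 in the same way.
    OpSize LoadStoreOpSize(bool wide, bool ref) {
      return wide ? k64 : ref ? kReference : k32;
    }

    int main() {
      return LoadStoreOpSize(false, false) == k32 ? 0 : 1;
    }
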
/art/compiler/dex/
compiler_enums.h:264 k32, enumerator
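
The final hit is the definition site: k32 is an enumerator of OpSize in compiler_enums.h. A partial sketch restricted to the enumerators visible in these results; the real enum has additional members:

    // Partial reconstruction limited to the enumerators that appear in these
    // results; the actual OpSize enum around compiler_enums.h:264 has further
    // members and may order them differently.
    enum OpSize {
      k32,          // 32-bit integer load/store
      k64,          // 64-bit integer load/store
      kReference,   // object reference; folded into k32 by the 32-bit backends above
      kSingle,      // 32-bit floating-point value
      kSignedByte,  // sign-extending byte access
      kSignedHalf   // sign-extending halfword access
    };

    int main() {
      OpSize s = k32;
      return s == k32 ? 0 : 1;
    }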