/external/libhevc/common/arm/
ihevc_inter_pred_filters_luma_vert_w16inp.s
    169  vmull.s16 q5,d2,d23     @mul_res2 = vmull_u8(src_tmp3, coeffabs_1)@
    171  vmlal.s16 q5,d1,d22     @mul_res2 = vmlal_u8(mul_res2, src_tmp2, coeffabs_0)@
    173  vmlal.s16 q5,d3,d24     @mul_res2 = vmlal_u8(mul_res2, src_tmp4, coeffabs_2)@
    175  vmlal.s16 q5,d4,d25     @mul_res2 = vmlal_u8(mul_res2, src_tmp1, coeffabs_3)@
    177  vmlal.s16 q5,d5,d26     @mul_res2 = vmlal_u8(mul_res2, src_tmp2, coeffabs_4)@
    179  vmlal.s16 q5,d6,d27     @mul_res2 = vmlal_u8(mul_res2, src_tmp3, coeffabs_5)@
    180  vmlal.s16 q5,d7,d28     @mul_res2 = vmlal_u8(mul_res2, src_tmp4, coeffabs_6)@
    181  vmlal.s16 q5,d16,d29    @mul_res2 = vmlal_u8(mul_res2, src_tmp1, coeffabs_7)@
    241  vmull.s16 q5,d2,d23     @mul_res2 = vmull_u8(src_tmp3, coeffabs_1)@
    242  vmlal.s16 q5,d1,d22     @mul_res2 = vmlal_u8(mul_res2, src_tmp2, coeffabs_0)@
    [all …]
ihevc_inter_pred_luma_vert_w16inp_w16out.s
    179  vmull.s16 q5,d2,d23     @mul_res2 = vmull_u8(src_tmp3, coeffabs_1)@
    181  vmlal.s16 q5,d1,d22     @mul_res2 = vmlal_u8(mul_res2, src_tmp2, coeffabs_0)@
    183  vmlal.s16 q5,d3,d24     @mul_res2 = vmlal_u8(mul_res2, src_tmp4, coeffabs_2)@
    185  vmlal.s16 q5,d4,d25     @mul_res2 = vmlal_u8(mul_res2, src_tmp1, coeffabs_3)@
    187  vmlal.s16 q5,d5,d26     @mul_res2 = vmlal_u8(mul_res2, src_tmp2, coeffabs_4)@
    189  vmlal.s16 q5,d6,d27     @mul_res2 = vmlal_u8(mul_res2, src_tmp3, coeffabs_5)@
    190  vmlal.s16 q5,d7,d28     @mul_res2 = vmlal_u8(mul_res2, src_tmp4, coeffabs_6)@
    191  vmlal.s16 q5,d16,d29    @mul_res2 = vmlal_u8(mul_res2, src_tmp1, coeffabs_7)@
    255  vmull.s16 q5,d2,d23     @mul_res2 = vmull_u8(src_tmp3, coeffabs_1)@
    256  vmlal.s16 q5,d1,d22     @mul_res2 = vmlal_u8(mul_res2, src_tmp2, coeffabs_0)@
    [all …]
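The two *_w16inp kernels above run the vertical pass on 16-bit intermediates (the output of the horizontal pass), so every tap is a plain vmull.s16/vmlal.s16 into a 32-bit accumulator: the coefficient sign travels in the signed 16-bit coefficient itself, and the vmull_u8/vmlal_u8 comments appear to be carried over from the 8-bit variant. A minimal intrinsics sketch of that chain, assuming 4-lane int16 inputs; the helper name and argument layout are illustrative assumptions, not the library's API:

    #include <arm_neon.h>

    /* Sketch of the vmull.s16/vmlal.s16 accumulation used by the *_w16inp
     * vertical kernels: eight 16-bit rows times eight signed 16-bit
     * coefficients, widened into a 32-bit accumulator. */
    static inline int32x4_t vert_filter_w16inp_sketch(const int16x4_t src[8],
                                                      const int16x4_t coeff[8])
    {
        int32x4_t mul_res2 = vmull_s16(src[0], coeff[0]);
        for (int k = 1; k < 8; ++k)
            mul_res2 = vmlal_s16(mul_res2, src[k], coeff[k]);
        return mul_res2;   /* caller rounds and narrows the 32-bit sums */
    }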
ihevc_inter_pred_filters_luma_vert.s
    182  vmull.u8 q5,d2,d23      @mul_res2 = vmull_u8(src_tmp3, coeffabs_1)@
    185  vmlsl.u8 q5,d1,d22      @mul_res2 = vmlsl_u8(mul_res2, src_tmp2, coeffabs_0)@
    188  vmlsl.u8 q5,d3,d24      @mul_res2 = vmlsl_u8(mul_res2, src_tmp4, coeffabs_2)@
    191  vmlal.u8 q5,d4,d25      @mul_res2 = vmlal_u8(mul_res2, src_tmp1, coeffabs_3)@
    193  vmlal.u8 q5,d5,d26      @mul_res2 = vmlal_u8(mul_res2, src_tmp2, coeffabs_4)@
    195  vmlsl.u8 q5,d6,d27      @mul_res2 = vmlsl_u8(mul_res2, src_tmp3, coeffabs_5)@
    198  vmlal.u8 q5,d7,d28      @mul_res2 = vmlal_u8(mul_res2, src_tmp4, coeffabs_6)@
    201  vmlsl.u8 q5,d16,d29     @mul_res2 = vmlsl_u8(mul_res2, src_tmp1, coeffabs_7)@
    273  vmull.u8 q5,d2,d23      @mul_res2 = vmull_u8(src_tmp3, coeffabs_1)@
    276  vmlsl.u8 q5,d1,d22      @mul_res2 = vmlsl_u8(mul_res2, src_tmp2, coeffabs_0)@
    [all …]
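ihevc_inter_pred_filters_luma_vert.s works directly on 8-bit pixels with absolute coefficient values, so negative taps are folded in with vmlsl.u8 and positive taps with vmlal.u8. A sketch of the same accumulation in intrinsics, following the vmlal/vmlsl sign sequence visible in the snippet above; rows and coefficients are passed as arrays and the helper name is hypothetical:

    #include <arm_neon.h>

    /* Eight-tap vertical filter on 8-bit pixels with unsigned absolute
     * coefficients: vmlal_u8 adds the positive taps, vmlsl_u8 subtracts the
     * negative ones (sign pattern taken from the snippet above). */
    static inline int16x8_t vert_filter_u8_sketch(const uint8x8_t src[8],
                                                  const uint8x8_t coeffabs[8])
    {
        uint16x8_t mul_res2 = vmull_u8(src[1], coeffabs[1]);      /* +c1 */
        mul_res2 = vmlsl_u8(mul_res2, src[0], coeffabs[0]);       /* -c0 */
        mul_res2 = vmlsl_u8(mul_res2, src[2], coeffabs[2]);       /* -c2 */
        mul_res2 = vmlal_u8(mul_res2, src[3], coeffabs[3]);       /* +c3 */
        mul_res2 = vmlal_u8(mul_res2, src[4], coeffabs[4]);       /* +c4 */
        mul_res2 = vmlsl_u8(mul_res2, src[5], coeffabs[5]);       /* -c5 */
        mul_res2 = vmlal_u8(mul_res2, src[6], coeffabs[6]);       /* +c6 */
        mul_res2 = vmlsl_u8(mul_res2, src[7], coeffabs[7]);       /* -c7 */
        /* modular wraparound of the u16 accumulator yields the signed sum */
        return vreinterpretq_s16_u16(mul_res2);
    }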
ihevc_intra_pred_filters_neon_intr.c
    2126  uint16x8_t mul_res1, mul_res2, add_res;                                    in ihevc_intra_pred_luma_mode_3_to_9_neonintr() local
    2156  mul_res2 = vmull_u8(ref_main_idx_1, dup_const_fract);                      in ihevc_intra_pred_luma_mode_3_to_9_neonintr()
    2158  add_res = vaddq_u16(mul_res1, mul_res2);                                   in ihevc_intra_pred_luma_mode_3_to_9_neonintr()
    2212  uint16x8_t mul_res1, mul_res2, add_res;                                    in ihevc_intra_pred_luma_mode_3_to_9_neonintr() local
    2241  mul_res2 = vmull_u8(vreinterpret_u8_u32(pu1_ref_val2), dup_fract);         in ihevc_intra_pred_luma_mode_3_to_9_neonintr()
    2243  add_res = vaddq_u16(mul_res1, mul_res2);                                   in ihevc_intra_pred_luma_mode_3_to_9_neonintr()
    2334  uint16x8_t mul_res1, mul_res2, add_res;                                    in ihevc_intra_pred_luma_mode_11_to_17_neonintr() local
    2436  mul_res2 = vmull_u8(ref_main_idx_1, dup_const_fract);                      in ihevc_intra_pred_luma_mode_11_to_17_neonintr()
    2438  add_res = vaddq_u16(mul_res1, mul_res2);                                   in ihevc_intra_pred_luma_mode_11_to_17_neonintr()
    2506  mul_res2 = vmull_u8(vreinterpret_u8_u32(ref_main_idx2), dup_const_fract);  in ihevc_intra_pred_luma_mode_11_to_17_neonintr()
    [all …]
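In the intra-prediction intrinsics, mul_res1/mul_res2 implement a two-tap interpolation between adjacent reference samples: one sample weighted by (32 - fract), the other by fract, summed and rounded back to 8 bits. A minimal sketch of that step, assuming the standard HEVC 1/32-pel weighting and rounding shift; the helper name is an assumption for illustration:

    #include <arm_neon.h>

    /* Two-tap fractional interpolation as used around mul_res1/mul_res2 in
     * ihevc_intra_pred_luma_mode_3_to_9_neonintr():
     *   out = (ref0 * (32 - fract) + ref1 * fract + 16) >> 5 */
    static inline uint8x8_t intra_interp_sketch(uint8x8_t ref_main_idx,
                                                uint8x8_t ref_main_idx_1,
                                                uint8_t fract)
    {
        uint8x8_t dup_const_32_fract = vdup_n_u8((uint8_t)(32 - fract));
        uint8x8_t dup_const_fract    = vdup_n_u8(fract);

        uint16x8_t mul_res1 = vmull_u8(ref_main_idx,   dup_const_32_fract);
        uint16x8_t mul_res2 = vmull_u8(ref_main_idx_1, dup_const_fract);
        uint16x8_t add_res  = vaddq_u16(mul_res1, mul_res2);

        return vrshrn_n_u16(add_res, 5);   /* rounded narrowing shift to 8 bit */
    }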
/external/libvpx/config/arm-neon/vpx_dsp/arm/
vpx_convolve8_vert_filter_type2_neon.asm.S
    115  vmlal.u8 q5, d2, d23     @mul_res2 = vmull_u8(src_tmp3,
    118  vmlsl.u8 q5, d1, d22     @mul_res2 = vmlsl_u8(mul_res2,
    121  vmlsl.u8 q5, d3, d24     @mul_res2 = vmlsl_u8(mul_res2,
    124  vmlal.u8 q5, d4, d25     @mul_res2 = vmlal_u8(mul_res2,
    129  vmlal.u8 q5, d5, d26     @mul_res2 = vmlal_u8(mul_res2,
    132  vmlsl.u8 q5, d6, d27     @mul_res2 = vmlsl_u8(mul_res2,
    135  vmlal.u8 q5, d7, d28     @mul_res2 = vmlal_u8(mul_res2,
    138  vmlsl.u8 q5, d16, d29    @mul_res2 = vmlsl_u8(mul_res2,
    210  vmlal.u8 q5, d2, d23     @mul_res2 = vmull_u8(src_tmp3,
    213  vmlsl.u8 q5, d1, d22     @mul_res2 = vmlsl_u8(mul_res2,
    [all …]
vpx_convolve8_avg_vert_filter_type2_neon.asm.S
    115  vmlal.u8 q5, d2, d23     @mul_res2 = vmull_u8(src_tmp3,
    118  vmlsl.u8 q5, d1, d22     @mul_res2 = vmlsl_u8(mul_res2,
    121  vmlsl.u8 q5, d3, d24     @mul_res2 = vmlsl_u8(mul_res2,
    124  vmlal.u8 q5, d4, d25     @mul_res2 = vmlal_u8(mul_res2,
    130  vmlal.u8 q5, d5, d26     @mul_res2 = vmlal_u8(mul_res2,
    133  vmlsl.u8 q5, d6, d27     @mul_res2 = vmlsl_u8(mul_res2,
    136  vmlal.u8 q5, d7, d28     @mul_res2 = vmlal_u8(mul_res2,
    139  vmlsl.u8 q5, d16, d29    @mul_res2 = vmlsl_u8(mul_res2,
    216  vmlal.u8 q5, d2, d23     @mul_res2 = vmull_u8(src_tmp3,
    219  vmlsl.u8 q5, d1, d22     @mul_res2 = vmlsl_u8(mul_res2,
    [all …]
vpx_convolve8_vert_filter_type1_neon.asm.S
    115  vmlsl.u8 q5, d2, d23     @mul_res2 = vmull_u8(src_tmp3,
    118  vmlsl.u8 q5, d1, d22     @mul_res2 = vmlsl_u8(mul_res2,
    121  vmlal.u8 q5, d3, d24     @mul_res2 = vmlsl_u8(mul_res2,
    124  vmlal.u8 q5, d4, d25     @mul_res2 = vmlal_u8(mul_res2,
    129  vmlal.u8 q5, d5, d26     @mul_res2 = vmlal_u8(mul_res2,
    132  vmlal.u8 q5, d6, d27     @mul_res2 = vmlsl_u8(mul_res2,
    135  vmlsl.u8 q5, d7, d28     @mul_res2 = vmlal_u8(mul_res2,
    138  vmlsl.u8 q5, d16, d29    @mul_res2 = vmlsl_u8(mul_res2,
    209  vmlsl.u8 q5, d2, d23     @mul_res2 = vmull_u8(src_tmp3,
    212  vmlsl.u8 q5, d1, d22     @mul_res2 = vmlsl_u8(mul_res2,
    [all …]
vpx_convolve8_avg_vert_filter_type1_neon.asm.S
    114  vmlsl.u8 q5, d2, d23     @mul_res2 = vmull_u8(src_tmp3,
    117  vmlsl.u8 q5, d1, d22     @mul_res2 = vmlsl_u8(mul_res2,
    120  vmlal.u8 q5, d3, d24     @mul_res2 = vmlsl_u8(mul_res2,
    123  vmlal.u8 q5, d4, d25     @mul_res2 = vmlal_u8(mul_res2,
    129  vmlal.u8 q5, d5, d26     @mul_res2 = vmlal_u8(mul_res2,
    132  vmlal.u8 q5, d6, d27     @mul_res2 = vmlsl_u8(mul_res2,
    135  vmlsl.u8 q5, d7, d28     @mul_res2 = vmlal_u8(mul_res2,
    138  vmlsl.u8 q5, d16, d29    @mul_res2 = vmlsl_u8(mul_res2,
    215  vmlsl.u8 q5, d2, d23     @mul_res2 = vmull_u8(src_tmp3,
    218  vmlsl.u8 q5, d1, d22     @mul_res2 = vmlsl_u8(mul_res2,
    [all …]
/external/libvpx/libvpx/vpx_dsp/arm/
vpx_convolve8_vert_filter_type2_neon.asm
    108  vmlal.u8 q5, d2, d23     ;mul_res2 = vmull_u8(src_tmp3,
    111  vmlsl.u8 q5, d1, d22     ;mul_res2 = vmlsl_u8(mul_res2,
    114  vmlsl.u8 q5, d3, d24     ;mul_res2 = vmlsl_u8(mul_res2,
    117  vmlal.u8 q5, d4, d25     ;mul_res2 = vmlal_u8(mul_res2,
    122  vmlal.u8 q5, d5, d26     ;mul_res2 = vmlal_u8(mul_res2,
    125  vmlsl.u8 q5, d6, d27     ;mul_res2 = vmlsl_u8(mul_res2,
    128  vmlal.u8 q5, d7, d28     ;mul_res2 = vmlal_u8(mul_res2,
    131  vmlsl.u8 q5, d16, d29    ;mul_res2 = vmlsl_u8(mul_res2,
    203  vmlal.u8 q5, d2, d23     ;mul_res2 = vmull_u8(src_tmp3,
    206  vmlsl.u8 q5, d1, d22     ;mul_res2 = vmlsl_u8(mul_res2,
    [all …]
vpx_convolve8_avg_vert_filter_type2_neon.asm
    108  vmlal.u8 q5, d2, d23     ;mul_res2 = vmull_u8(src_tmp3,
    111  vmlsl.u8 q5, d1, d22     ;mul_res2 = vmlsl_u8(mul_res2,
    114  vmlsl.u8 q5, d3, d24     ;mul_res2 = vmlsl_u8(mul_res2,
    117  vmlal.u8 q5, d4, d25     ;mul_res2 = vmlal_u8(mul_res2,
    123  vmlal.u8 q5, d5, d26     ;mul_res2 = vmlal_u8(mul_res2,
    126  vmlsl.u8 q5, d6, d27     ;mul_res2 = vmlsl_u8(mul_res2,
    129  vmlal.u8 q5, d7, d28     ;mul_res2 = vmlal_u8(mul_res2,
    132  vmlsl.u8 q5, d16, d29    ;mul_res2 = vmlsl_u8(mul_res2,
    209  vmlal.u8 q5, d2, d23     ;mul_res2 = vmull_u8(src_tmp3,
    212  vmlsl.u8 q5, d1, d22     ;mul_res2 = vmlsl_u8(mul_res2,
    [all …]
vpx_convolve8_vert_filter_type1_neon.asm
    108  vmlsl.u8 q5, d2, d23     ;mul_res2 = vmull_u8(src_tmp3,
    111  vmlsl.u8 q5, d1, d22     ;mul_res2 = vmlsl_u8(mul_res2,
    114  vmlal.u8 q5, d3, d24     ;mul_res2 = vmlsl_u8(mul_res2,
    117  vmlal.u8 q5, d4, d25     ;mul_res2 = vmlal_u8(mul_res2,
    122  vmlal.u8 q5, d5, d26     ;mul_res2 = vmlal_u8(mul_res2,
    125  vmlal.u8 q5, d6, d27     ;mul_res2 = vmlsl_u8(mul_res2,
    128  vmlsl.u8 q5, d7, d28     ;mul_res2 = vmlal_u8(mul_res2,
    131  vmlsl.u8 q5, d16, d29    ;mul_res2 = vmlsl_u8(mul_res2,
    202  vmlsl.u8 q5, d2, d23     ;mul_res2 = vmull_u8(src_tmp3,
    205  vmlsl.u8 q5, d1, d22     ;mul_res2 = vmlsl_u8(mul_res2,
    [all …]
vpx_convolve8_avg_vert_filter_type1_neon.asm
    107  vmlsl.u8 q5, d2, d23     ;mul_res2 = vmull_u8(src_tmp3,
    110  vmlsl.u8 q5, d1, d22     ;mul_res2 = vmlsl_u8(mul_res2,
    113  vmlal.u8 q5, d3, d24     ;mul_res2 = vmlsl_u8(mul_res2,
    116  vmlal.u8 q5, d4, d25     ;mul_res2 = vmlal_u8(mul_res2,
    122  vmlal.u8 q5, d5, d26     ;mul_res2 = vmlal_u8(mul_res2,
    125  vmlal.u8 q5, d6, d27     ;mul_res2 = vmlsl_u8(mul_res2,
    128  vmlsl.u8 q5, d7, d28     ;mul_res2 = vmlal_u8(mul_res2,
    131  vmlsl.u8 q5, d16, d29    ;mul_res2 = vmlsl_u8(mul_res2,
    208  vmlsl.u8 q5, d2, d23     ;mul_res2 = vmull_u8(src_tmp3,
    211  vmlsl.u8 q5, d1, d22     ;mul_res2 = vmlsl_u8(mul_res2,
    [all …]
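The vpx_convolve8 vertical variants listed above all compute the same 8-tap column convolution; the type1/type2 split only changes which taps go through vmlal.u8 versus vmlsl.u8 so that either coefficient sign pattern can be processed with absolute values, and the _avg_ variants additionally average the result with the pixel already in the destination. As a reference for what each NEON lane computes, here is a scalar sketch, assuming libvpx's usual 7-bit fixed-point filter precision (FILTER_BITS == 7); the function name is made up for illustration:

    #include <stdint.h>
    #include <stddef.h>

    /* Scalar model of one output pixel of the vertical 8-tap pass that the
     * vpx_convolve8_vert_filter_type*_neon kernels vectorize.  src points at
     * the topmost of the eight tap rows; filter holds signed fixed-point
     * coefficients that sum to 1 << 7. */
    static uint8_t convolve8_vert_pixel_sketch(const uint8_t *src,
                                               ptrdiff_t src_stride,
                                               const int16_t filter[8])
    {
        int sum = 0;
        for (int k = 0; k < 8; ++k)
            sum += filter[k] * src[k * src_stride];

        sum = (sum + 64) >> 7;          /* round to nearest, drop FILTER_BITS */
        if (sum < 0) sum = 0;           /* clip to the valid pixel range */
        if (sum > 255) sum = 255;
        return (uint8_t)sum;
    }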