
Searched refs:d5u8 (Results 1 – 5 of 5) sorted by relevance

/external/libvpx/libvpx/vpx_dsp/arm/
loopfilter_4_neon.c
21 uint8x8_t d5u8, // p1 in loop_filter_neon() argument
36 d20u8 = vabd_u8(d4u8, d5u8); in loop_filter_neon()
37 d21u8 = vabd_u8(d5u8, d6u8); in loop_filter_neon()
53 d28u8 = vabd_u8(d5u8, d16u8); in loop_filter_neon()
59 d5u8 = veor_u8(d5u8, d18u8); in loop_filter_neon()
74 d27s8 = vqsub_s8(vreinterpret_s8_u8(d5u8), in loop_filter_neon()
103 d21s8 = vqadd_s8(vreinterpret_s8_u8(d5u8), d27s8); in loop_filter_neon()
123 uint8x8_t d3u8, d4u8, d5u8, d6u8, d7u8, d16u8, d17u8, d18u8; in vpx_lpf_horizontal_4_neon() local
140 d5u8 = vld1_u8(s); in vpx_lpf_horizontal_4_neon()
153 d3u8, d4u8, d5u8, d6u8, d7u8, d16u8, d17u8, d18u8, in vpx_lpf_horizontal_4_neon()
[all …]
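
The loop_filter_neon() matches above use d5u8 as the p1 row: vabd_u8 builds the |p1 - p0| style mask terms, and veor_u8 with 0x80 flips the pixels into the signed domain before the saturating filter math. A minimal sketch of that pattern follows; it is not the libvpx routine, and the function name, row pointers, and limit parameter are illustrative assumptions.

#include <arm_neon.h>

/* Sketch: mask terms and sign-flip for one 8-pixel edge (hypothetical API). */
void loop_filter_sketch(const uint8_t *p1, const uint8_t *p0,
                        const uint8_t *q0, const uint8_t *q1,
                        uint8_t limit, int8_t *delta_out) {
  uint8x8_t d5u8 = vld1_u8(p1), d6u8 = vld1_u8(p0);
  uint8x8_t d7u8 = vld1_u8(q0), d16u8 = vld1_u8(q1);

  /* Absolute differences that feed the filter mask. */
  uint8x8_t ad_p1p0 = vabd_u8(d5u8, d6u8);
  uint8x8_t ad_q1q0 = vabd_u8(d16u8, d7u8);
  uint8x8_t mask = vand_u8(vcle_u8(ad_p1p0, vdup_n_u8(limit)),
                           vcle_u8(ad_q1q0, vdup_n_u8(limit)));

  /* XOR with 0x80 maps 0..255 onto -128..127; take the saturating
   * p1 - q1 term and keep it only where the mask passes. */
  int8x8_t p1s = vreinterpret_s8_u8(veor_u8(d5u8, vdup_n_u8(0x80)));
  int8x8_t q1s = vreinterpret_s8_u8(veor_u8(d16u8, vdup_n_u8(0x80)));
  vst1_s8(delta_out, vand_s8(vqsub_s8(p1s, q1s), vreinterpret_s8_u8(mask)));
}
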
loopfilter_8_neon.c
21 uint8x8_t d5u8, // p1 in mbloop_filter_neon() argument
41 d20u8 = vabd_u8(d4u8, d5u8); in mbloop_filter_neon()
42 d21u8 = vabd_u8(d5u8, d6u8); in mbloop_filter_neon()
64 d23u8 = vabd_u8(d5u8, d16u8); in mbloop_filter_neon()
111 q14u16 = vaddw_u8(q14u16, d5u8); in mbloop_filter_neon()
116 q14u16 = vaddw_u8(q14u16, d5u8); in mbloop_filter_neon()
121 q14u16 = vsubw_u8(q14u16, d5u8); in mbloop_filter_neon()
138 q14u16 = vsubw_u8(q14u16, d5u8); in mbloop_filter_neon()
146 d25u8 = veor_u8(d5u8, d22u8); in mbloop_filter_neon()
202 q14u16 = vaddw_u8(q14u16, d5u8); in mbloop_filter_neon()
[all …]
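
In mbloop_filter_neon() the wider filter keeps 16-bit running sums, repeatedly adding and subtracting the p1 row (d5u8) with vaddw_u8/vsubw_u8 before narrowing back. Below is a minimal sketch of that widen-accumulate-narrow pattern with illustrative names and only four rows; it is not the libvpx filter itself.

#include <arm_neon.h>

/* Sketch: rounded average of four 8-pixel rows via a 16-bit accumulator. */
void widened_sum_sketch(const uint8_t *p2, const uint8_t *p1,
                        const uint8_t *p0, const uint8_t *q0, uint8_t *out) {
  uint8x8_t d4u8 = vld1_u8(p2), d5u8 = vld1_u8(p1);
  uint8x8_t d6u8 = vld1_u8(p0), d7u8 = vld1_u8(q0);

  uint16x8_t q14u16 = vaddl_u8(d4u8, d5u8); /* p2 + p1, widened to 16 bit */
  q14u16 = vaddw_u8(q14u16, d6u8);          /* + p0 */
  q14u16 = vaddw_u8(q14u16, d7u8);          /* + q0 */

  /* Narrow back to 8 bits with rounding: (sum + 2) >> 2. */
  vst1_u8(out, vrshrn_n_u16(q14u16, 2));
}
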
variance_neon.c
373 uint8x8_t d0u8, d1u8, d2u8, d3u8, d4u8, d5u8, d6u8, d7u8; in vpx_get4x4sse_cs_neon() local
384 d5u8 = vld1_u8(ref_ptr); in vpx_get4x4sse_cs_neon()
396 q12u16 = vsubl_u8(d1u8, d5u8); in vpx_get4x4sse_cs_neon()
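
vpx_get4x4sse_cs_neon() widens the source/reference difference with vsubl_u8 (d1u8 - d5u8 above) before accumulating squared differences. A sketch of one such row follows; the function name and the 32-bit reduction are assumptions, not the libvpx code.

#include <arm_neon.h>

/* Sketch: sum of squared differences of one 8-pixel row. */
uint32_t sse_row_sketch(const uint8_t *src, const uint8_t *ref) {
  uint8x8_t d1u8 = vld1_u8(src);
  uint8x8_t d5u8 = vld1_u8(ref);

  /* Widening subtract wraps mod 2^16; reinterpret as signed before squaring. */
  int16x8_t diff = vreinterpretq_s16_u16(vsubl_u8(d1u8, d5u8));
  int32x4_t acc = vmull_s16(vget_low_s16(diff), vget_low_s16(diff));
  acc = vmlal_s16(acc, vget_high_s16(diff), vget_high_s16(diff));

  /* Horizontal reduction of the four 32-bit partial sums. */
  int64x2_t pair = vpaddlq_s32(acc);
  return (uint32_t)(vgetq_lane_s64(pair, 0) + vgetq_lane_s64(pair, 1));
}
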
/external/libvpx/libvpx/vp8/common/arm/neon/
bilinearpredict_neon.c
31 uint8x8_t d0u8, d1u8, d2u8, d3u8, d4u8, d5u8, d6u8; in vp8_bilinear_predict4x4_neon() local
61 d5u8 = vld1_u8(src_ptr); src_ptr += src_pixels_per_line; in vp8_bilinear_predict4x4_neon()
65 q2u8 = vcombine_u8(d4u8, d5u8); in vp8_bilinear_predict4x4_neon()
139 uint8x8_t d0u8, d1u8, d2u8, d3u8, d4u8, d5u8; in vp8_bilinear_predict8x4_neon() local
168 d5u8 = vext_u8(vget_low_u8(q2u8), vget_high_u8(q2u8), 1); in vp8_bilinear_predict8x4_neon()
174 q7u16 = vmlal_u8(q7u16, d5u8, d1u8); in vp8_bilinear_predict8x4_neon()
209 d5u8 = vqrshrn_n_u16(q4u16, 7); in vp8_bilinear_predict8x4_neon()
214 vst1_u8((uint8_t *)dst_ptr, d5u8); in vp8_bilinear_predict8x4_neon()
226 uint8x8_t d0u8, d1u8, d2u8, d3u8, d4u8, d5u8, d6u8, d7u8, d8u8, d9u8, d11u8; in vp8_bilinear_predict8x8_neon() local
257 d5u8 = vext_u8(vget_low_u8(q2u8), vget_high_u8(q2u8), 1); in vp8_bilinear_predict8x8_neon()
[all …]
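
The bilinear predictors multiply a pixel vector and its +1 neighbour (d5u8 is built with vext_u8 above) by the two filter taps using vmull_u8/vmlal_u8, then round back to 8 bits with vqrshrn_n_u16(..., 7). A minimal sketch of one horizontal pass; the function name and tap parameters are illustrative, and at least 16 readable bytes at src are assumed.

#include <arm_neon.h>

/* Sketch: one horizontal bilinear pass producing 8 output pixels. */
void bilinear_row_sketch(const uint8_t *src, uint8_t tap0, uint8_t tap1,
                         uint8_t *dst) {
  /* Load 16 source bytes; each output lane needs the pixel and its
   * right-hand neighbour, which vext_u8 shifts into place. */
  uint8x16_t q2u8 = vld1q_u8(src);
  uint8x8_t left  = vget_low_u8(q2u8);
  uint8x8_t right = vext_u8(vget_low_u8(q2u8), vget_high_u8(q2u8), 1);

  /* acc = left * tap0 + right * tap1, kept in 16-bit precision. */
  uint16x8_t acc = vmull_u8(left, vdup_n_u8(tap0));
  acc = vmlal_u8(acc, right, vdup_n_u8(tap1));

  /* VP8 bilinear taps sum to 128, so a rounded shift by 7 restores range. */
  vst1_u8(dst, vqrshrn_n_u16(acc, 7));
}
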
sixtappredict_neon.c
33 uint8x8_t d0u8, d1u8, d2u8, d3u8, d4u8, d5u8, d18u8, d19u8, d20u8, d21u8; in vp8_sixtap_predict4x4_neon() local
64 d5u8 = vreinterpret_u8_s8(vabs_s8(d5s8)); in vp8_sixtap_predict4x4_neon()
99 q5u16 = vmull_u8(d25u8, d5u8); in vp8_sixtap_predict4x4_neon()
100 q6u16 = vmull_u8(d26u8, d5u8); in vp8_sixtap_predict4x4_neon()
146 d5u8 = vreinterpret_u8_s8(vabs_s8(d5s8)); in vp8_sixtap_predict4x4_neon()
177 q7u16 = vmull_u8(vreinterpret_u8_u32(d0u32x2.val[0]), d5u8); in vp8_sixtap_predict4x4_neon()
178 q8u16 = vmull_u8(vreinterpret_u8_u32(d1u32x2.val[0]), d5u8); in vp8_sixtap_predict4x4_neon()
273 q7u16 = vmull_u8(vreinterpret_u8_u32(d0u32x2.val[0]), d5u8); in vp8_sixtap_predict4x4_neon()
274 q8u16 = vmull_u8(vreinterpret_u8_u32(d1u32x2.val[0]), d5u8); in vp8_sixtap_predict4x4_neon()
275 q12u16 = vmull_u8(d31u8, d5u8); in vp8_sixtap_predict4x4_neon()
[all …]
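
The six-tap predictor stores its kernel as signed taps; negative taps are turned into magnitudes with vabs_s8 (that is where d5u8 = vreinterpret_u8_s8(vabs_s8(d5s8)) comes from) and applied in 16-bit lanes. A sketch of two of the six terms; the function name, tap signs, and row layout are assumptions, not the libvpx code.

#include <arm_neon.h>

/* Sketch: one positive and one negative six-tap term for an 8-pixel row. */
void sixtap_terms_sketch(const uint8_t *row0, const uint8_t *row1,
                         int8_t tap0, int8_t tap1, uint8_t *dst) {
  /* tap0 assumed positive, tap1 assumed negative; keep its magnitude. */
  uint8x8_t d0u8 = vdup_n_u8((uint8_t)tap0);
  uint8x8_t d5u8 = vreinterpret_u8_s8(vabs_s8(vdup_n_s8(tap1)));

  uint16x8_t acc = vmull_u8(vld1_u8(row0), d0u8); /* + tap0 * row0 */
  acc = vmlsl_u8(acc, vld1_u8(row1), d5u8);       /* - |tap1| * row1 */

  /* 16-bit lanes wrap modulo 2^16; reinterpret as signed, then apply a
   * saturating rounded narrow by 7 (the taps sum to 128 in VP8). */
  vst1_u8(dst, vqrshrun_n_s16(vreinterpretq_s16_u16(acc), 7));
}
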