
Searched refs: v_acc_q (Results 1 – 5 of 5) sorted by relevance

/external/libvpx/libvpx/vpx_dsp/x86/
sum_squares_sse2.c:37 __m128i v_acc_q = _mm_setzero_si128(); in vpx_sum_squares_2d_i16_sse2() local
86 v_acc_q = _mm_add_epi64(v_acc_q, _mm_and_si128(v_acc_d, v_zext_mask_q)); in vpx_sum_squares_2d_i16_sse2()
87 v_acc_q = _mm_add_epi64(v_acc_q, _mm_srli_epi64(v_acc_d, 32)); in vpx_sum_squares_2d_i16_sse2()
93 v_acc_q = _mm_add_epi64(v_acc_q, _mm_srli_si128(v_acc_q, 8)); in vpx_sum_squares_2d_i16_sse2()
96 return (uint64_t)_mm_cvtsi128_si64(v_acc_q); in vpx_sum_squares_2d_i16_sse2()
100 _mm_storel_epi64((__m128i *)&tmp, v_acc_q); in vpx_sum_squares_2d_i16_sse2()
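
Lines 86–96 above show the usual SSE2 widening reduction: the four 32-bit partial sums in v_acc_d are split into zero-extended even lanes and shifted-down odd lanes, accumulated as 64-bit lanes, and finally folded to a scalar. A minimal self-contained sketch of the idiom follows; the helper name is illustrative, not libvpx's, and the #if mirrors the two extraction paths visible at lines 96 and 100 (direct move on 64-bit builds, store of the low qword elsewhere).

#include <emmintrin.h> /* SSE2 */
#include <stdint.h>

/* Sketch only: horizontally reduce four 32-bit partial sums to one
 * uint64_t without overflow, using the zext-mask-and-shift idiom. */
static uint64_t reduce_u32x4_to_u64(__m128i v_acc_d) {
  /* 0x00000000FFFFFFFF in each 64-bit lane. */
  const __m128i v_zext_mask_q = _mm_set_epi32(0, -1, 0, -1);
  __m128i v_acc_q = _mm_setzero_si128();
  /* Even 32-bit lanes, zero-extended to 64 bits. */
  v_acc_q = _mm_add_epi64(v_acc_q, _mm_and_si128(v_acc_d, v_zext_mask_q));
  /* Odd 32-bit lanes, shifted down into the low half of each 64-bit lane. */
  v_acc_q = _mm_add_epi64(v_acc_q, _mm_srli_epi64(v_acc_d, 32));
  /* Fold the upper 64-bit lane onto the lower one. */
  v_acc_q = _mm_add_epi64(v_acc_q, _mm_srli_si128(v_acc_q, 8));
#if defined(__x86_64__) || defined(_M_X64)
  return (uint64_t)_mm_cvtsi128_si64(v_acc_q);
#else
  {
    uint64_t tmp;
    _mm_storel_epi64((__m128i *)&tmp, v_acc_q);
    return tmp;
  }
#endif
}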
/external/libaom/libaom/aom_dsp/x86/
sum_squares_sse2.c:59 __m128i v_acc_q = _mm_setzero_si128(); in aom_sum_squares_2d_i16_4xn_sse2() local
62 v_acc_q = _mm_add_epi32(v_acc_q, v_acc_d); in aom_sum_squares_2d_i16_4xn_sse2()
67–68 __m128i v_acc_64 = _mm_add_epi64(_mm_srli_epi64(v_acc_q, 32), _mm_and_si128(v_acc_q, v_zext_mask_q)); in aom_sum_squares_2d_i16_4xn_sse2()
85 __m128i v_acc_q = _mm_setzero_si128(); in aom_sum_squares_2d_i16_nxn_sse2() local
112 v_acc_q = _mm_add_epi64(v_acc_q, _mm_and_si128(v_acc_d, v_zext_mask_q)); in aom_sum_squares_2d_i16_nxn_sse2()
113 v_acc_q = _mm_add_epi64(v_acc_q, _mm_srli_epi64(v_acc_d, 32)); in aom_sum_squares_2d_i16_nxn_sse2()
119 v_acc_q = _mm_add_epi64(v_acc_q, _mm_srli_si128(v_acc_q, 8)); in aom_sum_squares_2d_i16_nxn_sse2()
120 return xx_cvtsi128_si64(v_acc_q); in aom_sum_squares_2d_i16_nxn_sse2()
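
In both libaom variants the 32-bit partials come from _mm_madd_epi16, which squares every int16 lane and adds adjacent pairs; the 4xn variant accumulates those 32-bit sums directly (line 62) and widens to 64 bits only once at the end (lines 67–68). A sketch of that shape, assuming height is even and the block is small enough that the 32-bit lanes cannot overflow; the function name is illustrative, not libaom's.

#include <emmintrin.h> /* SSE2 */
#include <stdint.h>

/* Sketch only: sum of squares over a 4-wide block of int16 samples. */
static uint64_t sum_squares_4xn_sketch(const int16_t *src, int stride,
                                       int height) {
  const __m128i v_zext_mask_q = _mm_set_epi32(0, -1, 0, -1);
  __m128i v_acc_q = _mm_setzero_si128();
  for (int r = 0; r < height; r += 2) {
    /* Pack two rows of four int16 samples into one register. */
    const __m128i v_r0 = _mm_loadl_epi64((const __m128i *)(src + 0 * stride));
    const __m128i v_r1 = _mm_loadl_epi64((const __m128i *)(src + 1 * stride));
    const __m128i v_val_w = _mm_unpacklo_epi64(v_r0, v_r1);
    /* madd squares each lane and adds adjacent pairs into 32-bit sums. */
    const __m128i v_acc_d = _mm_madd_epi16(v_val_w, v_val_w);
    v_acc_q = _mm_add_epi32(v_acc_q, v_acc_d); /* cf. line 62 */
    src += 2 * stride;
  }
  /* Widen the four 32-bit partials to 64 bits, then fold (cf. lines 67-68). */
  __m128i v_acc_64 = _mm_add_epi64(_mm_srli_epi64(v_acc_q, 32),
                                   _mm_and_si128(v_acc_q, v_zext_mask_q));
  v_acc_64 = _mm_add_epi64(v_acc_64, _mm_srli_si128(v_acc_64, 8));
  {
    uint64_t tmp;
    _mm_storel_epi64((__m128i *)&tmp, v_acc_64);
    return tmp;
  }
}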
sum_squares_avx2.c:23 __m256i v_acc_q = _mm256_setzero_si256(); in aom_sum_squares_2d_i16_nxn_avx2() local
49–50 v_acc_q = _mm256_add_epi64(v_acc_q, _mm256_and_si256(v_acc_d, v_zext_mask_q)); in aom_sum_squares_2d_i16_nxn_avx2()
51 v_acc_q = _mm256_add_epi64(v_acc_q, _mm256_srli_epi64(v_acc_d, 32)); in aom_sum_squares_2d_i16_nxn_avx2()
54 __m128i lower_64_2_Value = _mm256_castsi256_si128(v_acc_q); in aom_sum_squares_2d_i16_nxn_avx2()
55 __m128i higher_64_2_Value = _mm256_extracti128_si256(v_acc_q, 1); in aom_sum_squares_2d_i16_nxn_avx2()
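
Lines 54–55 begin the standard AVX2 epilogue: the upper 128-bit half of v_acc_q is added onto the lower half, leaving one more 64-bit fold before the scalar move. A sketch of that tail (helper name illustrative); the wedge_utils_avx2 hit further down does the same reduction but folds within each 128-bit lane first (_mm256_srli_si256 at its line 138) before extracting the halves.

#include <immintrin.h> /* AVX2 + SSE2 */
#include <stdint.h>

/* Sketch only: reduce four 64-bit partial sums in a __m256i to a scalar. */
static uint64_t reduce_u64x4_sketch(__m256i v_acc_q) {
  const __m128i v_lo = _mm256_castsi256_si128(v_acc_q);      /* lanes 0-1 */
  const __m128i v_hi = _mm256_extracti128_si256(v_acc_q, 1); /* lanes 2-3 */
  __m128i v_sum = _mm_add_epi64(v_lo, v_hi);
  v_sum = _mm_add_epi64(v_sum, _mm_srli_si128(v_sum, 8));
  {
    uint64_t tmp;
    _mm_storel_epi64((__m128i *)&tmp, v_sum);
    return tmp;
  }
}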
/external/libaom/libaom/av1/encoder/x86/
wedge_utils_sse2.c:107 __m128i v_acc_q; in av1_wedge_sign_from_residuals_sse2() local
173 v_acc_q = _mm_add_epi64(v_acc0_d, v_acc1_d); in av1_wedge_sign_from_residuals_sse2()
175 v_acc_q = _mm_add_epi64(v_acc_q, _mm_srli_si128(v_acc_q, 8)); in av1_wedge_sign_from_residuals_sse2()
178 acc = (uint64_t)_mm_cvtsi128_si64(v_acc_q); in av1_wedge_sign_from_residuals_sse2()
180 xx_storel_64(&acc, v_acc_q); in av1_wedge_sign_from_residuals_sse2()
wedge_utils_avx2.c:138 __m256i v_acc_q = _mm256_add_epi64(v_acc0_d, _mm256_srli_si256(v_acc0_d, 8)); in av1_wedge_sign_from_residuals_avx2() local
140 __m128i v_acc_q_0 = _mm256_castsi256_si128(v_acc_q); in av1_wedge_sign_from_residuals_avx2()
141 __m128i v_acc_q_1 = _mm256_extracti128_si256(v_acc_q, 1); in av1_wedge_sign_from_residuals_avx2()