/external/libvpx/libvpx/vpx_dsp/x86/
D | quantize_avx.c
     37  __m128i cmp_mask0, cmp_mask1;  in vpx_quantize_b_avx() local
     59  cmp_mask1 = _mm_cmpgt_epi16(qcoeff1, zbin);  in vpx_quantize_b_avx()
     61  all_zero = _mm_or_si128(cmp_mask0, cmp_mask1);  in vpx_quantize_b_avx()
     89  qcoeff1 = _mm_and_si128(qcoeff1, cmp_mask1);  in vpx_quantize_b_avx()
     99  scan_for_eob(&qcoeff0, &qcoeff1, cmp_mask0, cmp_mask1, iscan, 0, zero);  in vpx_quantize_b_avx()
    111  cmp_mask1 = _mm_cmpgt_epi16(qcoeff1, zbin);  in vpx_quantize_b_avx()
    113  all_zero = _mm_or_si128(cmp_mask0, cmp_mask1);  in vpx_quantize_b_avx()
    131  qcoeff1 = _mm_and_si128(qcoeff1, cmp_mask1);  in vpx_quantize_b_avx()
    139  eob0 = scan_for_eob(&qcoeff0, &qcoeff1, cmp_mask0, cmp_mask1, iscan, index,  in vpx_quantize_b_avx()
    163  __m128i cmp_mask0, cmp_mask1;  in vpx_quantize_b_32x32_avx() local
    [all …]

D | quantize_ssse3.c
     33  __m128i cmp_mask0, cmp_mask1;  in vpx_quantize_b_ssse3() local
     52  cmp_mask1 = _mm_cmpgt_epi16(qcoeff1, zbin);  in vpx_quantize_b_ssse3()
     66  qcoeff1 = _mm_and_si128(qcoeff1, cmp_mask1);  in vpx_quantize_b_ssse3()
     75  eob = scan_for_eob(&qcoeff0, &qcoeff1, cmp_mask0, cmp_mask1, iscan, 0, zero);  in vpx_quantize_b_ssse3()
     86  cmp_mask1 = _mm_cmpgt_epi16(qcoeff1, zbin);  in vpx_quantize_b_ssse3()
     95  qcoeff1 = _mm_and_si128(qcoeff1, cmp_mask1);  in vpx_quantize_b_ssse3()
    103  eob0 = scan_for_eob(&qcoeff0, &qcoeff1, cmp_mask0, cmp_mask1, iscan, index,  in vpx_quantize_b_ssse3()
    128  __m128i cmp_mask0, cmp_mask1;  in vpx_quantize_b_32x32_ssse3() local
    168  cmp_mask1 = _mm_cmpgt_epi16(qcoeff1, zbin);  in vpx_quantize_b_32x32_ssse3()
    170  all_zero = _mm_or_si128(cmp_mask0, cmp_mask1);  in vpx_quantize_b_32x32_ssse3()
    [all …]

D | quantize_sse2.c
     33  __m128i cmp_mask0, cmp_mask1;  in vpx_quantize_b_sse2() local
     56  cmp_mask1 = _mm_cmpgt_epi16(qcoeff1, zbin);  in vpx_quantize_b_sse2()
     72  qcoeff1 = _mm_and_si128(qcoeff1, cmp_mask1);  in vpx_quantize_b_sse2()
     81  eob = scan_for_eob(&qcoeff0, &qcoeff1, cmp_mask0, cmp_mask1, iscan, 0, zero);  in vpx_quantize_b_sse2()
     94  cmp_mask1 = _mm_cmpgt_epi16(qcoeff1, zbin);  in vpx_quantize_b_sse2()
    103  qcoeff1 = _mm_and_si128(qcoeff1, cmp_mask1);  in vpx_quantize_b_sse2()
    111  eob0 = scan_for_eob(&qcoeff0, &qcoeff1, cmp_mask0, cmp_mask1, iscan, index,  in vpx_quantize_b_sse2()

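The libvpx hits above all follow the same mask idiom: compare absolute coefficients against the zbin threshold with _mm_cmpgt_epi16, OR the two 8-lane masks to decide whether a whole group can be skipped, AND the quantized output with the mask so sub-threshold coefficients stay exactly zero, and feed the masks to scan_for_eob(). Below is a minimal, self-contained sketch of that idiom under assumptions: quantize_mask_16x8_sketch is a hypothetical name, and the real quantization math (rounding, quant multiply, shift, dequant) and EOB scan are omitted; this is not the actual vpx_quantize_b_sse2() code.

#include <emmintrin.h> /* SSE2 */
#include <stdint.h>
#include <stdio.h>

/* Hypothetical sketch of the cmp_mask0/cmp_mask1 idiom from the hits above:
 * 16 int16 coefficients per call, zeroed wherever |coeff| <= zbin. */
static void quantize_mask_16x8_sketch(const int16_t *coeff, int16_t zbin_value,
                                      int16_t *out) {
  const __m128i zero = _mm_setzero_si128();
  const __m128i zbin = _mm_set1_epi16(zbin_value);

  __m128i coeff0 = _mm_loadu_si128((const __m128i *)coeff);
  __m128i coeff1 = _mm_loadu_si128((const __m128i *)(coeff + 8));

  /* abs() via max(x, -x) here; the real kernels use sign masks (SSE2) or
   * _mm_abs_epi16 (SSSE3/AVX). */
  __m128i abs0 = _mm_max_epi16(coeff0, _mm_sub_epi16(zero, coeff0));
  __m128i abs1 = _mm_max_epi16(coeff1, _mm_sub_epi16(zero, coeff1));

  /* cmp_mask lanes are 0xFFFF where |coeff| > zbin, 0 elsewhere. */
  __m128i cmp_mask0 = _mm_cmpgt_epi16(abs0, zbin);
  __m128i cmp_mask1 = _mm_cmpgt_epi16(abs1, zbin);

  /* If no lane survives the zbin check, the whole group is skipped. */
  __m128i all_zero = _mm_or_si128(cmp_mask0, cmp_mask1);
  if (_mm_movemask_epi8(all_zero) == 0) {
    _mm_storeu_si128((__m128i *)out, zero);
    _mm_storeu_si128((__m128i *)(out + 8), zero);
    return;
  }

  /* The real kernels quantize here (round, multiply, shift). The masking
   * step is the part the hits show: AND with cmp_mask so coefficients
   * below the zbin threshold are forced to zero. */
  __m128i qcoeff0 = _mm_and_si128(coeff0, cmp_mask0);
  __m128i qcoeff1 = _mm_and_si128(coeff1, cmp_mask1);

  _mm_storeu_si128((__m128i *)out, qcoeff0);
  _mm_storeu_si128((__m128i *)(out + 8), qcoeff1);
}

int main(void) {
  int16_t coeff[16] = { 3, -40, 7, 95, -2, 0, 60, -5,
                        1, -1, 33, -70, 4, -4, 8, 120 };
  int16_t out[16];
  quantize_mask_16x8_sketch(coeff, 10, out);
  for (int i = 0; i < 16; ++i) printf("%d ", out[i]);
  printf("\n");
  return 0;
}

The early-out on _mm_movemask_epi8(all_zero) mirrors why the hits show all_zero = _mm_or_si128(cmp_mask0, cmp_mask1) before any further work.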
/external/libaom/libaom/aom_dsp/x86/
D | adaptive_quantize_sse2.c
     34  __m128i cmp_mask0, cmp_mask1;  in aom_quantize_b_adaptive_sse2() local
     71  cmp_mask1 = _mm_cmpgt_epi16(qcoeff1, zbin);  in aom_quantize_b_adaptive_sse2()
     73  update_mask1(&cmp_mask0, &cmp_mask1, iscan, &is_found1, &mask1);  in aom_quantize_b_adaptive_sse2()
     76  all_zero = _mm_or_si128(cmp_mask0, cmp_mask1);  in aom_quantize_b_adaptive_sse2()
    105  qcoeff1 = _mm_and_si128(qcoeff1, cmp_mask1);  in aom_quantize_b_adaptive_sse2()
    132  cmp_mask1 = _mm_cmpgt_epi16(qcoeff1, zbin);  in aom_quantize_b_adaptive_sse2()
    134  update_mask1(&cmp_mask0, &cmp_mask1, iscan + index, &is_found1, &mask1);  in aom_quantize_b_adaptive_sse2()
    136  all_zero = _mm_or_si128(cmp_mask0, cmp_mask1);  in aom_quantize_b_adaptive_sse2()
    156  qcoeff1 = _mm_and_si128(qcoeff1, cmp_mask1);  in aom_quantize_b_adaptive_sse2()
    237  __m128i cmp_mask0, cmp_mask1;  in aom_quantize_b_32x32_adaptive_sse2() local
    [all …]

D | quantize_sse2.c
     34  __m128i cmp_mask0, cmp_mask1;  in aom_quantize_b_sse2() local
     55  cmp_mask1 = _mm_cmpgt_epi16(qcoeff1, zbin);  in aom_quantize_b_sse2()
     71  qcoeff1 = _mm_and_si128(qcoeff1, cmp_mask1);  in aom_quantize_b_sse2()
     84  scan_for_eob(&coeff0, &coeff1, cmp_mask0, cmp_mask1, iscan_ptr, 0, zero);  in aom_quantize_b_sse2()
     97  cmp_mask1 = _mm_cmpgt_epi16(qcoeff1, zbin);  in aom_quantize_b_sse2()
    106  qcoeff1 = _mm_and_si128(qcoeff1, cmp_mask1);  in aom_quantize_b_sse2()
    117  eob0 = scan_for_eob(&coeff0, &coeff1, cmp_mask0, cmp_mask1, iscan_ptr,  in aom_quantize_b_sse2()

D | highbd_adaptive_quantize_sse2.c
     74  __m128i coeff[2], cmp_mask0, cmp_mask1;  in highbd_update_mask0() local
     79  cmp_mask1 = _mm_cmpgt_epi32(coeff[1], threshold[1]);  in highbd_update_mask0()
     81  cmp_mask0 = _mm_packs_epi32(cmp_mask0, cmp_mask1);  in highbd_update_mask0()
    111  __m128i cmp_mask0, cmp_mask1, cmp_mask;  in aom_highbd_quantize_b_adaptive_sse2() local
    163  cmp_mask1 = _mm_cmpgt_epi32(qcoeff1, zbin);  in aom_highbd_quantize_b_adaptive_sse2()
    164  cmp_mask = _mm_packs_epi32(cmp_mask0, cmp_mask1);  in aom_highbd_quantize_b_adaptive_sse2()
    168  all_zero = _mm_or_si128(cmp_mask0, cmp_mask1);  in aom_highbd_quantize_b_adaptive_sse2()
    193  qcoeff1 = _mm_and_si128(qcoeff1, cmp_mask1);  in aom_highbd_quantize_b_adaptive_sse2()
    219  cmp_mask1 = _mm_cmpgt_epi32(qcoeff1, zbin);  in aom_highbd_quantize_b_adaptive_sse2()
    220  cmp_mask = _mm_packs_epi32(cmp_mask0, cmp_mask1);  in aom_highbd_quantize_b_adaptive_sse2()
    [all …]

D | quantize_ssse3.c
     77  __m128i cmp_mask0, cmp_mask1, all_zero;  in aom_quantize_b_64x64_ssse3() local
    105  cmp_mask1 = _mm_cmpgt_epi16(qcoeff1, zbin);  in aom_quantize_b_64x64_ssse3()
    106  all_zero = _mm_or_si128(cmp_mask0, cmp_mask1);  in aom_quantize_b_64x64_ssse3()
    133  qcoeff1 = _mm_and_si128(qcoeff1, cmp_mask1);  in aom_quantize_b_64x64_ssse3()
    143  scan_for_eob(&qcoeff0, &qcoeff1, cmp_mask0, cmp_mask1, iscan, 0, zero);  in aom_quantize_b_64x64_ssse3()
    155  cmp_mask1 = _mm_cmpgt_epi16(qcoeff1, zbin);  in aom_quantize_b_64x64_ssse3()
    157  all_zero = _mm_or_si128(cmp_mask0, cmp_mask1);  in aom_quantize_b_64x64_ssse3()
    176  qcoeff1 = _mm_and_si128(qcoeff1, cmp_mask1);  in aom_quantize_b_64x64_ssse3()
    186  eob0 = scan_for_eob(&qcoeff0, &qcoeff1, cmp_mask0, cmp_mask1, iscan, index,  in aom_quantize_b_64x64_ssse3()

D | quantize_x86.h
    147  static INLINE void update_mask1(__m128i *cmp_mask0, __m128i *cmp_mask1,  in update_mask1() argument
    152  all_zero = _mm_or_si128(*cmp_mask0, *cmp_mask1);  in update_mask1()
    157  __m128i mask1 = _mm_and_si128(*cmp_mask1, iscan1);  in update_mask1()
    168  __m128i coeff[4], cmp_mask0, cmp_mask1, cmp_mask2, cmp_mask3;  in update_mask0() local
    178  cmp_mask1 = _mm_cmpgt_epi32(coeff[1], threshold[1]);  in update_mask0()
    184  cmp_mask0 = _mm_packs_epi32(cmp_mask0, cmp_mask1);  in update_mask0()
    185  cmp_mask1 = _mm_packs_epi32(cmp_mask2, cmp_mask3);  in update_mask0()
    187  update_mask1(&cmp_mask0, &cmp_mask1, iscan_ptr, is_found, mask);  in update_mask0()

D | adaptive_quantize_avx2.c
     61  __m256i coeff[2], cmp_mask0, cmp_mask1;  in update_mask0_avx2() local
     67  cmp_mask1 = _mm256_cmpgt_epi32(coeff[1], threshold[1]);  in update_mask0_avx2()
     69  _mm256_permute4x64_epi64(_mm256_packs_epi32(cmp_mask0, cmp_mask1), 0xd8);  in update_mask0_avx2()

D | highbd_adaptive_quantize_avx2.c
     51  __m256i coeff[2], cmp_mask0, cmp_mask1;  in highbd_update_mask0_avx2() local
     55  cmp_mask1 = _mm256_cmpgt_epi32(coeff[1], threshold[1]);  in highbd_update_mask0_avx2()
     57  _mm256_permute4x64_epi64(_mm256_packs_epi32(cmp_mask0, cmp_mask1), 0xd8);  in highbd_update_mask0_avx2()

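The libaom highbd and adaptive hits differ from the libvpx ones in that the coefficients (or thresholds) are 32-bit, so the mask is built with _mm_cmpgt_epi32 and then narrowed to a 16-bit-per-lane mask with _mm_packs_epi32; the AVX2 files additionally apply _mm256_permute4x64_epi64(..., 0xd8) because the 256-bit pack interleaves the two 128-bit lanes and the permute restores sequential order. Below is a small sketch of that 32-bit to 16-bit mask build under assumptions: build_cmp_mask_32_sketch is a hypothetical name, and this is not the actual aom_highbd_quantize_b_adaptive_sse2() or update_mask0() code.

#include <emmintrin.h> /* SSE2 */
#include <stdint.h>
#include <stdio.h>

/* Hypothetical sketch of the highbd mask idiom from the hits above: eight
 * 32-bit coefficients are compared against a 32-bit threshold, and the two
 * 32-bit masks are packed into one 16-bit-per-lane mask. */
static __m128i build_cmp_mask_32_sketch(const int32_t *coeff,
                                        int32_t threshold_value) {
  const __m128i threshold = _mm_set1_epi32(threshold_value);

  __m128i coeff0 = _mm_loadu_si128((const __m128i *)coeff);
  __m128i coeff1 = _mm_loadu_si128((const __m128i *)(coeff + 4));

  /* 32-bit lanes: 0xFFFFFFFF where coeff > threshold, 0 otherwise. */
  __m128i cmp_mask0 = _mm_cmpgt_epi32(coeff0, threshold);
  __m128i cmp_mask1 = _mm_cmpgt_epi32(coeff1, threshold);

  /* Signed saturating pack: each all-ones/all-zeros 32-bit lane becomes an
   * all-ones/all-zeros 16-bit lane, yielding one mask for 8 coefficients.
   * (The AVX2 variants pack per 128-bit lane, which is why the hits show
   * _mm256_permute4x64_epi64(..., 0xd8) immediately after the pack.) */
  return _mm_packs_epi32(cmp_mask0, cmp_mask1);
}

int main(void) {
  int32_t coeff[8] = { 5, 200, -3, 40, 0, 7, 1000, -90 };
  int16_t lanes[8];
  __m128i mask = build_cmp_mask_32_sketch(coeff, 30);
  _mm_storeu_si128((__m128i *)lanes, mask);
  for (int i = 0; i < 8; ++i) printf("%d ", lanes[i]); /* -1 where coeff > 30 */
  printf("\n");
  return 0;
}

The packed mask is what update_mask1() in quantize_x86.h then ANDs with the iscan values (hit at line 157), apparently to track the highest scan position that survives the threshold.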