/external/XNNPACK/src/qs8-gemm/gen/ |
D | 4x4c2-minmax-ssse3-ld128.c |
  230 const __m128i vabsprod3x13 = _mm_mul_epu32(vabsacc3x1133, vmultiplier); in xnn_qs8_gemm_minmax_ukernel_4x4c2__ssse3_ld128() local
  240 const __m128i vprod3x13 = _mm_sub_epi64(_mm_xor_si128(vabsprod3x13, vnmask3x13), vnmask3x13); in xnn_qs8_gemm_minmax_ukernel_4x4c2__ssse3_ld128()
|
D | 4x4c2-minmax-sse2-ld64.c |
  230 const __m128i vabsprod3x13 = _mm_mul_epu32(vabsacc3x1133, vmultiplier); in xnn_qs8_gemm_minmax_ukernel_4x4c2__sse2_ld64() local
  240 const __m128i vprod3x13 = _mm_sub_epi64(_mm_xor_si128(vabsprod3x13, vnmask3x13), vnmask3x13); in xnn_qs8_gemm_minmax_ukernel_4x4c2__sse2_ld64()
|
D | 4x4c2-minmax-ssse3-ld64.c |
  230 const __m128i vabsprod3x13 = _mm_mul_epu32(vabsacc3x1133, vmultiplier); in xnn_qs8_gemm_minmax_ukernel_4x4c2__ssse3_ld64() local
  240 const __m128i vprod3x13 = _mm_sub_epi64(_mm_xor_si128(vabsprod3x13, vnmask3x13), vnmask3x13); in xnn_qs8_gemm_minmax_ukernel_4x4c2__ssse3_ld64()
|
D | 4x4c2-xw-minmax-ssse3.c |
  223 const __m128i vabsprod3x13 = _mm_mul_epu32(vabsacc3x1133, vmultiplier); in xnn_qs8_gemm_xw_minmax_ukernel_4x4c2__ssse3() local
  233 const __m128i vprod3x13 = _mm_sub_epi64(_mm_xor_si128(vabsprod3x13, vnmask3x13), vnmask3x13); in xnn_qs8_gemm_xw_minmax_ukernel_4x4c2__ssse3()
|
D | 4x4c2-minmax-sse2-ld128.c |
  230 const __m128i vabsprod3x13 = _mm_mul_epu32(vabsacc3x1133, vmultiplier); in xnn_qs8_gemm_minmax_ukernel_4x4c2__sse2_ld128() local
  240 const __m128i vprod3x13 = _mm_sub_epi64(_mm_xor_si128(vabsprod3x13, vnmask3x13), vnmask3x13); in xnn_qs8_gemm_minmax_ukernel_4x4c2__sse2_ld128()
|
D | 4x4c2-xw-minmax-sse2.c |
  223 const __m128i vabsprod3x13 = _mm_mul_epu32(vabsacc3x1133, vmultiplier); in xnn_qs8_gemm_xw_minmax_ukernel_4x4c2__sse2() local
  233 const __m128i vprod3x13 = _mm_sub_epi64(_mm_xor_si128(vabsprod3x13, vnmask3x13), vnmask3x13); in xnn_qs8_gemm_xw_minmax_ukernel_4x4c2__sse2()
|
/external/XNNPACK/src/qu8-gemm/ |
D | 4x4c2-minmax-sse2.c |
  235 const __m128i vabsprod3x13 = _mm_mul_epu32(vabsacc3x1032, vmultiplier); in xnn_qu8_gemm_minmax_ukernel_4x4c2__sse2() local
  245 const __m128i vprod3x13 = _mm_sub_epi64(_mm_xor_si128(vabsprod3x13, vnmask3x13), vnmask3x13); in xnn_qu8_gemm_minmax_ukernel_4x4c2__sse2()
|
/external/XNNPACK/src/qs8-igemm/gen/ |
D | 4x4c2-minmax-sse2-ld128.c |
  249 const __m128i vabsprod3x13 = _mm_mul_epu32(vabsacc3x1133, vmultiplier); in xnn_qs8_igemm_minmax_ukernel_4x4c2__sse2_ld128() local
  259 const __m128i vprod3x13 = _mm_sub_epi64(_mm_xor_si128(vabsprod3x13, vnmask3x13), vnmask3x13); in xnn_qs8_igemm_minmax_ukernel_4x4c2__sse2_ld128()
|
D | 4x4c2-minmax-ssse3-ld128.c |
  249 const __m128i vabsprod3x13 = _mm_mul_epu32(vabsacc3x1133, vmultiplier); in xnn_qs8_igemm_minmax_ukernel_4x4c2__ssse3_ld128() local
  259 const __m128i vprod3x13 = _mm_sub_epi64(_mm_xor_si128(vabsprod3x13, vnmask3x13), vnmask3x13); in xnn_qs8_igemm_minmax_ukernel_4x4c2__ssse3_ld128()
|
D | 4x4c2-minmax-ssse3-ld64.c |
  249 const __m128i vabsprod3x13 = _mm_mul_epu32(vabsacc3x1133, vmultiplier); in xnn_qs8_igemm_minmax_ukernel_4x4c2__ssse3_ld64() local
  259 const __m128i vprod3x13 = _mm_sub_epi64(_mm_xor_si128(vabsprod3x13, vnmask3x13), vnmask3x13); in xnn_qs8_igemm_minmax_ukernel_4x4c2__ssse3_ld64()
|
D | 4x4c2-minmax-sse2-ld64.c |
  249 const __m128i vabsprod3x13 = _mm_mul_epu32(vabsacc3x1133, vmultiplier); in xnn_qs8_igemm_minmax_ukernel_4x4c2__sse2_ld64() local
  259 const __m128i vprod3x13 = _mm_sub_epi64(_mm_xor_si128(vabsprod3x13, vnmask3x13), vnmask3x13); in xnn_qs8_igemm_minmax_ukernel_4x4c2__sse2_ld64()
|
/external/XNNPACK/src/qu8-igemm/ |
D | 4x4c2-minmax-sse2.c |
  219 const __m128i vabsprod3x13 = _mm_mul_epu32(vabsacc3x1032, vmultiplier); in xnn_qu8_igemm_minmax_ukernel_4x4c2__sse2() local
  229 const __m128i vprod3x13 = _mm_sub_epi64(_mm_xor_si128(vabsprod3x13, vnmask3x13), vnmask3x13); in xnn_qu8_igemm_minmax_ukernel_4x4c2__sse2()
|
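All of the entries above are occurrences of the same sign-restoration idiom: SSE2 and SSSE3 provide no signed 32x32->64-bit multiply (_mm_mul_epi32 is SSE4.1), so these kernels multiply absolute values with _mm_mul_epu32 and then conditionally negate the 64-bit products through the (x ^ m) - m identity against a negation mask. The sketch below reproduces that idiom in isolation; the variable names and scalar test values are illustrative only and are not taken from the kernels.

#include <emmintrin.h>  /* SSE2 */
#include <stdint.h>
#include <stdio.h>

int main(void) {
  const int32_t a = -7;
  const int32_t b = 100000;

  const __m128i va = _mm_set1_epi32(a);
  const __m128i vb = _mm_set1_epi32(b);

  /* Negation masks: all-ones in every 32-bit lane that holds a negative value. */
  const __m128i vnmask_a = _mm_cmpgt_epi32(_mm_setzero_si128(), va);
  const __m128i vnmask_b = _mm_cmpgt_epi32(_mm_setzero_si128(), vb);

  /* Absolute values via the same (x ^ m) - m identity. */
  const __m128i vabs_a = _mm_sub_epi32(_mm_xor_si128(va, vnmask_a), vnmask_a);
  const __m128i vabs_b = _mm_sub_epi32(_mm_xor_si128(vb, vnmask_b), vnmask_b);

  /* Unsigned 32x32->64-bit multiply of the even lanes (0 and 2). */
  const __m128i vabsprod = _mm_mul_epu32(vabs_a, vabs_b);

  /* The product is negative iff exactly one operand was negative. */
  const __m128i vnmask32 = _mm_xor_si128(vnmask_a, vnmask_b);
  /* Spread the 32-bit masks of lanes 0 and 2 across the matching 64-bit lanes. */
  const __m128i vnmask64 = _mm_shuffle_epi32(vnmask32, _MM_SHUFFLE(2, 2, 0, 0));

  /* Restore the sign: (|a*b| ^ m) - m == -|a*b| when m is all-ones. */
  const __m128i vprod = _mm_sub_epi64(_mm_xor_si128(vabsprod, vnmask64), vnmask64);

  int64_t out[2];
  _mm_storeu_si128((__m128i*) out, vprod);
  printf("%lld (expected %lld)\n", (long long) out[0], (long long) a * (long long) b);
  return 0;
}

Judging by the operand names, the qs8 kernels feed a 1,1,3,3-shuffled accumulator (vabsacc3x1133) into the multiply while the qu8 kernels use a 1,0,3,2 shuffle (vabsacc3x1032), so vabsprod3x13 holds the products of accumulator lanes 1 and 3 of row 3; the sign-restore step (_mm_sub_epi64 over an _mm_xor_si128) is identical in every file listed.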