
Searched refs:vbias0x6 (Results 1 – 9 of 9) sorted by relevance

/external/XNNPACK/src/qs8-gemm/gen/
1x8c8-xw-minmax-avx2.c
    54  const __m128i vbias0x6 = _mm_loadu_si32((const void*) ((uintptr_t) w + 6 * sizeof(int32_t)));  in xnn_qs8_gemm_xw_minmax_ukernel_1x8c8__avx2() local
    56  __m256i vacc0x67 = _mm256_inserti128_si256(_mm256_castsi128_si256(vbias0x6), vbias0x7, 1);  in xnn_qs8_gemm_xw_minmax_ukernel_1x8c8__avx2()
1x8c8-minmax-avx2.c
    54  const __m128i vbias0x6 = _mm_loadu_si32((const void*) ((uintptr_t) w + 6 * sizeof(int32_t)));  in xnn_qs8_gemm_minmax_ukernel_1x8c8__avx2() local
    56  __m256i vacc0x67 = _mm256_inserti128_si256(_mm256_castsi128_si256(vbias0x6), vbias0x7, 1);  in xnn_qs8_gemm_minmax_ukernel_1x8c8__avx2()
2x8c8-xw-minmax-avx2.c
    60  const __m128i vbias0x6 = _mm_loadu_si32((const void*) ((uintptr_t) w + 6 * sizeof(int32_t)));  in xnn_qs8_gemm_xw_minmax_ukernel_2x8c8__avx2() local
    62  __m256i vacc0x67 = _mm256_inserti128_si256(_mm256_castsi128_si256(vbias0x6), vbias0x7, 1);  in xnn_qs8_gemm_xw_minmax_ukernel_2x8c8__avx2()
2x8c8-minmax-avx2.c
    60  const __m128i vbias0x6 = _mm_loadu_si32((const void*) ((uintptr_t) w + 6 * sizeof(int32_t)));  in xnn_qs8_gemm_minmax_ukernel_2x8c8__avx2() local
    62  __m256i vacc0x67 = _mm256_inserti128_si256(_mm256_castsi128_si256(vbias0x6), vbias0x7, 1);  in xnn_qs8_gemm_minmax_ukernel_2x8c8__avx2()
3x8c8-xw-minmax-avx2.c
    66  const __m128i vbias0x6 = _mm_loadu_si32((const void*) ((uintptr_t) w + 6 * sizeof(int32_t)));  in xnn_qs8_gemm_xw_minmax_ukernel_3x8c8__avx2() local
    68  __m256i vacc0x67 = _mm256_inserti128_si256(_mm256_castsi128_si256(vbias0x6), vbias0x7, 1);  in xnn_qs8_gemm_xw_minmax_ukernel_3x8c8__avx2()
3x8c8-minmax-avx2.c
    66  const __m128i vbias0x6 = _mm_loadu_si32((const void*) ((uintptr_t) w + 6 * sizeof(int32_t)));  in xnn_qs8_gemm_minmax_ukernel_3x8c8__avx2() local
    68  __m256i vacc0x67 = _mm256_inserti128_si256(_mm256_castsi128_si256(vbias0x6), vbias0x7, 1);  in xnn_qs8_gemm_minmax_ukernel_3x8c8__avx2()
/external/XNNPACK/src/qs8-igemm/gen/
1x8c8-minmax-avx2.c
    57  const __m128i vbias0x6 = _mm_loadu_si32((const void*) ((uintptr_t) w + 6 * sizeof(int32_t)));  in xnn_qs8_igemm_minmax_ukernel_1x8c8__avx2() local
    59  __m256i vacc0x67 = _mm256_inserti128_si256(_mm256_castsi128_si256(vbias0x6), vbias0x7, 1);  in xnn_qs8_igemm_minmax_ukernel_1x8c8__avx2()
2x8c8-minmax-avx2.c
    61  const __m128i vbias0x6 = _mm_loadu_si32((const void*) ((uintptr_t) w + 6 * sizeof(int32_t)));  in xnn_qs8_igemm_minmax_ukernel_2x8c8__avx2() local
    63  __m256i vacc0x67 = _mm256_inserti128_si256(_mm256_castsi128_si256(vbias0x6), vbias0x7, 1);  in xnn_qs8_igemm_minmax_ukernel_2x8c8__avx2()
3x8c8-minmax-avx2.c
    65  const __m128i vbias0x6 = _mm_loadu_si32((const void*) ((uintptr_t) w + 6 * sizeof(int32_t)));  in xnn_qs8_igemm_minmax_ukernel_3x8c8__avx2() local
    67  __m256i vacc0x67 = _mm256_inserti128_si256(_mm256_castsi128_si256(vbias0x6), vbias0x7, 1);  in xnn_qs8_igemm_minmax_ukernel_3x8c8__avx2()
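
All nine matches share the same two-step AVX2 idiom: the int32 bias for output column 6 is loaded into the low lane of an __m128i with _mm_loadu_si32, then paired with the column-7 bias into one 256-bit accumulator via _mm256_castsi128_si256 plus _mm256_inserti128_si256. The naming vacc0x67 suggests this vector carries the running sums for row 0, columns 6 and 7. Below is a minimal, self-contained sketch of that idiom only; it is not XNNPACK code. The w array contents and the printf spot-check are illustrative assumptions, and it presumes a compiler recent enough to provide _mm_loadu_si32, built with -mavx2.

#include <immintrin.h>
#include <stdint.h>
#include <stdio.h>

int main(void) {
  /* Hypothetical packed-weights buffer: the first 8 int32 values stand in
     for the per-output-channel biases addressed through `w` in the kernels. */
  const int32_t w[8] = {10, 11, 12, 13, 14, 15, 16, 17};

  /* Load bias 6 and bias 7, each into the low 32 bits of an __m128i
     (the remaining 96 bits are zeroed by _mm_loadu_si32). */
  const __m128i vbias0x6 =
      _mm_loadu_si32((const void*) ((uintptr_t) w + 6 * sizeof(int32_t)));
  const __m128i vbias0x7 =
      _mm_loadu_si32((const void*) ((uintptr_t) w + 7 * sizeof(int32_t)));

  /* Place vbias0x6 in the low 128-bit half and vbias0x7 in the high half,
     forming the initial accumulator for output columns 6 and 7. */
  __m256i vacc0x67 =
      _mm256_inserti128_si256(_mm256_castsi128_si256(vbias0x6), vbias0x7, 1);

  /* Spot-check: element 0 holds bias 6 (16), element 4 holds bias 7 (17). */
  printf("%d %d\n",
         _mm256_extract_epi32(vacc0x67, 0),
         _mm256_extract_epi32(vacc0x67, 4));
  return 0;
}

In the kernels themselves the accumulator built this way is then fed by the multiply-accumulate loop over the quantized inputs; the sketch stops after the bias-initialization step that the search results above highlight.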