/external/XNNPACK/src/qs8-dwconv/gen/
D | up8x9-minmax-sse41-mul16.c
    216  const __m128i vq31prod02 = _mm_srli_epi64(vprod02, 31);  in xnn_qs8_dwconv_minmax_ukernel_up8x9__sse41_mul16() local
    221  const __m128i vq31prod0123 = _mm_blend_epi16(vq31prod02, vq31prod13, 0xCC);  in xnn_qs8_dwconv_minmax_ukernel_up8x9__sse41_mul16()
    376  const __m128i vq31prod02 = _mm_srli_epi64(vprod02, 31);  in xnn_qs8_dwconv_minmax_ukernel_up8x9__sse41_mul16() local
    381  const __m128i vq31prod0123 = _mm_blend_epi16(vq31prod02, vq31prod13, 0xCC);  in xnn_qs8_dwconv_minmax_ukernel_up8x9__sse41_mul16()

D | up8x9-minmax-sse2-mul16.c
    232  const __m128i vq31prod02 = _mm_srli_epi64(_mm_add_epi64(vprod02, vrounding), 31);  in xnn_qs8_dwconv_minmax_ukernel_up8x9__sse2_mul16() local
    238  _mm_castsi128_ps(vq31prod02), _mm_castsi128_ps(vq31prod13), _MM_SHUFFLE(2, 0, 2, 0)));  in xnn_qs8_dwconv_minmax_ukernel_up8x9__sse2_mul16()
    412  const __m128i vq31prod02 = _mm_srli_epi64(_mm_add_epi64(vprod02, vrounding), 31);  in xnn_qs8_dwconv_minmax_ukernel_up8x9__sse2_mul16() local
    418  _mm_castsi128_ps(vq31prod02), _mm_castsi128_ps(vq31prod13), _MM_SHUFFLE(2, 0, 2, 0)));  in xnn_qs8_dwconv_minmax_ukernel_up8x9__sse2_mul16()

D | up8x9-minmax-ssse3-mul16.c
    232  const __m128i vq31prod02 = _mm_srli_epi64(_mm_add_epi64(vprod02, vrounding), 31);  in xnn_qs8_dwconv_minmax_ukernel_up8x9__ssse3_mul16() local
    238  _mm_castsi128_ps(vq31prod02), _mm_castsi128_ps(vq31prod13), _MM_SHUFFLE(2, 0, 2, 0)));  in xnn_qs8_dwconv_minmax_ukernel_up8x9__ssse3_mul16()
    412  const __m128i vq31prod02 = _mm_srli_epi64(_mm_add_epi64(vprod02, vrounding), 31);  in xnn_qs8_dwconv_minmax_ukernel_up8x9__ssse3_mul16() local
    418  _mm_castsi128_ps(vq31prod02), _mm_castsi128_ps(vq31prod13), _MM_SHUFFLE(2, 0, 2, 0)));  in xnn_qs8_dwconv_minmax_ukernel_up8x9__ssse3_mul16()

D | up16x9-minmax-sse41-mul16.c
    296  const __m128i vq31prod02 = _mm_srli_epi64(vprod02, 31);  in xnn_qs8_dwconv_minmax_ukernel_up16x9__sse41_mul16() local
    305  const __m128i vq31prod0123 = _mm_blend_epi16(vq31prod02, vq31prod13, 0xCC);  in xnn_qs8_dwconv_minmax_ukernel_up16x9__sse41_mul16()
    484  const __m128i vq31prod02 = _mm_srli_epi64(vprod02, 31);  in xnn_qs8_dwconv_minmax_ukernel_up16x9__sse41_mul16() local
    489  const __m128i vq31prod0123 = _mm_blend_epi16(vq31prod02, vq31prod13, 0xCC);  in xnn_qs8_dwconv_minmax_ukernel_up16x9__sse41_mul16()

D | up16x9-minmax-ssse3-mul16.c
    324  const __m128i vq31prod02 = _mm_srli_epi64(_mm_add_epi64(vprod02, vrounding), 31);  in xnn_qs8_dwconv_minmax_ukernel_up16x9__ssse3_mul16() local
    334  _mm_castsi128_ps(vq31prod02), _mm_castsi128_ps(vq31prod13), _MM_SHUFFLE(2, 0, 2, 0)));  in xnn_qs8_dwconv_minmax_ukernel_up16x9__ssse3_mul16()
    536  const __m128i vq31prod02 = _mm_srli_epi64(_mm_add_epi64(vprod02, vrounding), 31);  in xnn_qs8_dwconv_minmax_ukernel_up16x9__ssse3_mul16() local
    542  _mm_castsi128_ps(vq31prod02), _mm_castsi128_ps(vq31prod13), _MM_SHUFFLE(2, 0, 2, 0)));  in xnn_qs8_dwconv_minmax_ukernel_up16x9__ssse3_mul16()

D | up16x9-minmax-sse2-mul16.c
    324  const __m128i vq31prod02 = _mm_srli_epi64(_mm_add_epi64(vprod02, vrounding), 31);  in xnn_qs8_dwconv_minmax_ukernel_up16x9__sse2_mul16() local
    334  _mm_castsi128_ps(vq31prod02), _mm_castsi128_ps(vq31prod13), _MM_SHUFFLE(2, 0, 2, 0)));  in xnn_qs8_dwconv_minmax_ukernel_up16x9__sse2_mul16()
    536  const __m128i vq31prod02 = _mm_srli_epi64(_mm_add_epi64(vprod02, vrounding), 31);  in xnn_qs8_dwconv_minmax_ukernel_up16x9__sse2_mul16() local
    542  _mm_castsi128_ps(vq31prod02), _mm_castsi128_ps(vq31prod13), _MM_SHUFFLE(2, 0, 2, 0)));  in xnn_qs8_dwconv_minmax_ukernel_up16x9__sse2_mul16()

D | up24x9-minmax-sse41-mul16.c
    376  const __m128i vq31prod02 = _mm_srli_epi64(vprod02, 31);  in xnn_qs8_dwconv_minmax_ukernel_up24x9__sse41_mul16() local
    389  const __m128i vq31prod0123 = _mm_blend_epi16(vq31prod02, vq31prod13, 0xCC);  in xnn_qs8_dwconv_minmax_ukernel_up24x9__sse41_mul16()
    582  const __m128i vq31prod02 = _mm_srli_epi64(vprod02, 31);  in xnn_qs8_dwconv_minmax_ukernel_up24x9__sse41_mul16() local
    587  const __m128i vq31prod0123 = _mm_blend_epi16(vq31prod02, vq31prod13, 0xCC);  in xnn_qs8_dwconv_minmax_ukernel_up24x9__sse41_mul16()

D | up24x9-minmax-ssse3-mul16.c
    416  const __m128i vq31prod02 = _mm_srli_epi64(_mm_add_epi64(vprod02, vrounding), 31);  in xnn_qs8_dwconv_minmax_ukernel_up24x9__ssse3_mul16() local
    430  _mm_castsi128_ps(vq31prod02), _mm_castsi128_ps(vq31prod13), _MM_SHUFFLE(2, 0, 2, 0)));  in xnn_qs8_dwconv_minmax_ukernel_up24x9__ssse3_mul16()
    650  const __m128i vq31prod02 = _mm_srli_epi64(_mm_add_epi64(vprod02, vrounding), 31);  in xnn_qs8_dwconv_minmax_ukernel_up24x9__ssse3_mul16() local
    656  _mm_castsi128_ps(vq31prod02), _mm_castsi128_ps(vq31prod13), _MM_SHUFFLE(2, 0, 2, 0)));  in xnn_qs8_dwconv_minmax_ukernel_up24x9__ssse3_mul16()

D | up24x9-minmax-sse2-mul16.c
    416  const __m128i vq31prod02 = _mm_srli_epi64(_mm_add_epi64(vprod02, vrounding), 31);  in xnn_qs8_dwconv_minmax_ukernel_up24x9__sse2_mul16() local
    430  _mm_castsi128_ps(vq31prod02), _mm_castsi128_ps(vq31prod13), _MM_SHUFFLE(2, 0, 2, 0)));  in xnn_qs8_dwconv_minmax_ukernel_up24x9__sse2_mul16()
    650  const __m128i vq31prod02 = _mm_srli_epi64(_mm_add_epi64(vprod02, vrounding), 31);  in xnn_qs8_dwconv_minmax_ukernel_up24x9__sse2_mul16() local
    656  _mm_castsi128_ps(vq31prod02), _mm_castsi128_ps(vq31prod13), _MM_SHUFFLE(2, 0, 2, 0)));  in xnn_qs8_dwconv_minmax_ukernel_up24x9__sse2_mul16()

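The sse41-mul16 entries above all hit the same two statements of the Q31 requantization step: the rounded 64-bit products of the even lanes are shifted right by 31 so each result lands in the low dword of its 64-bit lane, and _mm_blend_epi16 with mask 0xCC then interleaves them with the odd-lane results into one vector of four 32-bit Q31 products. The sketch below is not the XNNPACK source; it only reconstructs the surrounding lines that the listing does not show, under these assumptions: the rounding constant (1 << 30) is already folded into vprod02/vprod13, vq31prod13 is the doubled odd-lane product (so its Q31 result sits in the high dword of each 64-bit lane), and every variable name not on a quoted line is a placeholder. Build with -msse4.1.

    /*
     * Sketch only, not the XNNPACK source: reconstructs the SSE4.1 merge of the
     * even- and odd-lane Q31 products around the two quoted statements.
     * Assumed (not shown in the listing): vprod02/vprod13 already contain the
     * rounding term (1 << 30), vq31prod13 is the doubled odd-lane product, and
     * names not on the quoted lines are placeholders.
     */
    #include <smmintrin.h>  /* SSE4.1 */
    #include <inttypes.h>
    #include <stdio.h>

    /* Reference: Q31 rounding multiply-high, (a * m + 2^30) >> 31. */
    static int32_t scalar_q31_mul(int32_t a, int32_t m) {
      return (int32_t) (((int64_t) a * (int64_t) m + (INT64_C(1) << 30)) >> 31);
    }

    int main(void) {
      const int32_t acc[4] = { 123456, -98765, -4096, 7 };
      const int32_t multiplier = 0x40000000;  /* example Q31 multiplier */

      const __m128i vacc0123 = _mm_loadu_si128((const __m128i*) acc);
      const __m128i vmultiplier = _mm_set1_epi32(multiplier);
      const __m128i vrounding = _mm_set1_epi64x(INT64_C(1) << 30);

      /* Rounded 64-bit products of the even lanes (0, 2) and odd lanes (1, 3). */
      const __m128i vacc13 = _mm_shuffle_epi32(vacc0123, _MM_SHUFFLE(3, 3, 1, 1));
      const __m128i vprod02 = _mm_add_epi64(_mm_mul_epi32(vacc0123, vmultiplier), vrounding);
      const __m128i vprod13 = _mm_add_epi64(_mm_mul_epi32(vacc13, vmultiplier), vrounding);

      /* Quoted in the listing: the shift and the 0xCC blend.  The doubling of
       * vprod13 is assumed; it moves each odd-lane Q31 result into the high
       * dword of its 64-bit lane so the blend yields [q0 q1 q2 q3]. */
      const __m128i vq31prod02 = _mm_srli_epi64(vprod02, 31);
      const __m128i vq31prod13 = _mm_add_epi64(vprod13, vprod13);
      const __m128i vq31prod0123 = _mm_blend_epi16(vq31prod02, vq31prod13, 0xCC);

      int32_t out[4];
      _mm_storeu_si128((__m128i*) out, vq31prod0123);
      for (int i = 0; i < 4; i++) {
        printf("lane %d: simd=%" PRId32 " scalar=%" PRId32 "\n",
               i, out[i], scalar_q31_mul(acc[i], multiplier));
      }
      return 0;
    }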
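The sse2-mul16 and ssse3-mul16 entries perform the same step without _mm_blend_epi16: the rounding add is visible in the quoted shift, and the quoted continuation line merges the two halves by bit-casting to __m128 and selecting dwords 0 and 2 of each half with _mm_shuffle_ps(..., _MM_SHUFFLE(2, 0, 2, 0)). The sketch below is again not the XNNPACK source; it assumes the products at this point are non-negative (the generated kernels multiply absolute values and restore signs elsewhere), that the reorder from lane order 0-2-1-3 back to 0-1-2-3 happens on a later line the listing does not capture, and that variable names outside the quoted lines (e.g. vq31prod0213) are placeholders. Plain SSE2 is enough to build it.

    /*
     * Sketch only, not the XNNPACK source: the SSE2/SSSE3 variant of the merge.
     * Assumed (not shown in the listing): the 64-bit products are non-negative
     * (absolute values, signs handled elsewhere), the final 0-2-1-3 -> 0-1-2-3
     * reorder is a later _mm_shuffle_epi32, and names outside the quoted lines
     * are placeholders.
     */
    #include <emmintrin.h>  /* SSE2 */
    #include <inttypes.h>
    #include <stdio.h>

    int main(void) {
      const uint32_t absacc[4] = { 123456, 98765, 4096, 7 };  /* |accumulators| */
      const uint32_t multiplier = UINT32_C(0x40000000);       /* example Q31 multiplier */

      const __m128i vabsacc0123 = _mm_loadu_si128((const __m128i*) absacc);
      const __m128i vmultiplier = _mm_set1_epi32((int32_t) multiplier);
      const __m128i vrounding = _mm_set1_epi64x(INT64_C(1) << 30);

      /* Unsigned 32x32 -> 64-bit products of even lanes (0, 2) and odd lanes (1, 3). */
      const __m128i vabsacc13 = _mm_shuffle_epi32(vabsacc0123, _MM_SHUFFLE(3, 3, 1, 1));
      const __m128i vprod02 = _mm_mul_epu32(vabsacc0123, vmultiplier);
      const __m128i vprod13 = _mm_mul_epu32(vabsacc13, vmultiplier);

      /* Quoted in the listing: add the rounding term, shift the 64-bit products
       * right by 31, then pick the low dword of every 64-bit lane with a float
       * shuffle, since SSE2 has no integer lane blend. */
      const __m128i vq31prod02 = _mm_srli_epi64(_mm_add_epi64(vprod02, vrounding), 31);
      const __m128i vq31prod13 = _mm_srli_epi64(_mm_add_epi64(vprod13, vrounding), 31);
      const __m128i vq31prod0213 = _mm_castps_si128(_mm_shuffle_ps(
          _mm_castsi128_ps(vq31prod02), _mm_castsi128_ps(vq31prod13), _MM_SHUFFLE(2, 0, 2, 0)));

      /* Assumed follow-up: [q0 q2 q1 q3] -> [q0 q1 q2 q3]. */
      const __m128i vq31prod0123 = _mm_shuffle_epi32(vq31prod0213, _MM_SHUFFLE(3, 1, 2, 0));

      uint32_t out[4];
      _mm_storeu_si128((__m128i*) out, vq31prod0123);
      for (int i = 0; i < 4; i++) {
        const uint32_t expected =
            (uint32_t) (((uint64_t) absacc[i] * multiplier + (UINT64_C(1) << 30)) >> 31);
        printf("lane %d: simd=%" PRIu32 " expected=%" PRIu32 "\n", i, out[i], expected);
      }
      return 0;
    }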