
Searched refs:vminus_inf (Results 1 – 25 of 26) sorted by relevance

/external/XNNPACK/src/f32-raddextexp/gen/
avx512f-p5-scalef-x192-acc6.c
38 const __m512 vminus_inf = _mm512_set1_ps(-INFINITY); in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x192_acc6() local
46 __m512 vacce0 = vminus_inf; in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x192_acc6()
47 __m512 vacce1 = vminus_inf; in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x192_acc6()
48 __m512 vacce2 = vminus_inf; in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x192_acc6()
49 __m512 vacce3 = vminus_inf; in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x192_acc6()
50 __m512 vacce4 = vminus_inf; in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x192_acc6()
51 __m512 vacce5 = vminus_inf; in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x192_acc6()
avx512f-p5-scalef-x160-acc5.c
38 const __m512 vminus_inf = _mm512_set1_ps(-INFINITY); in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x160_acc5() local
45 __m512 vacce0 = vminus_inf; in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x160_acc5()
46 __m512 vacce1 = vminus_inf; in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x160_acc5()
47 __m512 vacce2 = vminus_inf; in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x160_acc5()
48 __m512 vacce3 = vminus_inf; in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x160_acc5()
49 __m512 vacce4 = vminus_inf; in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x160_acc5()
avx512f-p5-scalef-x128-acc4.c
38 const __m512 vminus_inf = _mm512_set1_ps(-INFINITY); in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x128_acc4() local
44 __m512 vacce0 = vminus_inf; in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x128_acc4()
45 __m512 vacce1 = vminus_inf; in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x128_acc4()
46 __m512 vacce2 = vminus_inf; in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x128_acc4()
47 __m512 vacce3 = vminus_inf; in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x128_acc4()
avx2-p5-x96-acc6.c
35 const __m256 vminus_inf = _mm256_set1_ps(-INFINITY); in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc6() local
50 __m256 vacce0 = vminus_inf; in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc6()
51 __m256 vacce1 = vminus_inf; in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc6()
52 __m256 vacce2 = vminus_inf; in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc6()
53 __m256 vacce3 = vminus_inf; in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc6()
54 __m256 vacce4 = vminus_inf; in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc6()
55 __m256 vacce5 = vminus_inf; in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc6()
avx2-p5-x80-acc5.c
35 const __m256 vminus_inf = _mm256_set1_ps(-INFINITY); in xnn_f32_raddextexp_ukernel__avx2_p5_x80_acc5() local
49 __m256 vacce0 = vminus_inf; in xnn_f32_raddextexp_ukernel__avx2_p5_x80_acc5()
50 __m256 vacce1 = vminus_inf; in xnn_f32_raddextexp_ukernel__avx2_p5_x80_acc5()
51 __m256 vacce2 = vminus_inf; in xnn_f32_raddextexp_ukernel__avx2_p5_x80_acc5()
52 __m256 vacce3 = vminus_inf; in xnn_f32_raddextexp_ukernel__avx2_p5_x80_acc5()
53 __m256 vacce4 = vminus_inf; in xnn_f32_raddextexp_ukernel__avx2_p5_x80_acc5()
avx2-p5-x64-acc4.c
35 const __m256 vminus_inf = _mm256_set1_ps(-INFINITY); in xnn_f32_raddextexp_ukernel__avx2_p5_x64_acc4() local
48 __m256 vacce0 = vminus_inf; in xnn_f32_raddextexp_ukernel__avx2_p5_x64_acc4()
49 __m256 vacce1 = vminus_inf; in xnn_f32_raddextexp_ukernel__avx2_p5_x64_acc4()
50 __m256 vacce2 = vminus_inf; in xnn_f32_raddextexp_ukernel__avx2_p5_x64_acc4()
51 __m256 vacce3 = vminus_inf; in xnn_f32_raddextexp_ukernel__avx2_p5_x64_acc4()
avx512f-p5-scalef-x144-acc3.c
38 const __m512 vminus_inf = _mm512_set1_ps(-INFINITY); in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x144_acc3() local
43 __m512 vacce0 = vminus_inf; in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x144_acc3()
44 __m512 vacce1 = vminus_inf; in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x144_acc3()
45 __m512 vacce2 = vminus_inf; in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x144_acc3()
avx512f-p5-scalef-x192-acc3.c
38 const __m512 vminus_inf = _mm512_set1_ps(-INFINITY); in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x192_acc3() local
43 __m512 vacce0 = vminus_inf; in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x192_acc3()
44 __m512 vacce1 = vminus_inf; in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x192_acc3()
45 __m512 vacce2 = vminus_inf; in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x192_acc3()
avx2-p5-x72-acc3.c
35 const __m256 vminus_inf = _mm256_set1_ps(-INFINITY); in xnn_f32_raddextexp_ukernel__avx2_p5_x72_acc3() local
47 __m256 vacce0 = vminus_inf; in xnn_f32_raddextexp_ukernel__avx2_p5_x72_acc3()
48 __m256 vacce1 = vminus_inf; in xnn_f32_raddextexp_ukernel__avx2_p5_x72_acc3()
49 __m256 vacce2 = vminus_inf; in xnn_f32_raddextexp_ukernel__avx2_p5_x72_acc3()
avx512f-p5-scalef-x128-acc2.c
38 const __m512 vminus_inf = _mm512_set1_ps(-INFINITY); in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x128_acc2() local
42 __m512 vacce0 = vminus_inf; in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x128_acc2()
43 __m512 vacce1 = vminus_inf; in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x128_acc2()
avx512f-p5-scalef-x160-acc2.c
38 const __m512 vminus_inf = _mm512_set1_ps(-INFINITY); in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x160_acc2() local
42 __m512 vacce0 = vminus_inf; in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x160_acc2()
43 __m512 vacce1 = vminus_inf; in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x160_acc2()
avx2-p5-x96-acc3.c
35 const __m256 vminus_inf = _mm256_set1_ps(-INFINITY); in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc3() local
47 __m256 vacce0 = vminus_inf; in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc3()
48 __m256 vacce1 = vminus_inf; in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc3()
49 __m256 vacce2 = vminus_inf; in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc3()
avx512f-p5-scalef-x192-acc2.c
38 const __m512 vminus_inf = _mm512_set1_ps(-INFINITY); in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x192_acc2() local
42 __m512 vacce0 = vminus_inf; in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x192_acc2()
43 __m512 vacce1 = vminus_inf; in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x192_acc2()
avx2-p5-x64-acc2.c
35 const __m256 vminus_inf = _mm256_set1_ps(-INFINITY); in xnn_f32_raddextexp_ukernel__avx2_p5_x64_acc2() local
46 __m256 vacce0 = vminus_inf; in xnn_f32_raddextexp_ukernel__avx2_p5_x64_acc2()
47 __m256 vacce1 = vminus_inf; in xnn_f32_raddextexp_ukernel__avx2_p5_x64_acc2()
avx2-p5-x80-acc2.c
35 const __m256 vminus_inf = _mm256_set1_ps(-INFINITY); in xnn_f32_raddextexp_ukernel__avx2_p5_x80_acc2() local
46 __m256 vacce0 = vminus_inf; in xnn_f32_raddextexp_ukernel__avx2_p5_x80_acc2()
47 __m256 vacce1 = vminus_inf; in xnn_f32_raddextexp_ukernel__avx2_p5_x80_acc2()
avx512f-p5-scalef-x128.c
38 const __m512 vminus_inf = _mm512_set1_ps(-INFINITY); in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x128() local
41 __m512 vacce0 = vminus_inf; in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x128()
avx2-p5-x96-acc2.c
35 const __m256 vminus_inf = _mm256_set1_ps(-INFINITY); in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc2() local
46 __m256 vacce0 = vminus_inf; in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc2()
47 __m256 vacce1 = vminus_inf; in xnn_f32_raddextexp_ukernel__avx2_p5_x96_acc2()
avx512f-p5-scalef-x144.c
38 const __m512 vminus_inf = _mm512_set1_ps(-INFINITY); in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x144() local
41 __m512 vacce0 = vminus_inf; in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x144()
avx512f-p5-scalef-x160.c
38 const __m512 vminus_inf = _mm512_set1_ps(-INFINITY); in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x160() local
41 __m512 vacce0 = vminus_inf; in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x160()
avx2-p5-x64.c
35 const __m256 vminus_inf = _mm256_set1_ps(-INFINITY); in xnn_f32_raddextexp_ukernel__avx2_p5_x64() local
45 __m256 vacce0 = vminus_inf; in xnn_f32_raddextexp_ukernel__avx2_p5_x64()
avx2-p5-x72.c
35 const __m256 vminus_inf = _mm256_set1_ps(-INFINITY); in xnn_f32_raddextexp_ukernel__avx2_p5_x72() local
45 __m256 vacce0 = vminus_inf; in xnn_f32_raddextexp_ukernel__avx2_p5_x72()
avx512f-p5-scalef-x192.c
38 const __m512 vminus_inf = _mm512_set1_ps(-INFINITY); in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x192() local
41 __m512 vacce0 = vminus_inf; in xnn_f32_raddextexp_ukernel__avx512f_p5_scalef_x192()
avx2-p5-x80.c
35 const __m256 vminus_inf = _mm256_set1_ps(-INFINITY); in xnn_f32_raddextexp_ukernel__avx2_p5_x80() local
45 __m256 vacce0 = vminus_inf; in xnn_f32_raddextexp_ukernel__avx2_p5_x80()
avx2-p5-x96.c
35 const __m256 vminus_inf = _mm256_set1_ps(-INFINITY); in xnn_f32_raddextexp_ukernel__avx2_p5_x96() local
45 __m256 vacce0 = vminus_inf; in xnn_f32_raddextexp_ukernel__avx2_p5_x96()
/external/XNNPACK/src/f32-raddextexp/
avx512f-p5-scalef.c.in
38 const __m512 vminus_inf = _mm512_set1_ps(-INFINITY);
43 __m512 vacce${K} = vminus_inf;
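Every hit above follows the same pattern: the f32-raddextexp micro-kernels build a vminus_inf constant and use it to seed their per-lane exponent accumulators (vacce0, vacce1, ...). Seeding the running exponent with -INFINITY makes the empty accumulator behave like zero, so the first block of inputs always wins the exponent comparison. Below is a minimal scalar sketch of that extended-exponent accumulation idea, under the assumption of finite inputs; the function and variable names (raddextexp_scalar, acc_m, acc_e) are illustrative, not XNNPACK API.

#include <math.h>
#include <stddef.h>

/* Sums exp(x[i]) for i in [0, n) as mantissa * 2^exponent, so very large or
 * very small inputs do not overflow a plain float accumulator.
 * Assumes finite inputs. Illustrative sketch only, not the XNNPACK kernel. */
static void raddextexp_scalar(size_t n, const float* x, float* out_m, float* out_e) {
  const float log2e = 0x1.715476p+0f;  /* log2(e) */
  const float ln2   = 0x1.62E430p-1f;  /* ln(2) */
  float acc_m = 0.0f;        /* running mantissa sum */
  float acc_e = -INFINITY;   /* running exponent; mirrors the vminus_inf seeding above */
  for (size_t i = 0; i < n; i++) {
    const float e = floorf(x[i] * log2e);  /* integer exponent of exp(x[i]) */
    const float m = expf(x[i] - e * ln2);  /* mantissa of exp(x[i]), roughly in [1, 2) */
    const float max_e = fmaxf(acc_e, e);   /* first iteration: max_e == e, since acc_e == -inf */
    /* Rescale both terms to the shared exponent before adding them. */
    acc_m = acc_m * exp2f(acc_e - max_e) + m * exp2f(e - max_e);
    acc_e = max_e;
  }
  *out_m = acc_m;  /* result is out_m * 2^out_e */
  *out_e = acc_e;
}

The vectorized kernels do the same per SIMD lane, with the AVX512 variants leaning on the scalef instruction family for the rescaling step (hence the "scalef" in their names), which is why each accumulator lane vacce0 through vacce5 starts out as vminus_inf.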