Home
last modified time | relevance | path

Searched refs:vf0 (Results 1 – 25 of 231) sorted by relevance

Pages: 1 2 3 4 5 6 7 8 9 10

/external/llvm-project/llvm/test/CodeGen/PowerPC/
Dfp-strict-minmax.ll12 define <4 x float> @fmaxnum_v4f32(<4 x float> %vf0, <4 x float> %vf1) #0 {
18 <4 x float> %vf0, <4 x float> %vf1,
23 define <2 x double> @fmaxnum_v2f64(<2 x double> %vf0, <2 x double> %vf1) #0 {
29 <2 x double> %vf0, <2 x double> %vf1,
35 define <4 x float> @fminnum_v4f32(<4 x float> %vf0, <4 x float> %vf1) #0 {
41 <4 x float> %vf0, <4 x float> %vf1,
46 define <2 x double> @fminnum_v2f64(<2 x double> %vf0, <2 x double> %vf1) #0 {
52 <2 x double> %vf0, <2 x double> %vf1,
/external/XNNPACK/src/f32-sigmoid/gen/
Dscalar-lut2048-p1-div-x2.c76 float vf0 = vy0 / vd0; in xnn_f32_sigmoid_ukernel__scalar_lut2048_p1_div_x2() local
80 vf0 = 0.0f; in xnn_f32_sigmoid_ukernel__scalar_lut2048_p1_div_x2()
87 vf0 = vone - vf0; in xnn_f32_sigmoid_ukernel__scalar_lut2048_p1_div_x2()
93 y[0] = vf0; in xnn_f32_sigmoid_ukernel__scalar_lut2048_p1_div_x2()
Dscalar-p5-div-x2.c83 float vf0 = ve0 / vd0; in xnn_f32_sigmoid_ukernel__scalar_p5_div_x2() local
87 vf0 = 0.0f; in xnn_f32_sigmoid_ukernel__scalar_p5_div_x2()
94 vf0 = vone - vf0; in xnn_f32_sigmoid_ukernel__scalar_p5_div_x2()
100 y[0] = vf0; in xnn_f32_sigmoid_ukernel__scalar_p5_div_x2()
Dscalar-lut64-p2-div-x2.c79 float vf0 = vy0 / vd0; in xnn_f32_sigmoid_ukernel__scalar_lut64_p2_div_x2() local
83 vf0 = 0.0f; in xnn_f32_sigmoid_ukernel__scalar_lut64_p2_div_x2()
90 vf0 = vone - vf0; in xnn_f32_sigmoid_ukernel__scalar_lut64_p2_div_x2()
96 y[0] = vf0; in xnn_f32_sigmoid_ukernel__scalar_lut64_p2_div_x2()
Davx2-rr1-p5-div-x16.c81 __m256 vf0 = _mm256_div_ps(ve0, vd0); in xnn_f32_sigmoid_ukernel__avx2_rr1_p5_div_x16() local
84 vf0 = _mm256_andnot_ps(_mm256_cmp_ps(vz0, vdenorm_cutoff, _CMP_LT_OS), vf0); in xnn_f32_sigmoid_ukernel__avx2_rr1_p5_div_x16()
87 vf0 = _mm256_blendv_ps(_mm256_sub_ps(vone, vf0), vf0, vx0); in xnn_f32_sigmoid_ukernel__avx2_rr1_p5_div_x16()
90 _mm256_storeu_ps(y, vf0); in xnn_f32_sigmoid_ukernel__avx2_rr1_p5_div_x16()
Davx2-rr1-p5-nr1fma-x16.c88 __m256 vf0 = _mm256_mul_ps(ve0, vr0); in xnn_f32_sigmoid_ukernel__avx2_rr1_p5_nr1fma_x16() local
91 vf0 = _mm256_andnot_ps(_mm256_cmp_ps(vz0, vdenorm_cutoff, _CMP_LT_OS), vf0); in xnn_f32_sigmoid_ukernel__avx2_rr1_p5_nr1fma_x16()
94 vf0 = _mm256_blendv_ps(_mm256_sub_ps(vone, vf0), vf0, vx0); in xnn_f32_sigmoid_ukernel__avx2_rr1_p5_nr1fma_x16()
97 _mm256_storeu_ps(y, vf0); in xnn_f32_sigmoid_ukernel__avx2_rr1_p5_nr1fma_x16()
Davx2-rr1-p5-div-x24.c94 __m256 vf0 = _mm256_div_ps(ve0, vd0); in xnn_f32_sigmoid_ukernel__avx2_rr1_p5_div_x24() local
98 vf0 = _mm256_andnot_ps(_mm256_cmp_ps(vz0, vdenorm_cutoff, _CMP_LT_OS), vf0); in xnn_f32_sigmoid_ukernel__avx2_rr1_p5_div_x24()
102 vf0 = _mm256_blendv_ps(_mm256_sub_ps(vone, vf0), vf0, vx0); in xnn_f32_sigmoid_ukernel__avx2_rr1_p5_div_x24()
106 _mm256_storeu_ps(y, vf0); in xnn_f32_sigmoid_ukernel__avx2_rr1_p5_div_x24()
Davx2-rr1-p5-nr2fma-x16.c90 __m256 vf0 = _mm256_mul_ps(ve0, vr0); in xnn_f32_sigmoid_ukernel__avx2_rr1_p5_nr2fma_x16() local
93 vf0 = _mm256_andnot_ps(_mm256_cmp_ps(vz0, vdenorm_cutoff, _CMP_LT_OS), vf0); in xnn_f32_sigmoid_ukernel__avx2_rr1_p5_nr2fma_x16()
96 vf0 = _mm256_blendv_ps(_mm256_sub_ps(vone, vf0), vf0, vx0); in xnn_f32_sigmoid_ukernel__avx2_rr1_p5_nr2fma_x16()
99 _mm256_storeu_ps(y, vf0); in xnn_f32_sigmoid_ukernel__avx2_rr1_p5_nr2fma_x16()
Davx2-rr1-p5-nr1fma-x24.c103 __m256 vf0 = _mm256_mul_ps(ve0, vr0); in xnn_f32_sigmoid_ukernel__avx2_rr1_p5_nr1fma_x24() local
107 vf0 = _mm256_andnot_ps(_mm256_cmp_ps(vz0, vdenorm_cutoff, _CMP_LT_OS), vf0); in xnn_f32_sigmoid_ukernel__avx2_rr1_p5_nr1fma_x24()
111 vf0 = _mm256_blendv_ps(_mm256_sub_ps(vone, vf0), vf0, vx0); in xnn_f32_sigmoid_ukernel__avx2_rr1_p5_nr1fma_x24()
115 _mm256_storeu_ps(y, vf0); in xnn_f32_sigmoid_ukernel__avx2_rr1_p5_nr1fma_x24()
Davx-rr2-p5-div-x16.c89 __m256 vf0 = _mm256_div_ps(ve0, vd0); in xnn_f32_sigmoid_ukernel__avx_rr2_p5_div_x16() local
92 vf0 = _mm256_andnot_ps(_mm256_cmp_ps(vz0, vdenorm_cutoff, _CMP_LT_OS), vf0); in xnn_f32_sigmoid_ukernel__avx_rr2_p5_div_x16()
95 vf0 = _mm256_blendv_ps(_mm256_sub_ps(vone, vf0), vf0, vx0); in xnn_f32_sigmoid_ukernel__avx_rr2_p5_div_x16()
98 _mm256_storeu_ps(y, vf0); in xnn_f32_sigmoid_ukernel__avx_rr2_p5_div_x16()
Dscalar-lut2048-p1-div-x4.c100 float vf0 = vy0 / vd0; in xnn_f32_sigmoid_ukernel__scalar_lut2048_p1_div_x4() local
106 vf0 = 0.0f; in xnn_f32_sigmoid_ukernel__scalar_lut2048_p1_div_x4()
119 vf0 = vone - vf0; in xnn_f32_sigmoid_ukernel__scalar_lut2048_p1_div_x4()
131 y[0] = vf0; in xnn_f32_sigmoid_ukernel__scalar_lut2048_p1_div_x4()
Dscalar-p5-div-x4.c111 float vf0 = ve0 / vd0; in xnn_f32_sigmoid_ukernel__scalar_p5_div_x4() local
117 vf0 = 0.0f; in xnn_f32_sigmoid_ukernel__scalar_p5_div_x4()
130 vf0 = vone - vf0; in xnn_f32_sigmoid_ukernel__scalar_p5_div_x4()
142 y[0] = vf0; in xnn_f32_sigmoid_ukernel__scalar_p5_div_x4()
Dscalar-lut64-p2-div-x4.c105 float vf0 = vy0 / vd0; in xnn_f32_sigmoid_ukernel__scalar_lut64_p2_div_x4() local
111 vf0 = 0.0f; in xnn_f32_sigmoid_ukernel__scalar_lut64_p2_div_x4()
124 vf0 = vone - vf0; in xnn_f32_sigmoid_ukernel__scalar_lut64_p2_div_x4()
136 y[0] = vf0; in xnn_f32_sigmoid_ukernel__scalar_lut64_p2_div_x4()
/external/XNNPACK/src/f32-vscaleexpminusmax/gen/
Davx2-p5-x8.c84 __m256 vf0 = _mm256_fmadd_ps(vt0, vp0, vs0); in xnn_f32_vscaleexpminusmax_ukernel__avx2_p5_x8() local
88 vf0 = _mm256_andnot_ps(_mm256_cmp_ps(vx0, vdenorm_cutoff, _CMP_LT_OS), vf0); in xnn_f32_vscaleexpminusmax_ukernel__avx2_p5_x8()
91 vf0 = _mm256_mul_ps(vf0, vscale); in xnn_f32_vscaleexpminusmax_ukernel__avx2_p5_x8()
94 _mm256_storeu_ps(output, vf0); in xnn_f32_vscaleexpminusmax_ukernel__avx2_p5_x8()
Davx512f-p5-scalef-x16.c72 __m512 vf0 = _mm512_scalef_ps(vp0, vn0); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x16() local
75 vf0 = _mm512_mul_ps(vf0, vscale); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x16()
78 _mm512_storeu_ps(output, vf0); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x16()
79 _mm512_storeu_ps(output + 0, vf0); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x16()
Davx2-p5-x16.c96 __m256 vf0 = _mm256_fmadd_ps(vt0, vp0, vs0); in xnn_f32_vscaleexpminusmax_ukernel__avx2_p5_x16() local
101 vf0 = _mm256_andnot_ps(_mm256_cmp_ps(vx0, vdenorm_cutoff, _CMP_LT_OS), vf0); in xnn_f32_vscaleexpminusmax_ukernel__avx2_p5_x16()
105 vf0 = _mm256_mul_ps(vf0, vscale); in xnn_f32_vscaleexpminusmax_ukernel__avx2_p5_x16()
109 _mm256_storeu_ps(output, vf0); in xnn_f32_vscaleexpminusmax_ukernel__avx2_p5_x16()
Davx512f-p5-scalef-x32.c82 __m512 vf0 = _mm512_scalef_ps(vp0, vn0); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x32() local
86 vf0 = _mm512_mul_ps(vf0, vscale); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x32()
90 _mm512_storeu_ps(output, vf0); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x32()
91 _mm512_storeu_ps(output + 0, vf0); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x32()
/external/XNNPACK/src/f32-raddstoreexpminusmax/gen/
Dwasmsimd-p5-x4.c190 const float vf0 = wasm_f32x4_extract_lane(vf, 0); in xnn_f32_raddstoreexpminusmax_ukernel__wasmsimd_p5_x4() local
191 output[0] = vf0; in xnn_f32_raddstoreexpminusmax_ukernel__wasmsimd_p5_x4()
192 vsum += vf0; in xnn_f32_raddstoreexpminusmax_ukernel__wasmsimd_p5_x4()
203 const float vf0 = wasm_f32x4_extract_lane(vf, 0); in xnn_f32_raddstoreexpminusmax_ukernel__wasmsimd_p5_x4() local
204 *output = vf0; in xnn_f32_raddstoreexpminusmax_ukernel__wasmsimd_p5_x4()
205 vsum += vf0; in xnn_f32_raddstoreexpminusmax_ukernel__wasmsimd_p5_x4()
Dscalar-p5-x2.c97 float vf0 = vt0 * vp0 + vs0; in xnn_f32_raddstoreexpminusmax_ukernel__scalar_p5_x2() local
103 vf0 = 0.0f; in xnn_f32_raddstoreexpminusmax_ukernel__scalar_p5_x2()
110 output[0] = vf0; in xnn_f32_raddstoreexpminusmax_ukernel__scalar_p5_x2()
115 vacc0 += vf0; in xnn_f32_raddstoreexpminusmax_ukernel__scalar_p5_x2()
Dscalar-p5-x2-acc2.c98 float vf0 = vt0 * vp0 + vs0; in xnn_f32_raddstoreexpminusmax_ukernel__scalar_p5_x2_acc2() local
104 vf0 = 0.0f; in xnn_f32_raddstoreexpminusmax_ukernel__scalar_p5_x2_acc2()
111 output[0] = vf0; in xnn_f32_raddstoreexpminusmax_ukernel__scalar_p5_x2_acc2()
116 vacc0 += vf0; in xnn_f32_raddstoreexpminusmax_ukernel__scalar_p5_x2_acc2()
Dscalar-lut64-p2-x2-acc2.c108 float vf0 = vp0 * vs0 + vs0; in xnn_f32_raddstoreexpminusmax_ukernel__scalar_lut64_p2_x2_acc2() local
114 vf0 = 0.0f; in xnn_f32_raddstoreexpminusmax_ukernel__scalar_lut64_p2_x2_acc2()
121 output[0] = vf0; in xnn_f32_raddstoreexpminusmax_ukernel__scalar_lut64_p2_x2_acc2()
126 vacc0 += vf0; in xnn_f32_raddstoreexpminusmax_ukernel__scalar_lut64_p2_x2_acc2()
Dscalar-lut64-p2-x2.c107 float vf0 = vp0 * vs0 + vs0; in xnn_f32_raddstoreexpminusmax_ukernel__scalar_lut64_p2_x2() local
113 vf0 = 0.0f; in xnn_f32_raddstoreexpminusmax_ukernel__scalar_lut64_p2_x2()
120 output[0] = vf0; in xnn_f32_raddstoreexpminusmax_ukernel__scalar_lut64_p2_x2()
125 vacc0 += vf0; in xnn_f32_raddstoreexpminusmax_ukernel__scalar_lut64_p2_x2()
Dwasmsimd-p5-x8-acc2.c209 const float vf0 = wasm_f32x4_extract_lane(vf, 0); in xnn_f32_raddstoreexpminusmax_ukernel__wasmsimd_p5_x8_acc2() local
210 output[0] = vf0; in xnn_f32_raddstoreexpminusmax_ukernel__wasmsimd_p5_x8_acc2()
211 vsum += vf0; in xnn_f32_raddstoreexpminusmax_ukernel__wasmsimd_p5_x8_acc2()
222 const float vf0 = wasm_f32x4_extract_lane(vf, 0); in xnn_f32_raddstoreexpminusmax_ukernel__wasmsimd_p5_x8_acc2() local
223 *output = vf0; in xnn_f32_raddstoreexpminusmax_ukernel__wasmsimd_p5_x8_acc2()
224 vsum += vf0; in xnn_f32_raddstoreexpminusmax_ukernel__wasmsimd_p5_x8_acc2()
/external/XNNPACK/src/f32-vscaleextexp/gen/
Davx512f-p5-scalef-x16.c73 __m512 vf0 = _mm512_mul_ps(vp0, vscalev); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x16() local
78 vf0 = _mm512_scalef_ps(vf0, ve0); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x16()
81 _mm512_storeu_ps(y, vf0); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x16()
82 _mm512_storeu_ps(y + 0, vf0); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x16()
Davx512f-p5-scalef-x32.c82 __m512 vf0 = _mm512_mul_ps(vp0, vscalev); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x32() local
89 vf0 = _mm512_scalef_ps(vf0, ve0); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x32()
93 _mm512_storeu_ps(y, vf0); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x32()
94 _mm512_storeu_ps(y + 0, vf0); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x32()

Pages: 1 2 3 4 5 6 7 8 9 10