
Searched refs:vc1 (Results 1 – 25 of 455) sorted by relevance


/external/mesa3d/src/gallium/frontends/va/
picture_vc1.c
32 VAPictureParameterBufferVC1 * vc1 = buf->data; in vlVaHandlePictureParameterBufferVC1() local
35 context->desc.vc1.slice_count = 0; in vlVaHandlePictureParameterBufferVC1()
36 vlVaGetReferenceFrame(drv, vc1->forward_reference_picture, &context->desc.vc1.ref[0]); in vlVaHandlePictureParameterBufferVC1()
37 vlVaGetReferenceFrame(drv, vc1->backward_reference_picture, &context->desc.vc1.ref[1]); in vlVaHandlePictureParameterBufferVC1()
38 context->desc.vc1.picture_type = vc1->picture_fields.bits.picture_type; in vlVaHandlePictureParameterBufferVC1()
39 context->desc.vc1.frame_coding_mode = vc1->picture_fields.bits.frame_coding_mode; in vlVaHandlePictureParameterBufferVC1()
40 context->desc.vc1.postprocflag = vc1->post_processing != 0; in vlVaHandlePictureParameterBufferVC1()
41 context->desc.vc1.pulldown = vc1->sequence_fields.bits.pulldown; in vlVaHandlePictureParameterBufferVC1()
42 context->desc.vc1.interlace = vc1->sequence_fields.bits.interlace; in vlVaHandlePictureParameterBufferVC1()
43 context->desc.vc1.tfcntrflag = vc1->sequence_fields.bits.tfcntrflag; in vlVaHandlePictureParameterBufferVC1()
[all …]
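
The Mesa hits above all come from the VA-API frontend's VC-1 picture-parameter handler, which copies fields of the libva VAPictureParameterBufferVC1 into Mesa's internal decode descriptor. The fragment below is a minimal, self-contained sketch of that copy pattern; the struct definitions are simplified stand-ins for the real libva/gallium types, get_reference_frame() is a hypothetical stand-in for vlVaGetReferenceFrame(), and only the fields visible in the hits are modeled.

/* Sketch of the field-copy pattern visible in picture_vc1.c above.
 * The structs are simplified stand-ins for VAPictureParameterBufferVC1 and
 * pipe_vc1_picture_desc; only fields that appear in the search hits exist here. */
#include <stdint.h>
#include <stddef.h>

struct va_vc1_pic_param {            /* stand-in for VAPictureParameterBufferVC1 */
    uint32_t forward_reference_picture;
    uint32_t backward_reference_picture;
    uint32_t post_processing;
    struct { unsigned picture_type : 3, frame_coding_mode : 2; } picture_fields;
    struct { unsigned pulldown : 1, interlace : 1, tfcntrflag : 1; } sequence_fields;
};

struct vc1_picture_desc {            /* stand-in for pipe_vc1_picture_desc */
    unsigned slice_count;
    void    *ref[2];
    unsigned picture_type, frame_coding_mode;
    unsigned postprocflag, pulldown, interlace, tfcntrflag;
};

/* hypothetical stand-in for vlVaGetReferenceFrame(); returns no surface here */
static void *get_reference_frame(uint32_t surface_id) { (void)surface_id; return NULL; }

static void handle_vc1_pic_param(const struct va_vc1_pic_param *vc1,
                                 struct vc1_picture_desc *desc)
{
    desc->slice_count       = 0;
    desc->ref[0]            = get_reference_frame(vc1->forward_reference_picture);
    desc->ref[1]            = get_reference_frame(vc1->backward_reference_picture);
    desc->picture_type      = vc1->picture_fields.picture_type;
    desc->frame_coding_mode = vc1->picture_fields.frame_coding_mode;
    desc->postprocflag      = vc1->post_processing != 0;
    desc->pulldown          = vc1->sequence_fields.pulldown;
    desc->interlace         = vc1->sequence_fields.interlace;
    desc->tfcntrflag        = vc1->sequence_fields.tfcntrflag;
}
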
/external/XNNPACK/src/f32-raddexpminusmax/gen/
avx512f-p5-scalef-x192-acc6.c
31 const __m512 vc1 = _mm512_set1_ps(0x1.FFFFF6p-1f); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x192_acc6() local
157 vp0 = _mm512_fmadd_ps(vp0, vt0, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x192_acc6()
158 vp1 = _mm512_fmadd_ps(vp1, vt1, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x192_acc6()
159 vp2 = _mm512_fmadd_ps(vp2, vt2, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x192_acc6()
160 vp3 = _mm512_fmadd_ps(vp3, vt3, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x192_acc6()
161 vp4 = _mm512_fmadd_ps(vp4, vt4, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x192_acc6()
162 vp5 = _mm512_fmadd_ps(vp5, vt5, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x192_acc6()
163 vp6 = _mm512_fmadd_ps(vp6, vt6, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x192_acc6()
164 vp7 = _mm512_fmadd_ps(vp7, vt7, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x192_acc6()
165 vp8 = _mm512_fmadd_ps(vp8, vt8, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x192_acc6()
[all …]
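
Every XNNPACK hit that follows has the same shape: the "p5" in the file names indicates a degree-5 polynomial approximation of exp(), vc1 is its degree-1 coefficient, and each vpN = _mm512_fmadd_ps(vpN, vtN, vc1) line is one rung of a Horner evaluation, unrolled across several independent accumulators so the FMA latency can be hidden. The scalar sketch below shows the same Horner pattern with fmaf(); only c1 = 0x1.FFFFF6p-1f comes from the hits, while c2..c5 are illustrative Taylor-series placeholders rather than XNNPACK's tuned values, and the final scalef-based reconstruction step is omitted.

/* Horner-scheme polynomial mirrored by the _mm512_fmadd_ps lines in these kernels:
 *   p(t) = c5*t^5 + c4*t^4 + c3*t^3 + c2*t^2 + c1*t + 1  ~  exp(t) for small t. */
#include <math.h>

static float exp_poly5(float t)
{
    const float c5 = 1.0f / 120.0f;   /* placeholder coefficient */
    const float c4 = 1.0f / 24.0f;    /* placeholder coefficient */
    const float c3 = 1.0f / 6.0f;     /* placeholder coefficient */
    const float c2 = 0.5f;            /* placeholder coefficient */
    const float c1 = 0x1.FFFFF6p-1f;  /* value from the search hits */

    float p = c5;
    p = fmaf(p, t, c4);   /* p = p*t + c4 */
    p = fmaf(p, t, c3);
    p = fmaf(p, t, c2);
    p = fmaf(p, t, c1);   /* the step matching "vp = _mm512_fmadd_ps(vp, vt, vc1)" */
    p = fmaf(p, t, 1.0f);
    return p;             /* ~ exp(t) on the reduced range */
}
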
avx512f-p5-scalef-x192-acc2.c
31 const __m512 vc1 = _mm512_set1_ps(0x1.FFFFF6p-1f); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x192_acc2() local
153 vp0 = _mm512_fmadd_ps(vp0, vt0, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x192_acc2()
154 vp1 = _mm512_fmadd_ps(vp1, vt1, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x192_acc2()
155 vp2 = _mm512_fmadd_ps(vp2, vt2, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x192_acc2()
156 vp3 = _mm512_fmadd_ps(vp3, vt3, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x192_acc2()
157 vp4 = _mm512_fmadd_ps(vp4, vt4, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x192_acc2()
158 vp5 = _mm512_fmadd_ps(vp5, vt5, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x192_acc2()
159 vp6 = _mm512_fmadd_ps(vp6, vt6, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x192_acc2()
160 vp7 = _mm512_fmadd_ps(vp7, vt7, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x192_acc2()
161 vp8 = _mm512_fmadd_ps(vp8, vt8, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x192_acc2()
[all …]
avx512f-p5-scalef-x192-acc3.c
31 const __m512 vc1 = _mm512_set1_ps(0x1.FFFFF6p-1f); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x192_acc3() local
154 vp0 = _mm512_fmadd_ps(vp0, vt0, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x192_acc3()
155 vp1 = _mm512_fmadd_ps(vp1, vt1, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x192_acc3()
156 vp2 = _mm512_fmadd_ps(vp2, vt2, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x192_acc3()
157 vp3 = _mm512_fmadd_ps(vp3, vt3, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x192_acc3()
158 vp4 = _mm512_fmadd_ps(vp4, vt4, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x192_acc3()
159 vp5 = _mm512_fmadd_ps(vp5, vt5, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x192_acc3()
160 vp6 = _mm512_fmadd_ps(vp6, vt6, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x192_acc3()
161 vp7 = _mm512_fmadd_ps(vp7, vt7, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x192_acc3()
162 vp8 = _mm512_fmadd_ps(vp8, vt8, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x192_acc3()
[all …]
avx512f-p5-scalef-x192.c
31 const __m512 vc1 = _mm512_set1_ps(0x1.FFFFF6p-1f); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x192() local
152 vp0 = _mm512_fmadd_ps(vp0, vt0, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x192()
153 vp1 = _mm512_fmadd_ps(vp1, vt1, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x192()
154 vp2 = _mm512_fmadd_ps(vp2, vt2, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x192()
155 vp3 = _mm512_fmadd_ps(vp3, vt3, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x192()
156 vp4 = _mm512_fmadd_ps(vp4, vt4, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x192()
157 vp5 = _mm512_fmadd_ps(vp5, vt5, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x192()
158 vp6 = _mm512_fmadd_ps(vp6, vt6, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x192()
159 vp7 = _mm512_fmadd_ps(vp7, vt7, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x192()
160 vp8 = _mm512_fmadd_ps(vp8, vt8, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x192()
[all …]
avx512f-p5-scalef-x160-acc2.c
31 const __m512 vc1 = _mm512_set1_ps(0x1.FFFFF6p-1f); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x160_acc2() local
137 vp0 = _mm512_fmadd_ps(vp0, vt0, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x160_acc2()
138 vp1 = _mm512_fmadd_ps(vp1, vt1, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x160_acc2()
139 vp2 = _mm512_fmadd_ps(vp2, vt2, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x160_acc2()
140 vp3 = _mm512_fmadd_ps(vp3, vt3, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x160_acc2()
141 vp4 = _mm512_fmadd_ps(vp4, vt4, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x160_acc2()
142 vp5 = _mm512_fmadd_ps(vp5, vt5, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x160_acc2()
143 vp6 = _mm512_fmadd_ps(vp6, vt6, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x160_acc2()
144 vp7 = _mm512_fmadd_ps(vp7, vt7, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x160_acc2()
145 vp8 = _mm512_fmadd_ps(vp8, vt8, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x160_acc2()
[all …]
avx512f-p5-scalef-x160-acc5.c
31 const __m512 vc1 = _mm512_set1_ps(0x1.FFFFF6p-1f); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x160_acc5() local
140 vp0 = _mm512_fmadd_ps(vp0, vt0, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x160_acc5()
141 vp1 = _mm512_fmadd_ps(vp1, vt1, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x160_acc5()
142 vp2 = _mm512_fmadd_ps(vp2, vt2, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x160_acc5()
143 vp3 = _mm512_fmadd_ps(vp3, vt3, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x160_acc5()
144 vp4 = _mm512_fmadd_ps(vp4, vt4, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x160_acc5()
145 vp5 = _mm512_fmadd_ps(vp5, vt5, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x160_acc5()
146 vp6 = _mm512_fmadd_ps(vp6, vt6, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x160_acc5()
147 vp7 = _mm512_fmadd_ps(vp7, vt7, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x160_acc5()
148 vp8 = _mm512_fmadd_ps(vp8, vt8, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x160_acc5()
[all …]
avx512f-p5-scalef-x160.c
31 const __m512 vc1 = _mm512_set1_ps(0x1.FFFFF6p-1f); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x160() local
136 vp0 = _mm512_fmadd_ps(vp0, vt0, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x160()
137 vp1 = _mm512_fmadd_ps(vp1, vt1, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x160()
138 vp2 = _mm512_fmadd_ps(vp2, vt2, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x160()
139 vp3 = _mm512_fmadd_ps(vp3, vt3, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x160()
140 vp4 = _mm512_fmadd_ps(vp4, vt4, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x160()
141 vp5 = _mm512_fmadd_ps(vp5, vt5, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x160()
142 vp6 = _mm512_fmadd_ps(vp6, vt6, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x160()
143 vp7 = _mm512_fmadd_ps(vp7, vt7, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x160()
144 vp8 = _mm512_fmadd_ps(vp8, vt8, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x160()
[all …]
avx512f-p5-scalef-x144-acc3.c
31 const __m512 vc1 = _mm512_set1_ps(0x1.FFFFF6p-1f); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x144_acc3() local
130 vp0 = _mm512_fmadd_ps(vp0, vt0, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x144_acc3()
131 vp1 = _mm512_fmadd_ps(vp1, vt1, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x144_acc3()
132 vp2 = _mm512_fmadd_ps(vp2, vt2, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x144_acc3()
133 vp3 = _mm512_fmadd_ps(vp3, vt3, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x144_acc3()
134 vp4 = _mm512_fmadd_ps(vp4, vt4, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x144_acc3()
135 vp5 = _mm512_fmadd_ps(vp5, vt5, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x144_acc3()
136 vp6 = _mm512_fmadd_ps(vp6, vt6, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x144_acc3()
137 vp7 = _mm512_fmadd_ps(vp7, vt7, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x144_acc3()
138 vp8 = _mm512_fmadd_ps(vp8, vt8, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x144_acc3()
[all …]
avx512f-p5-scalef-x144.c
31 const __m512 vc1 = _mm512_set1_ps(0x1.FFFFF6p-1f); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x144() local
128 vp0 = _mm512_fmadd_ps(vp0, vt0, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x144()
129 vp1 = _mm512_fmadd_ps(vp1, vt1, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x144()
130 vp2 = _mm512_fmadd_ps(vp2, vt2, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x144()
131 vp3 = _mm512_fmadd_ps(vp3, vt3, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x144()
132 vp4 = _mm512_fmadd_ps(vp4, vt4, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x144()
133 vp5 = _mm512_fmadd_ps(vp5, vt5, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x144()
134 vp6 = _mm512_fmadd_ps(vp6, vt6, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x144()
135 vp7 = _mm512_fmadd_ps(vp7, vt7, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x144()
136 vp8 = _mm512_fmadd_ps(vp8, vt8, vc1); in xnn_f32_raddexpminusmax_ukernel__avx512f_p5_scalef_x144()
[all …]
/external/XNNPACK/src/f32-raddstoreexpminusmax/gen/
avx512f-p5-scalef-x192-acc3.c
32 const __m512 vc1 = _mm512_set1_ps(0x1.FFFFF6p-1f); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x192_acc3() local
155 vp0 = _mm512_fmadd_ps(vp0, vt0, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x192_acc3()
156 vp1 = _mm512_fmadd_ps(vp1, vt1, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x192_acc3()
157 vp2 = _mm512_fmadd_ps(vp2, vt2, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x192_acc3()
158 vp3 = _mm512_fmadd_ps(vp3, vt3, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x192_acc3()
159 vp4 = _mm512_fmadd_ps(vp4, vt4, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x192_acc3()
160 vp5 = _mm512_fmadd_ps(vp5, vt5, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x192_acc3()
161 vp6 = _mm512_fmadd_ps(vp6, vt6, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x192_acc3()
162 vp7 = _mm512_fmadd_ps(vp7, vt7, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x192_acc3()
163 vp8 = _mm512_fmadd_ps(vp8, vt8, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x192_acc3()
[all …]
avx512f-p5-scalef-x192.c
32 const __m512 vc1 = _mm512_set1_ps(0x1.FFFFF6p-1f); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x192() local
153 vp0 = _mm512_fmadd_ps(vp0, vt0, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x192()
154 vp1 = _mm512_fmadd_ps(vp1, vt1, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x192()
155 vp2 = _mm512_fmadd_ps(vp2, vt2, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x192()
156 vp3 = _mm512_fmadd_ps(vp3, vt3, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x192()
157 vp4 = _mm512_fmadd_ps(vp4, vt4, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x192()
158 vp5 = _mm512_fmadd_ps(vp5, vt5, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x192()
159 vp6 = _mm512_fmadd_ps(vp6, vt6, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x192()
160 vp7 = _mm512_fmadd_ps(vp7, vt7, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x192()
161 vp8 = _mm512_fmadd_ps(vp8, vt8, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x192()
[all …]
avx512f-p5-scalef-x192-acc2.c
32 const __m512 vc1 = _mm512_set1_ps(0x1.FFFFF6p-1f); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x192_acc2() local
154 vp0 = _mm512_fmadd_ps(vp0, vt0, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x192_acc2()
155 vp1 = _mm512_fmadd_ps(vp1, vt1, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x192_acc2()
156 vp2 = _mm512_fmadd_ps(vp2, vt2, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x192_acc2()
157 vp3 = _mm512_fmadd_ps(vp3, vt3, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x192_acc2()
158 vp4 = _mm512_fmadd_ps(vp4, vt4, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x192_acc2()
159 vp5 = _mm512_fmadd_ps(vp5, vt5, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x192_acc2()
160 vp6 = _mm512_fmadd_ps(vp6, vt6, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x192_acc2()
161 vp7 = _mm512_fmadd_ps(vp7, vt7, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x192_acc2()
162 vp8 = _mm512_fmadd_ps(vp8, vt8, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x192_acc2()
[all …]
avx512f-p5-scalef-x160-acc2.c
32 const __m512 vc1 = _mm512_set1_ps(0x1.FFFFF6p-1f); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x160_acc2() local
138 vp0 = _mm512_fmadd_ps(vp0, vt0, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x160_acc2()
139 vp1 = _mm512_fmadd_ps(vp1, vt1, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x160_acc2()
140 vp2 = _mm512_fmadd_ps(vp2, vt2, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x160_acc2()
141 vp3 = _mm512_fmadd_ps(vp3, vt3, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x160_acc2()
142 vp4 = _mm512_fmadd_ps(vp4, vt4, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x160_acc2()
143 vp5 = _mm512_fmadd_ps(vp5, vt5, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x160_acc2()
144 vp6 = _mm512_fmadd_ps(vp6, vt6, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x160_acc2()
145 vp7 = _mm512_fmadd_ps(vp7, vt7, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x160_acc2()
146 vp8 = _mm512_fmadd_ps(vp8, vt8, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x160_acc2()
[all …]
avx512f-p5-scalef-x160.c
32 const __m512 vc1 = _mm512_set1_ps(0x1.FFFFF6p-1f); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x160() local
137 vp0 = _mm512_fmadd_ps(vp0, vt0, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x160()
138 vp1 = _mm512_fmadd_ps(vp1, vt1, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x160()
139 vp2 = _mm512_fmadd_ps(vp2, vt2, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x160()
140 vp3 = _mm512_fmadd_ps(vp3, vt3, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x160()
141 vp4 = _mm512_fmadd_ps(vp4, vt4, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x160()
142 vp5 = _mm512_fmadd_ps(vp5, vt5, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x160()
143 vp6 = _mm512_fmadd_ps(vp6, vt6, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x160()
144 vp7 = _mm512_fmadd_ps(vp7, vt7, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x160()
145 vp8 = _mm512_fmadd_ps(vp8, vt8, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x160()
[all …]
avx512f-p5-scalef-x144.c
32 const __m512 vc1 = _mm512_set1_ps(0x1.FFFFF6p-1f); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x144() local
129 vp0 = _mm512_fmadd_ps(vp0, vt0, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x144()
130 vp1 = _mm512_fmadd_ps(vp1, vt1, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x144()
131 vp2 = _mm512_fmadd_ps(vp2, vt2, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x144()
132 vp3 = _mm512_fmadd_ps(vp3, vt3, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x144()
133 vp4 = _mm512_fmadd_ps(vp4, vt4, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x144()
134 vp5 = _mm512_fmadd_ps(vp5, vt5, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x144()
135 vp6 = _mm512_fmadd_ps(vp6, vt6, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x144()
136 vp7 = _mm512_fmadd_ps(vp7, vt7, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x144()
137 vp8 = _mm512_fmadd_ps(vp8, vt8, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x144()
[all …]
avx512f-p5-scalef-x144-acc3.c
32 const __m512 vc1 = _mm512_set1_ps(0x1.FFFFF6p-1f); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x144_acc3() local
131 vp0 = _mm512_fmadd_ps(vp0, vt0, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x144_acc3()
132 vp1 = _mm512_fmadd_ps(vp1, vt1, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x144_acc3()
133 vp2 = _mm512_fmadd_ps(vp2, vt2, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x144_acc3()
134 vp3 = _mm512_fmadd_ps(vp3, vt3, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x144_acc3()
135 vp4 = _mm512_fmadd_ps(vp4, vt4, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x144_acc3()
136 vp5 = _mm512_fmadd_ps(vp5, vt5, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x144_acc3()
137 vp6 = _mm512_fmadd_ps(vp6, vt6, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x144_acc3()
138 vp7 = _mm512_fmadd_ps(vp7, vt7, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x144_acc3()
139 vp8 = _mm512_fmadd_ps(vp8, vt8, vc1); in xnn_f32_raddstoreexpminusmax_ukernel__avx512f_p5_scalef_x144_acc3()
[all …]
/external/XNNPACK/src/f32-vscaleextexp/gen/
avx512f-p5-scalef-x192.c
33 const __m512 vc1 = _mm512_set1_ps(0x1.FFFFF6p-1f); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x192() local
140 vp0 = _mm512_fmadd_ps(vp0, vt0, vc1); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x192()
141 vp1 = _mm512_fmadd_ps(vp1, vt1, vc1); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x192()
142 vp2 = _mm512_fmadd_ps(vp2, vt2, vc1); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x192()
143 vp3 = _mm512_fmadd_ps(vp3, vt3, vc1); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x192()
144 vp4 = _mm512_fmadd_ps(vp4, vt4, vc1); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x192()
145 vp5 = _mm512_fmadd_ps(vp5, vt5, vc1); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x192()
146 vp6 = _mm512_fmadd_ps(vp6, vt6, vc1); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x192()
147 vp7 = _mm512_fmadd_ps(vp7, vt7, vc1); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x192()
148 vp8 = _mm512_fmadd_ps(vp8, vt8, vc1); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x192()
[all …]
avx512f-p5-scalef-x176.c
33 const __m512 vc1 = _mm512_set1_ps(0x1.FFFFF6p-1f); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x176() local
133 vp0 = _mm512_fmadd_ps(vp0, vt0, vc1); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x176()
134 vp1 = _mm512_fmadd_ps(vp1, vt1, vc1); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x176()
135 vp2 = _mm512_fmadd_ps(vp2, vt2, vc1); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x176()
136 vp3 = _mm512_fmadd_ps(vp3, vt3, vc1); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x176()
137 vp4 = _mm512_fmadd_ps(vp4, vt4, vc1); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x176()
138 vp5 = _mm512_fmadd_ps(vp5, vt5, vc1); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x176()
139 vp6 = _mm512_fmadd_ps(vp6, vt6, vc1); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x176()
140 vp7 = _mm512_fmadd_ps(vp7, vt7, vc1); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x176()
141 vp8 = _mm512_fmadd_ps(vp8, vt8, vc1); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x176()
[all …]
avx512f-p5-scalef-x160.c
33 const __m512 vc1 = _mm512_set1_ps(0x1.FFFFF6p-1f); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x160() local
126 vp0 = _mm512_fmadd_ps(vp0, vt0, vc1); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x160()
127 vp1 = _mm512_fmadd_ps(vp1, vt1, vc1); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x160()
128 vp2 = _mm512_fmadd_ps(vp2, vt2, vc1); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x160()
129 vp3 = _mm512_fmadd_ps(vp3, vt3, vc1); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x160()
130 vp4 = _mm512_fmadd_ps(vp4, vt4, vc1); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x160()
131 vp5 = _mm512_fmadd_ps(vp5, vt5, vc1); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x160()
132 vp6 = _mm512_fmadd_ps(vp6, vt6, vc1); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x160()
133 vp7 = _mm512_fmadd_ps(vp7, vt7, vc1); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x160()
134 vp8 = _mm512_fmadd_ps(vp8, vt8, vc1); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x160()
[all …]
avx512f-p5-scalef-x144.c
33 const __m512 vc1 = _mm512_set1_ps(0x1.FFFFF6p-1f); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x144() local
119 vp0 = _mm512_fmadd_ps(vp0, vt0, vc1); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x144()
120 vp1 = _mm512_fmadd_ps(vp1, vt1, vc1); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x144()
121 vp2 = _mm512_fmadd_ps(vp2, vt2, vc1); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x144()
122 vp3 = _mm512_fmadd_ps(vp3, vt3, vc1); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x144()
123 vp4 = _mm512_fmadd_ps(vp4, vt4, vc1); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x144()
124 vp5 = _mm512_fmadd_ps(vp5, vt5, vc1); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x144()
125 vp6 = _mm512_fmadd_ps(vp6, vt6, vc1); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x144()
126 vp7 = _mm512_fmadd_ps(vp7, vt7, vc1); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x144()
127 vp8 = _mm512_fmadd_ps(vp8, vt8, vc1); in xnn_f32_vscaleextexp_ukernel__avx512f_p5_scalef_x144()
[all …]
/external/XNNPACK/src/f32-vscaleexpminusmax/gen/
avx512f-p5-scalef-x192.c
32 const __m512 vc1 = _mm512_set1_ps(0x1.FFFFF6p-1f); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x192() local
153 vp0 = _mm512_fmadd_ps(vp0, vt0, vc1); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x192()
154 vp1 = _mm512_fmadd_ps(vp1, vt1, vc1); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x192()
155 vp2 = _mm512_fmadd_ps(vp2, vt2, vc1); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x192()
156 vp3 = _mm512_fmadd_ps(vp3, vt3, vc1); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x192()
157 vp4 = _mm512_fmadd_ps(vp4, vt4, vc1); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x192()
158 vp5 = _mm512_fmadd_ps(vp5, vt5, vc1); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x192()
159 vp6 = _mm512_fmadd_ps(vp6, vt6, vc1); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x192()
160 vp7 = _mm512_fmadd_ps(vp7, vt7, vc1); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x192()
161 vp8 = _mm512_fmadd_ps(vp8, vt8, vc1); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x192()
[all …]
avx512f-p5-scalef-x176.c
32 const __m512 vc1 = _mm512_set1_ps(0x1.FFFFF6p-1f); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x176() local
145 vp0 = _mm512_fmadd_ps(vp0, vt0, vc1); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x176()
146 vp1 = _mm512_fmadd_ps(vp1, vt1, vc1); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x176()
147 vp2 = _mm512_fmadd_ps(vp2, vt2, vc1); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x176()
148 vp3 = _mm512_fmadd_ps(vp3, vt3, vc1); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x176()
149 vp4 = _mm512_fmadd_ps(vp4, vt4, vc1); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x176()
150 vp5 = _mm512_fmadd_ps(vp5, vt5, vc1); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x176()
151 vp6 = _mm512_fmadd_ps(vp6, vt6, vc1); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x176()
152 vp7 = _mm512_fmadd_ps(vp7, vt7, vc1); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x176()
153 vp8 = _mm512_fmadd_ps(vp8, vt8, vc1); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x176()
[all …]
avx512f-p5-scalef-x160.c
32 const __m512 vc1 = _mm512_set1_ps(0x1.FFFFF6p-1f); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x160() local
137 vp0 = _mm512_fmadd_ps(vp0, vt0, vc1); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x160()
138 vp1 = _mm512_fmadd_ps(vp1, vt1, vc1); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x160()
139 vp2 = _mm512_fmadd_ps(vp2, vt2, vc1); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x160()
140 vp3 = _mm512_fmadd_ps(vp3, vt3, vc1); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x160()
141 vp4 = _mm512_fmadd_ps(vp4, vt4, vc1); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x160()
142 vp5 = _mm512_fmadd_ps(vp5, vt5, vc1); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x160()
143 vp6 = _mm512_fmadd_ps(vp6, vt6, vc1); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x160()
144 vp7 = _mm512_fmadd_ps(vp7, vt7, vc1); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x160()
145 vp8 = _mm512_fmadd_ps(vp8, vt8, vc1); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x160()
[all …]
avx512f-p5-scalef-x144.c
32 const __m512 vc1 = _mm512_set1_ps(0x1.FFFFF6p-1f); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x144() local
129 vp0 = _mm512_fmadd_ps(vp0, vt0, vc1); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x144()
130 vp1 = _mm512_fmadd_ps(vp1, vt1, vc1); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x144()
131 vp2 = _mm512_fmadd_ps(vp2, vt2, vc1); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x144()
132 vp3 = _mm512_fmadd_ps(vp3, vt3, vc1); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x144()
133 vp4 = _mm512_fmadd_ps(vp4, vt4, vc1); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x144()
134 vp5 = _mm512_fmadd_ps(vp5, vt5, vc1); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x144()
135 vp6 = _mm512_fmadd_ps(vp6, vt6, vc1); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x144()
136 vp7 = _mm512_fmadd_ps(vp7, vt7, vc1); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x144()
137 vp8 = _mm512_fmadd_ps(vp8, vt8, vc1); in xnn_f32_vscaleexpminusmax_ukernel__avx512f_p5_scalef_x144()
[all …]