// Auto-generated file. Do not edit!
//   Template: src/f32-vsqrt/neonfma-nr1rsqrts1fma1adj.c.in
//   Generator: tools/xngen
//
// Copyright 2020 Google LLC
//
// This source code is licensed under the BSD-style license found in the
// LICENSE file in the root directory of this source tree.

#include <assert.h>
#include <math.h>

#include <arm_neon.h>

#include <xnnpack/common.h>
#include <xnnpack/vunary.h>


// Vectorized f32 square root: y[i] = sqrt(x[i]), NEON + FMA.
//
// Per the template name (nr1rsqrts1fma1adj), each 4-lane vector is computed
// via the reciprocal square root:
//   1. vrsqrteq_f32: coarse hardware estimate of 1/sqrt(x).
//   2. One Newton-Raphson step using vrsqrtsq_f32 to refine that estimate.
//   3. One FMA-based Newton-Raphson step refining sqrt(x) and 0.5/sqrt(x)
//      together via the shared residual 0.5 - sqrtx * halfrsqrtx.
//   4. One final FMA adjustment (x - sqrtx^2, scaled by 0.5/sqrt(x)) to
//      improve last-bit accuracy.
//
// The main loop consumes 36 floats (nine 4-lane vectors) per iteration; any
// remainder is handled with scalar sqrtf().
//
// Arguments:
//   n      - number of BYTES in x/y; must be non-zero and a multiple of
//            sizeof(float).
//   x      - input array.
//   y      - output array.
//   params - unused by this kernel variant (kept for the shared ukernel
//            signature).
void xnn_f32_vsqrt_ukernel__neonfma_nr1rsqrts1fma1adj_x36(
    size_t n,
    const float* x,
    float* y,
    const union xnn_f32_sqrt_params params[restrict XNN_MIN_ELEMENTS(1)]) XNN_DISABLE_TSAN
{
  assert(n != 0);
  assert(n % sizeof(float) == 0);

  const float32x4_t vhalf = vmovq_n_f32(0.5f);
  for (; n >= 36 * sizeof(float); n -= 36 * sizeof(float)) {
    // Load nine 4-lane input vectors (36 floats).
    const float32x4_t vx0123 = vld1q_f32(x); x += 4;
    const float32x4_t vx4567 = vld1q_f32(x); x += 4;
    const float32x4_t vx89AB = vld1q_f32(x); x += 4;
    const float32x4_t vxCDEF = vld1q_f32(x); x += 4;
    const float32x4_t vxGHIJ = vld1q_f32(x); x += 4;
    const float32x4_t vxKLMN = vld1q_f32(x); x += 4;
    const float32x4_t vxOPQR = vld1q_f32(x); x += 4;
    const float32x4_t vxSTUV = vld1q_f32(x); x += 4;
    const float32x4_t vxWXYZ = vld1q_f32(x); x += 4;

    // Stage 1: coarse estimate rsqrtx ~= 1/sqrt(x).
    float32x4_t vrsqrtx0123 = vrsqrteq_f32(vx0123);
    float32x4_t vrsqrtx4567 = vrsqrteq_f32(vx4567);
    float32x4_t vrsqrtx89AB = vrsqrteq_f32(vx89AB);
    float32x4_t vrsqrtxCDEF = vrsqrteq_f32(vxCDEF);
    float32x4_t vrsqrtxGHIJ = vrsqrteq_f32(vxGHIJ);
    float32x4_t vrsqrtxKLMN = vrsqrteq_f32(vxKLMN);
    float32x4_t vrsqrtxOPQR = vrsqrteq_f32(vxOPQR);
    float32x4_t vrsqrtxSTUV = vrsqrteq_f32(vxSTUV);
    float32x4_t vrsqrtxWXYZ = vrsqrteq_f32(vxWXYZ);

    // Stage 2: one Newton-Raphson step. vrsqrtsq_f32(x, rsqrtx^2) computes
    // (3 - x * rsqrtx^2) / 2, so rsqrtx *= correction.
    const float32x4_t vrx0123 = vmulq_f32(vrsqrtx0123, vrsqrtx0123);
    const float32x4_t vrx4567 = vmulq_f32(vrsqrtx4567, vrsqrtx4567);
    const float32x4_t vrx89AB = vmulq_f32(vrsqrtx89AB, vrsqrtx89AB);
    const float32x4_t vrxCDEF = vmulq_f32(vrsqrtxCDEF, vrsqrtxCDEF);
    const float32x4_t vrxGHIJ = vmulq_f32(vrsqrtxGHIJ, vrsqrtxGHIJ);
    const float32x4_t vrxKLMN = vmulq_f32(vrsqrtxKLMN, vrsqrtxKLMN);
    const float32x4_t vrxOPQR = vmulq_f32(vrsqrtxOPQR, vrsqrtxOPQR);
    const float32x4_t vrxSTUV = vmulq_f32(vrsqrtxSTUV, vrsqrtxSTUV);
    const float32x4_t vrxWXYZ = vmulq_f32(vrsqrtxWXYZ, vrsqrtxWXYZ);

    const float32x4_t vcorrection0123 = vrsqrtsq_f32(vx0123, vrx0123);
    const float32x4_t vcorrection4567 = vrsqrtsq_f32(vx4567, vrx4567);
    const float32x4_t vcorrection89AB = vrsqrtsq_f32(vx89AB, vrx89AB);
    const float32x4_t vcorrectionCDEF = vrsqrtsq_f32(vxCDEF, vrxCDEF);
    const float32x4_t vcorrectionGHIJ = vrsqrtsq_f32(vxGHIJ, vrxGHIJ);
    const float32x4_t vcorrectionKLMN = vrsqrtsq_f32(vxKLMN, vrxKLMN);
    const float32x4_t vcorrectionOPQR = vrsqrtsq_f32(vxOPQR, vrxOPQR);
    const float32x4_t vcorrectionSTUV = vrsqrtsq_f32(vxSTUV, vrxSTUV);
    const float32x4_t vcorrectionWXYZ = vrsqrtsq_f32(vxWXYZ, vrxWXYZ);

    vrsqrtx0123 = vmulq_f32(vrsqrtx0123, vcorrection0123);
    vrsqrtx4567 = vmulq_f32(vrsqrtx4567, vcorrection4567);
    vrsqrtx89AB = vmulq_f32(vrsqrtx89AB, vcorrection89AB);
    vrsqrtxCDEF = vmulq_f32(vrsqrtxCDEF, vcorrectionCDEF);
    vrsqrtxGHIJ = vmulq_f32(vrsqrtxGHIJ, vcorrectionGHIJ);
    vrsqrtxKLMN = vmulq_f32(vrsqrtxKLMN, vcorrectionKLMN);
    vrsqrtxOPQR = vmulq_f32(vrsqrtxOPQR, vcorrectionOPQR);
    vrsqrtxSTUV = vmulq_f32(vrsqrtxSTUV, vcorrectionSTUV);
    vrsqrtxWXYZ = vmulq_f32(vrsqrtxWXYZ, vcorrectionWXYZ);

    // Stage 3 setup: sqrtx = x / sqrt(x), halfrsqrtx = 0.5 / sqrt(x).
    float32x4_t vsqrtx0123 = vmulq_f32(vrsqrtx0123, vx0123);
    float32x4_t vhalfrsqrtx0123 = vmulq_f32(vrsqrtx0123, vhalf);
    float32x4_t vsqrtx4567 = vmulq_f32(vrsqrtx4567, vx4567);
    float32x4_t vhalfrsqrtx4567 = vmulq_f32(vrsqrtx4567, vhalf);
    float32x4_t vsqrtx89AB = vmulq_f32(vrsqrtx89AB, vx89AB);
    float32x4_t vhalfrsqrtx89AB = vmulq_f32(vrsqrtx89AB, vhalf);
    float32x4_t vsqrtxCDEF = vmulq_f32(vrsqrtxCDEF, vxCDEF);
    float32x4_t vhalfrsqrtxCDEF = vmulq_f32(vrsqrtxCDEF, vhalf);
    float32x4_t vsqrtxGHIJ = vmulq_f32(vrsqrtxGHIJ, vxGHIJ);
    float32x4_t vhalfrsqrtxGHIJ = vmulq_f32(vrsqrtxGHIJ, vhalf);
    float32x4_t vsqrtxKLMN = vmulq_f32(vrsqrtxKLMN, vxKLMN);
    float32x4_t vhalfrsqrtxKLMN = vmulq_f32(vrsqrtxKLMN, vhalf);
    float32x4_t vsqrtxOPQR = vmulq_f32(vrsqrtxOPQR, vxOPQR);
    float32x4_t vhalfrsqrtxOPQR = vmulq_f32(vrsqrtxOPQR, vhalf);
    float32x4_t vsqrtxSTUV = vmulq_f32(vrsqrtxSTUV, vxSTUV);
    float32x4_t vhalfrsqrtxSTUV = vmulq_f32(vrsqrtxSTUV, vhalf);
    float32x4_t vsqrtxWXYZ = vmulq_f32(vrsqrtxWXYZ, vxWXYZ);
    float32x4_t vhalfrsqrtxWXYZ = vmulq_f32(vrsqrtxWXYZ, vhalf);

    // Stage 3: shared residual 0.5 - sqrtx * halfrsqrtx, then refine both
    // sqrtx and halfrsqrtx with one fused multiply-add.
    const float32x4_t vresidual0123 = vfmsq_f32(vhalf, vsqrtx0123, vhalfrsqrtx0123);
    const float32x4_t vresidual4567 = vfmsq_f32(vhalf, vsqrtx4567, vhalfrsqrtx4567);
    const float32x4_t vresidual89AB = vfmsq_f32(vhalf, vsqrtx89AB, vhalfrsqrtx89AB);
    const float32x4_t vresidualCDEF = vfmsq_f32(vhalf, vsqrtxCDEF, vhalfrsqrtxCDEF);
    const float32x4_t vresidualGHIJ = vfmsq_f32(vhalf, vsqrtxGHIJ, vhalfrsqrtxGHIJ);
    const float32x4_t vresidualKLMN = vfmsq_f32(vhalf, vsqrtxKLMN, vhalfrsqrtxKLMN);
    const float32x4_t vresidualOPQR = vfmsq_f32(vhalf, vsqrtxOPQR, vhalfrsqrtxOPQR);
    const float32x4_t vresidualSTUV = vfmsq_f32(vhalf, vsqrtxSTUV, vhalfrsqrtxSTUV);
    const float32x4_t vresidualWXYZ = vfmsq_f32(vhalf, vsqrtxWXYZ, vhalfrsqrtxWXYZ);

    vhalfrsqrtx0123 = vfmaq_f32(vhalfrsqrtx0123, vresidual0123, vhalfrsqrtx0123);
    vsqrtx0123 = vfmaq_f32(vsqrtx0123, vresidual0123, vsqrtx0123);
    vhalfrsqrtx4567 = vfmaq_f32(vhalfrsqrtx4567, vresidual4567, vhalfrsqrtx4567);
    vsqrtx4567 = vfmaq_f32(vsqrtx4567, vresidual4567, vsqrtx4567);
    vhalfrsqrtx89AB = vfmaq_f32(vhalfrsqrtx89AB, vresidual89AB, vhalfrsqrtx89AB);
    vsqrtx89AB = vfmaq_f32(vsqrtx89AB, vresidual89AB, vsqrtx89AB);
    vhalfrsqrtxCDEF = vfmaq_f32(vhalfrsqrtxCDEF, vresidualCDEF, vhalfrsqrtxCDEF);
    vsqrtxCDEF = vfmaq_f32(vsqrtxCDEF, vresidualCDEF, vsqrtxCDEF);
    vhalfrsqrtxGHIJ = vfmaq_f32(vhalfrsqrtxGHIJ, vresidualGHIJ, vhalfrsqrtxGHIJ);
    vsqrtxGHIJ = vfmaq_f32(vsqrtxGHIJ, vresidualGHIJ, vsqrtxGHIJ);
    vhalfrsqrtxKLMN = vfmaq_f32(vhalfrsqrtxKLMN, vresidualKLMN, vhalfrsqrtxKLMN);
    vsqrtxKLMN = vfmaq_f32(vsqrtxKLMN, vresidualKLMN, vsqrtxKLMN);
    vhalfrsqrtxOPQR = vfmaq_f32(vhalfrsqrtxOPQR, vresidualOPQR, vhalfrsqrtxOPQR);
    vsqrtxOPQR = vfmaq_f32(vsqrtxOPQR, vresidualOPQR, vsqrtxOPQR);
    vhalfrsqrtxSTUV = vfmaq_f32(vhalfrsqrtxSTUV, vresidualSTUV, vhalfrsqrtxSTUV);
    vsqrtxSTUV = vfmaq_f32(vsqrtxSTUV, vresidualSTUV, vsqrtxSTUV);
    vhalfrsqrtxWXYZ = vfmaq_f32(vhalfrsqrtxWXYZ, vresidualWXYZ, vhalfrsqrtxWXYZ);
    vsqrtxWXYZ = vfmaq_f32(vsqrtxWXYZ, vresidualWXYZ, vsqrtxWXYZ);

    // Stage 4: final adjustment y = sqrtx + (x - sqrtx^2) * halfrsqrtx.
    const float32x4_t vadjustment0123 = vfmsq_f32(vx0123, vsqrtx0123, vsqrtx0123);
    const float32x4_t vadjustment4567 = vfmsq_f32(vx4567, vsqrtx4567, vsqrtx4567);
    const float32x4_t vadjustment89AB = vfmsq_f32(vx89AB, vsqrtx89AB, vsqrtx89AB);
    const float32x4_t vadjustmentCDEF = vfmsq_f32(vxCDEF, vsqrtxCDEF, vsqrtxCDEF);
    const float32x4_t vadjustmentGHIJ = vfmsq_f32(vxGHIJ, vsqrtxGHIJ, vsqrtxGHIJ);
    const float32x4_t vadjustmentKLMN = vfmsq_f32(vxKLMN, vsqrtxKLMN, vsqrtxKLMN);
    const float32x4_t vadjustmentOPQR = vfmsq_f32(vxOPQR, vsqrtxOPQR, vsqrtxOPQR);
    const float32x4_t vadjustmentSTUV = vfmsq_f32(vxSTUV, vsqrtxSTUV, vsqrtxSTUV);
    const float32x4_t vadjustmentWXYZ = vfmsq_f32(vxWXYZ, vsqrtxWXYZ, vsqrtxWXYZ);

    const float32x4_t vy0123 = vfmaq_f32(vsqrtx0123, vhalfrsqrtx0123, vadjustment0123);
    const float32x4_t vy4567 = vfmaq_f32(vsqrtx4567, vhalfrsqrtx4567, vadjustment4567);
    const float32x4_t vy89AB = vfmaq_f32(vsqrtx89AB, vhalfrsqrtx89AB, vadjustment89AB);
    const float32x4_t vyCDEF = vfmaq_f32(vsqrtxCDEF, vhalfrsqrtxCDEF, vadjustmentCDEF);
    const float32x4_t vyGHIJ = vfmaq_f32(vsqrtxGHIJ, vhalfrsqrtxGHIJ, vadjustmentGHIJ);
    const float32x4_t vyKLMN = vfmaq_f32(vsqrtxKLMN, vhalfrsqrtxKLMN, vadjustmentKLMN);
    const float32x4_t vyOPQR = vfmaq_f32(vsqrtxOPQR, vhalfrsqrtxOPQR, vadjustmentOPQR);
    const float32x4_t vySTUV = vfmaq_f32(vsqrtxSTUV, vhalfrsqrtxSTUV, vadjustmentSTUV);
    const float32x4_t vyWXYZ = vfmaq_f32(vsqrtxWXYZ, vhalfrsqrtxWXYZ, vadjustmentWXYZ);

    vst1q_f32(y, vy0123); y += 4;
    vst1q_f32(y, vy4567); y += 4;
    vst1q_f32(y, vy89AB); y += 4;
    vst1q_f32(y, vyCDEF); y += 4;
    vst1q_f32(y, vyGHIJ); y += 4;
    vst1q_f32(y, vyKLMN); y += 4;
    vst1q_f32(y, vyOPQR); y += 4;
    vst1q_f32(y, vySTUV); y += 4;
    vst1q_f32(y, vyWXYZ); y += 4;
  }
  // Remainder (fewer than 36 floats): scalar fallback.
  if XNN_UNLIKELY(n != 0) {
    do {
      const float vx = *x++;
      const float vy = sqrtf(vx);
      *y++ = vy;
      n -= sizeof(float);
    } while (n != 0);
  }
}