
Searched refs:vaddw_s32 (Results 1 – 22 of 22) sorted by relevance
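Note: vaddw_s32 is the NEON signed widening add. Each 32-bit lane of the second operand is sign-extended to 64 bits and added to the matching 64-bit lane of the first. A minimal semantics demo (values are illustrative, not taken from any hit below):

#include <arm_neon.h>
#include <stdint.h>

/* vaddw_s32 demo: lanes of b are sign-extended to int64 and added to a. */
static int64x2_t vaddw_s32_demo(void) {
  const int64_t a_vals[2] = { (int64_t) 1 << 40, -7 };
  const int32_t b_vals[2] = { 100, -200 };
  const int64x2_t a = vld1q_s64(a_vals);
  const int32x2_t b = vld1_s32(b_vals);
  return vaddw_s32(a, b);  /* lanes: { (1 << 40) + 100, -207 } */
}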

/external/XNNPACK/src/qs8-gavgpool/gen/
7x-minmax-neon-c32-acc2.c
160 const int64x2_t vadjprod01 = vaddw_s32(vprod01, vget_low_s32(vsgnacc0123)); in xnn_qs8_gavgpool_minmax_ukernel_7x__neon_c32_acc2()
162 const int64x2_t vadjprod45 = vaddw_s32(vprod45, vget_low_s32(vsgnacc4567)); in xnn_qs8_gavgpool_minmax_ukernel_7x__neon_c32_acc2()
164 const int64x2_t vadjprod89 = vaddw_s32(vprod89, vget_low_s32(vsgnacc89AB)); in xnn_qs8_gavgpool_minmax_ukernel_7x__neon_c32_acc2()
166 const int64x2_t vadjprodCD = vaddw_s32(vprodCD, vget_low_s32(vsgnaccCDEF)); in xnn_qs8_gavgpool_minmax_ukernel_7x__neon_c32_acc2()
168 const int64x2_t vadjprodGH = vaddw_s32(vprodGH, vget_low_s32(vsgnaccGHIJ)); in xnn_qs8_gavgpool_minmax_ukernel_7x__neon_c32_acc2()
170 const int64x2_t vadjprodKL = vaddw_s32(vprodKL, vget_low_s32(vsgnaccKLMN)); in xnn_qs8_gavgpool_minmax_ukernel_7x__neon_c32_acc2()
172 const int64x2_t vadjprodOP = vaddw_s32(vprodOP, vget_low_s32(vsgnaccOPQR)); in xnn_qs8_gavgpool_minmax_ukernel_7x__neon_c32_acc2()
174 const int64x2_t vadjprodST = vaddw_s32(vprodST, vget_low_s32(vsgnaccSTUV)); in xnn_qs8_gavgpool_minmax_ukernel_7x__neon_c32_acc2()
194 const int64x2_t vadjprod01 = vaddw_s32(vprod01, vget_low_s32(vsgnacc0123)); in xnn_qs8_gavgpool_minmax_ukernel_7x__neon_c32_acc2()
195 const int64x2_t vadjprod23 = vaddw_s32(vprod23, vget_high_s32(vsgnacc0123)); in xnn_qs8_gavgpool_minmax_ukernel_7x__neon_c32_acc2()
[all …]
7x-minmax-neon-c24-acc2.c
139 const int64x2_t vadjprod01 = vaddw_s32(vprod01, vget_low_s32(vsgnacc0123)); in xnn_qs8_gavgpool_minmax_ukernel_7x__neon_c24_acc2()
141 const int64x2_t vadjprod45 = vaddw_s32(vprod45, vget_low_s32(vsgnacc4567)); in xnn_qs8_gavgpool_minmax_ukernel_7x__neon_c24_acc2()
143 const int64x2_t vadjprod89 = vaddw_s32(vprod89, vget_low_s32(vsgnacc89AB)); in xnn_qs8_gavgpool_minmax_ukernel_7x__neon_c24_acc2()
145 const int64x2_t vadjprodCD = vaddw_s32(vprodCD, vget_low_s32(vsgnaccCDEF)); in xnn_qs8_gavgpool_minmax_ukernel_7x__neon_c24_acc2()
147 const int64x2_t vadjprodGH = vaddw_s32(vprodGH, vget_low_s32(vsgnaccGHIJ)); in xnn_qs8_gavgpool_minmax_ukernel_7x__neon_c24_acc2()
149 const int64x2_t vadjprodKL = vaddw_s32(vprodKL, vget_low_s32(vsgnaccKLMN)); in xnn_qs8_gavgpool_minmax_ukernel_7x__neon_c24_acc2()
165 const int64x2_t vadjprod01 = vaddw_s32(vprod01, vget_low_s32(vsgnacc0123)); in xnn_qs8_gavgpool_minmax_ukernel_7x__neon_c24_acc2()
166 const int64x2_t vadjprod23 = vaddw_s32(vprod23, vget_high_s32(vsgnacc0123)); in xnn_qs8_gavgpool_minmax_ukernel_7x__neon_c24_acc2()
167 const int64x2_t vadjprod45 = vaddw_s32(vprod45, vget_low_s32(vsgnacc4567)); in xnn_qs8_gavgpool_minmax_ukernel_7x__neon_c24_acc2()
168 const int64x2_t vadjprod67 = vaddw_s32(vprod67, vget_high_s32(vsgnacc4567)); in xnn_qs8_gavgpool_minmax_ukernel_7x__neon_c24_acc2()
[all …]
7x-minmax-neon-c16-acc2.c
118 const int64x2_t vadjprod01 = vaddw_s32(vprod01, vget_low_s32(vsgnacc0123)); in xnn_qs8_gavgpool_minmax_ukernel_7x__neon_c16_acc2()
120 const int64x2_t vadjprod45 = vaddw_s32(vprod45, vget_low_s32(vsgnacc4567)); in xnn_qs8_gavgpool_minmax_ukernel_7x__neon_c16_acc2()
122 const int64x2_t vadjprod89 = vaddw_s32(vprod89, vget_low_s32(vsgnacc89AB)); in xnn_qs8_gavgpool_minmax_ukernel_7x__neon_c16_acc2()
124 const int64x2_t vadjprodCD = vaddw_s32(vprodCD, vget_low_s32(vsgnaccCDEF)); in xnn_qs8_gavgpool_minmax_ukernel_7x__neon_c16_acc2()
136 const int64x2_t vadjprod01 = vaddw_s32(vprod01, vget_low_s32(vsgnacc0123)); in xnn_qs8_gavgpool_minmax_ukernel_7x__neon_c16_acc2()
137 const int64x2_t vadjprod23 = vaddw_s32(vprod23, vget_high_s32(vsgnacc0123)); in xnn_qs8_gavgpool_minmax_ukernel_7x__neon_c16_acc2()
138 const int64x2_t vadjprod45 = vaddw_s32(vprod45, vget_low_s32(vsgnacc4567)); in xnn_qs8_gavgpool_minmax_ukernel_7x__neon_c16_acc2()
139 const int64x2_t vadjprod67 = vaddw_s32(vprod67, vget_high_s32(vsgnacc4567)); in xnn_qs8_gavgpool_minmax_ukernel_7x__neon_c16_acc2()
140 const int64x2_t vadjprod89 = vaddw_s32(vprod89, vget_low_s32(vsgnacc89AB)); in xnn_qs8_gavgpool_minmax_ukernel_7x__neon_c16_acc2()
141 const int64x2_t vadjprodAB = vaddw_s32(vprodAB, vget_high_s32(vsgnacc89AB)); in xnn_qs8_gavgpool_minmax_ukernel_7x__neon_c16_acc2()
[all …]
7x-minmax-neon-c8-acc2.c
97 const int64x2_t vadjprod01 = vaddw_s32(vprod01, vget_low_s32(vsgnacc0123)); in xnn_qs8_gavgpool_minmax_ukernel_7x__neon_c8_acc2()
99 const int64x2_t vadjprod45 = vaddw_s32(vprod45, vget_low_s32(vsgnacc4567)); in xnn_qs8_gavgpool_minmax_ukernel_7x__neon_c8_acc2()
107 const int64x2_t vadjprod01 = vaddw_s32(vprod01, vget_low_s32(vsgnacc0123)); in xnn_qs8_gavgpool_minmax_ukernel_7x__neon_c8_acc2()
108 const int64x2_t vadjprod23 = vaddw_s32(vprod23, vget_high_s32(vsgnacc0123)); in xnn_qs8_gavgpool_minmax_ukernel_7x__neon_c8_acc2()
109 const int64x2_t vadjprod45 = vaddw_s32(vprod45, vget_low_s32(vsgnacc4567)); in xnn_qs8_gavgpool_minmax_ukernel_7x__neon_c8_acc2()
110 const int64x2_t vadjprod67 = vaddw_s32(vprod67, vget_high_s32(vsgnacc4567)); in xnn_qs8_gavgpool_minmax_ukernel_7x__neon_c8_acc2()
174 const int64x2_t vadjprod01 = vaddw_s32(vprod01, vget_low_s32(vsgnacc0123)); in xnn_qs8_gavgpool_minmax_ukernel_7x__neon_c8_acc2()
176 const int64x2_t vadjprod45 = vaddw_s32(vprod45, vget_low_s32(vsgnacc4567)); in xnn_qs8_gavgpool_minmax_ukernel_7x__neon_c8_acc2()
184 const int64x2_t vadjprod01 = vaddw_s32(vprod01, vget_low_s32(vsgnacc0123)); in xnn_qs8_gavgpool_minmax_ukernel_7x__neon_c8_acc2()
185 const int64x2_t vadjprod23 = vaddw_s32(vprod23, vget_high_s32(vsgnacc0123)); in xnn_qs8_gavgpool_minmax_ukernel_7x__neon_c8_acc2()
[all …]
7p7x-minmax-neon-c32-acc2.c
417 const int64x2_t vadjprod01 = vaddw_s32(vprod01, vget_low_s32(vsgnacc0123)); in xnn_qs8_gavgpool_minmax_ukernel_7p7x__neon_c32_acc2()
419 const int64x2_t vadjprod45 = vaddw_s32(vprod45, vget_low_s32(vsgnacc4567)); in xnn_qs8_gavgpool_minmax_ukernel_7p7x__neon_c32_acc2()
421 const int64x2_t vadjprod89 = vaddw_s32(vprod89, vget_low_s32(vsgnacc89AB)); in xnn_qs8_gavgpool_minmax_ukernel_7p7x__neon_c32_acc2()
423 const int64x2_t vadjprodCD = vaddw_s32(vprodCD, vget_low_s32(vsgnaccCDEF)); in xnn_qs8_gavgpool_minmax_ukernel_7p7x__neon_c32_acc2()
425 const int64x2_t vadjprodGH = vaddw_s32(vprodGH, vget_low_s32(vsgnaccGHIJ)); in xnn_qs8_gavgpool_minmax_ukernel_7p7x__neon_c32_acc2()
427 const int64x2_t vadjprodKL = vaddw_s32(vprodKL, vget_low_s32(vsgnaccKLMN)); in xnn_qs8_gavgpool_minmax_ukernel_7p7x__neon_c32_acc2()
429 const int64x2_t vadjprodOP = vaddw_s32(vprodOP, vget_low_s32(vsgnaccOPQR)); in xnn_qs8_gavgpool_minmax_ukernel_7p7x__neon_c32_acc2()
431 const int64x2_t vadjprodST = vaddw_s32(vprodST, vget_low_s32(vsgnaccSTUV)); in xnn_qs8_gavgpool_minmax_ukernel_7p7x__neon_c32_acc2()
451 const int64x2_t vadjprod01 = vaddw_s32(vprod01, vget_low_s32(vsgnacc0123)); in xnn_qs8_gavgpool_minmax_ukernel_7p7x__neon_c32_acc2()
452 const int64x2_t vadjprod23 = vaddw_s32(vprod23, vget_high_s32(vsgnacc0123)); in xnn_qs8_gavgpool_minmax_ukernel_7p7x__neon_c32_acc2()
[all …]
7p7x-minmax-neon-c16-acc2.c
238 const int64x2_t vadjprod01 = vaddw_s32(vprod01, vget_low_s32(vsgnacc0123)); in xnn_qs8_gavgpool_minmax_ukernel_7p7x__neon_c16_acc2()
240 const int64x2_t vadjprod45 = vaddw_s32(vprod45, vget_low_s32(vsgnacc4567)); in xnn_qs8_gavgpool_minmax_ukernel_7p7x__neon_c16_acc2()
242 const int64x2_t vadjprod89 = vaddw_s32(vprod89, vget_low_s32(vsgnacc89AB)); in xnn_qs8_gavgpool_minmax_ukernel_7p7x__neon_c16_acc2()
244 const int64x2_t vadjprodCD = vaddw_s32(vprodCD, vget_low_s32(vsgnaccCDEF)); in xnn_qs8_gavgpool_minmax_ukernel_7p7x__neon_c16_acc2()
256 const int64x2_t vadjprod01 = vaddw_s32(vprod01, vget_low_s32(vsgnacc0123)); in xnn_qs8_gavgpool_minmax_ukernel_7p7x__neon_c16_acc2()
257 const int64x2_t vadjprod23 = vaddw_s32(vprod23, vget_high_s32(vsgnacc0123)); in xnn_qs8_gavgpool_minmax_ukernel_7p7x__neon_c16_acc2()
258 const int64x2_t vadjprod45 = vaddw_s32(vprod45, vget_low_s32(vsgnacc4567)); in xnn_qs8_gavgpool_minmax_ukernel_7p7x__neon_c16_acc2()
259 const int64x2_t vadjprod67 = vaddw_s32(vprod67, vget_high_s32(vsgnacc4567)); in xnn_qs8_gavgpool_minmax_ukernel_7p7x__neon_c16_acc2()
260 const int64x2_t vadjprod89 = vaddw_s32(vprod89, vget_low_s32(vsgnacc89AB)); in xnn_qs8_gavgpool_minmax_ukernel_7p7x__neon_c16_acc2()
261 const int64x2_t vadjprodAB = vaddw_s32(vprodAB, vget_high_s32(vsgnacc89AB)); in xnn_qs8_gavgpool_minmax_ukernel_7p7x__neon_c16_acc2()
[all …]
7p7x-minmax-neon-c24-acc2.c
358 const int64x2_t vadjprod01 = vaddw_s32(vprod01, vget_low_s32(vsgnacc0123)); in xnn_qs8_gavgpool_minmax_ukernel_7p7x__neon_c24_acc2()
360 const int64x2_t vadjprod45 = vaddw_s32(vprod45, vget_low_s32(vsgnacc4567)); in xnn_qs8_gavgpool_minmax_ukernel_7p7x__neon_c24_acc2()
362 const int64x2_t vadjprod89 = vaddw_s32(vprod89, vget_low_s32(vsgnacc89AB)); in xnn_qs8_gavgpool_minmax_ukernel_7p7x__neon_c24_acc2()
364 const int64x2_t vadjprodCD = vaddw_s32(vprodCD, vget_low_s32(vsgnaccCDEF)); in xnn_qs8_gavgpool_minmax_ukernel_7p7x__neon_c24_acc2()
366 const int64x2_t vadjprodGH = vaddw_s32(vprodGH, vget_low_s32(vsgnaccGHIJ)); in xnn_qs8_gavgpool_minmax_ukernel_7p7x__neon_c24_acc2()
368 const int64x2_t vadjprodKL = vaddw_s32(vprodKL, vget_low_s32(vsgnaccKLMN)); in xnn_qs8_gavgpool_minmax_ukernel_7p7x__neon_c24_acc2()
384 const int64x2_t vadjprod01 = vaddw_s32(vprod01, vget_low_s32(vsgnacc0123)); in xnn_qs8_gavgpool_minmax_ukernel_7p7x__neon_c24_acc2()
385 const int64x2_t vadjprod23 = vaddw_s32(vprod23, vget_high_s32(vsgnacc0123)); in xnn_qs8_gavgpool_minmax_ukernel_7p7x__neon_c24_acc2()
386 const int64x2_t vadjprod45 = vaddw_s32(vprod45, vget_low_s32(vsgnacc4567)); in xnn_qs8_gavgpool_minmax_ukernel_7p7x__neon_c24_acc2()
387 const int64x2_t vadjprod67 = vaddw_s32(vprod67, vget_high_s32(vsgnacc4567)); in xnn_qs8_gavgpool_minmax_ukernel_7p7x__neon_c24_acc2()
[all …]
7p7x-minmax-neon-c8-acc2.c
179 const int64x2_t vadjprod01 = vaddw_s32(vprod01, vget_low_s32(vsgnacc0123)); in xnn_qs8_gavgpool_minmax_ukernel_7p7x__neon_c8_acc2()
181 const int64x2_t vadjprod45 = vaddw_s32(vprod45, vget_low_s32(vsgnacc4567)); in xnn_qs8_gavgpool_minmax_ukernel_7p7x__neon_c8_acc2()
189 const int64x2_t vadjprod01 = vaddw_s32(vprod01, vget_low_s32(vsgnacc0123)); in xnn_qs8_gavgpool_minmax_ukernel_7p7x__neon_c8_acc2()
190 const int64x2_t vadjprod23 = vaddw_s32(vprod23, vget_high_s32(vsgnacc0123)); in xnn_qs8_gavgpool_minmax_ukernel_7p7x__neon_c8_acc2()
191 const int64x2_t vadjprod45 = vaddw_s32(vprod45, vget_low_s32(vsgnacc4567)); in xnn_qs8_gavgpool_minmax_ukernel_7p7x__neon_c8_acc2()
192 const int64x2_t vadjprod67 = vaddw_s32(vprod67, vget_high_s32(vsgnacc4567)); in xnn_qs8_gavgpool_minmax_ukernel_7p7x__neon_c8_acc2()
259 const int64x2_t vadjprod01 = vaddw_s32(vprod01, vget_low_s32(vsgnacc0123)); in xnn_qs8_gavgpool_minmax_ukernel_7p7x__neon_c8_acc2()
261 const int64x2_t vadjprod45 = vaddw_s32(vprod45, vget_low_s32(vsgnacc4567)); in xnn_qs8_gavgpool_minmax_ukernel_7p7x__neon_c8_acc2()
269 const int64x2_t vadjprod01 = vaddw_s32(vprod01, vget_low_s32(vsgnacc0123)); in xnn_qs8_gavgpool_minmax_ukernel_7p7x__neon_c8_acc2()
270 const int64x2_t vadjprod23 = vaddw_s32(vprod23, vget_high_s32(vsgnacc0123)); in xnn_qs8_gavgpool_minmax_ukernel_7p7x__neon_c8_acc2()
[all …]
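Every XNNPACK hit in this listing (the gavgpool, avgpool, and requantization kernels) applies the same fixup: a 32x32-to-64-bit product from vmull_s32 is adjusted with a sign mask via vaddw_s32 so that the rounding right shift that follows rounds halfway cases away from zero. A hedged sketch of the pattern, with illustrative names rather than XNNPACK's:

#include <arm_neon.h>

/* The 64-bit product is decremented by 1 when the input is negative (by
 * widening-adding the all-ones compare mask), so the rounding right shift
 * below rounds halfway cases away from zero instead of toward +infinity. */
static int32x2_t scale_round_away_from_zero(int32x2_t acc,
                                            int32x2_t multiplier,
                                            int64x2_t left_shift /* negative lanes = right shift */) {
  const int64x2_t product = vmull_s32(acc, multiplier);
  const int32x2_t sign_mask =
      vreinterpret_s32_u32(vclt_s32(acc, vdup_n_s32(0)));   /* -1 where acc < 0 */
  const int64x2_t adjusted = vaddw_s32(product, sign_mask); /* product - 1 if acc < 0 */
  return vmovn_s64(vrshlq_s64(adjusted, left_shift));       /* rounding shift, narrow */
}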
/external/XNNPACK/src/qs8-requantization/
precise-neon.c
81 …const int64x2_t x01_adjusted_product = vaddw_s32(x01_product, vreinterpret_s32_u32(vget_low_u32(x_… in xnn_qs8_requantize_precise__neon()
83 …const int64x2_t y01_adjusted_product = vaddw_s32(y01_product, vreinterpret_s32_u32(vget_low_u32(y_… in xnn_qs8_requantize_precise__neon()
85 …const int64x2_t z01_adjusted_product = vaddw_s32(z01_product, vreinterpret_s32_u32(vget_low_u32(z_… in xnn_qs8_requantize_precise__neon()
87 …const int64x2_t w01_adjusted_product = vaddw_s32(w01_product, vreinterpret_s32_u32(vget_low_u32(w_… in xnn_qs8_requantize_precise__neon()
90 …const int64x2_t x01_adjusted_product = vaddw_s32(x01_product, vreinterpret_s32_u32(vget_low_u32(x_… in xnn_qs8_requantize_precise__neon()
91 …const int64x2_t x23_adjusted_product = vaddw_s32(x23_product, vreinterpret_s32_u32(vget_high_u32(x… in xnn_qs8_requantize_precise__neon()
92 …const int64x2_t y01_adjusted_product = vaddw_s32(y01_product, vreinterpret_s32_u32(vget_low_u32(y_… in xnn_qs8_requantize_precise__neon()
93 …const int64x2_t y23_adjusted_product = vaddw_s32(y23_product, vreinterpret_s32_u32(vget_high_u32(y… in xnn_qs8_requantize_precise__neon()
94 …const int64x2_t z01_adjusted_product = vaddw_s32(z01_product, vreinterpret_s32_u32(vget_low_u32(z_… in xnn_qs8_requantize_precise__neon()
95 …const int64x2_t z23_adjusted_product = vaddw_s32(z23_product, vreinterpret_s32_u32(vget_high_u32(z… in xnn_qs8_requantize_precise__neon()
[all …]
/external/XNNPACK/src/qu8-requantization/
precise-neon.c
81 …const int64x2_t x01_adjusted_product = vaddw_s32(x01_product, vreinterpret_s32_u32(vget_low_u32(x_… in xnn_qu8_requantize_precise__neon()
83 …const int64x2_t y01_adjusted_product = vaddw_s32(y01_product, vreinterpret_s32_u32(vget_low_u32(y_… in xnn_qu8_requantize_precise__neon()
85 …const int64x2_t z01_adjusted_product = vaddw_s32(z01_product, vreinterpret_s32_u32(vget_low_u32(z_… in xnn_qu8_requantize_precise__neon()
87 …const int64x2_t w01_adjusted_product = vaddw_s32(w01_product, vreinterpret_s32_u32(vget_low_u32(w_… in xnn_qu8_requantize_precise__neon()
90 …const int64x2_t x01_adjusted_product = vaddw_s32(x01_product, vreinterpret_s32_u32(vget_low_u32(x_… in xnn_qu8_requantize_precise__neon()
91 …const int64x2_t x23_adjusted_product = vaddw_s32(x23_product, vreinterpret_s32_u32(vget_high_u32(x… in xnn_qu8_requantize_precise__neon()
92 …const int64x2_t y01_adjusted_product = vaddw_s32(y01_product, vreinterpret_s32_u32(vget_low_u32(y_… in xnn_qu8_requantize_precise__neon()
93 …const int64x2_t y23_adjusted_product = vaddw_s32(y23_product, vreinterpret_s32_u32(vget_high_u32(y… in xnn_qu8_requantize_precise__neon()
94 …const int64x2_t z01_adjusted_product = vaddw_s32(z01_product, vreinterpret_s32_u32(vget_low_u32(z_… in xnn_qu8_requantize_precise__neon()
95 …const int64x2_t z23_adjusted_product = vaddw_s32(z23_product, vreinterpret_s32_u32(vget_high_u32(z… in xnn_qu8_requantize_precise__neon()
[all …]
/external/XNNPACK/src/qu8-gavgpool/
7x-minmax-neon-c8.c
95 const int64x2_t vadjusted_product01 = vaddw_s32(vproduct01, vget_low_s32(vneg_mask_lo)); in xnn_qu8_gavgpool_minmax_ukernel_7x__neon_c8()
97 const int64x2_t vadjusted_product45 = vaddw_s32(vproduct45, vget_low_s32(vneg_mask_hi)); in xnn_qu8_gavgpool_minmax_ukernel_7x__neon_c8()
105 const int64x2_t vadjusted_product01 = vaddw_s32(vproduct01, vget_low_s32(vneg_mask_lo)); in xnn_qu8_gavgpool_minmax_ukernel_7x__neon_c8()
106 const int64x2_t vadjusted_product23 = vaddw_s32(vproduct23, vget_high_s32(vneg_mask_lo)); in xnn_qu8_gavgpool_minmax_ukernel_7x__neon_c8()
107 const int64x2_t vadjusted_product45 = vaddw_s32(vproduct45, vget_low_s32(vneg_mask_hi)); in xnn_qu8_gavgpool_minmax_ukernel_7x__neon_c8()
108 const int64x2_t vadjusted_product67 = vaddw_s32(vproduct67, vget_high_s32(vneg_mask_hi)); in xnn_qu8_gavgpool_minmax_ukernel_7x__neon_c8()
165 const int64x2_t vadjusted_product01 = vaddw_s32(vproduct01, vget_low_s32(vneg_mask_lo)); in xnn_qu8_gavgpool_minmax_ukernel_7x__neon_c8()
167 const int64x2_t vadjusted_product45 = vaddw_s32(vproduct45, vget_low_s32(vneg_mask_hi)); in xnn_qu8_gavgpool_minmax_ukernel_7x__neon_c8()
175 const int64x2_t vadjusted_product01 = vaddw_s32(vproduct01, vget_low_s32(vneg_mask_lo)); in xnn_qu8_gavgpool_minmax_ukernel_7x__neon_c8()
176 const int64x2_t vadjusted_product23 = vaddw_s32(vproduct23, vget_high_s32(vneg_mask_lo)); in xnn_qu8_gavgpool_minmax_ukernel_7x__neon_c8()
[all …]
7p7x-minmax-neon-c8.c
171 const int64x2_t vadjusted_product01 = vaddw_s32(vproduct01, vget_low_s32(vneg_mask_lo)); in xnn_qu8_gavgpool_minmax_ukernel_7p7x__neon_c8()
173 const int64x2_t vadjusted_product45 = vaddw_s32(vproduct45, vget_low_s32(vneg_mask_hi)); in xnn_qu8_gavgpool_minmax_ukernel_7p7x__neon_c8()
181 const int64x2_t vadjusted_product01 = vaddw_s32(vproduct01, vget_low_s32(vneg_mask_lo)); in xnn_qu8_gavgpool_minmax_ukernel_7p7x__neon_c8()
182 const int64x2_t vadjusted_product23 = vaddw_s32(vproduct23, vget_high_s32(vneg_mask_lo)); in xnn_qu8_gavgpool_minmax_ukernel_7p7x__neon_c8()
183 const int64x2_t vadjusted_product45 = vaddw_s32(vproduct45, vget_low_s32(vneg_mask_hi)); in xnn_qu8_gavgpool_minmax_ukernel_7p7x__neon_c8()
184 const int64x2_t vadjusted_product67 = vaddw_s32(vproduct67, vget_high_s32(vneg_mask_hi)); in xnn_qu8_gavgpool_minmax_ukernel_7p7x__neon_c8()
243 const int64x2_t vadjusted_product01 = vaddw_s32(vproduct01, vget_low_s32(vneg_mask_lo)); in xnn_qu8_gavgpool_minmax_ukernel_7p7x__neon_c8()
245 const int64x2_t vadjusted_product45 = vaddw_s32(vproduct45, vget_low_s32(vneg_mask_hi)); in xnn_qu8_gavgpool_minmax_ukernel_7p7x__neon_c8()
253 const int64x2_t vadjusted_product01 = vaddw_s32(vproduct01, vget_low_s32(vneg_mask_lo)); in xnn_qu8_gavgpool_minmax_ukernel_7p7x__neon_c8()
254 const int64x2_t vadjusted_product23 = vaddw_s32(vproduct23, vget_high_s32(vneg_mask_lo)); in xnn_qu8_gavgpool_minmax_ukernel_7p7x__neon_c8()
[all …]
/external/XNNPACK/src/qu8-avgpool/
9x-minmax-neon-c8.c
150 const int64x2_t vadjusted_product01 = vaddw_s32(vproduct01, vget_low_s32(vneg_mask_lo)); in xnn_qu8_avgpool_minmax_ukernel_9x__neon_c8()
152 const int64x2_t vadjusted_product45 = vaddw_s32(vproduct45, vget_low_s32(vneg_mask_hi)); in xnn_qu8_avgpool_minmax_ukernel_9x__neon_c8()
160 const int64x2_t vadjusted_product01 = vaddw_s32(vproduct01, vget_low_s32(vneg_mask_lo)); in xnn_qu8_avgpool_minmax_ukernel_9x__neon_c8()
161 const int64x2_t vadjusted_product23 = vaddw_s32(vproduct23, vget_high_s32(vneg_mask_lo)); in xnn_qu8_avgpool_minmax_ukernel_9x__neon_c8()
162 const int64x2_t vadjusted_product45 = vaddw_s32(vproduct45, vget_low_s32(vneg_mask_hi)); in xnn_qu8_avgpool_minmax_ukernel_9x__neon_c8()
163 const int64x2_t vadjusted_product67 = vaddw_s32(vproduct67, vget_high_s32(vneg_mask_hi)); in xnn_qu8_avgpool_minmax_ukernel_9x__neon_c8()
223 const int64x2_t vadjusted_product01 = vaddw_s32(vproduct01, vget_low_s32(vneg_mask_lo)); in xnn_qu8_avgpool_minmax_ukernel_9x__neon_c8()
225 const int64x2_t vadjusted_product45 = vaddw_s32(vproduct45, vget_low_s32(vneg_mask_hi)); in xnn_qu8_avgpool_minmax_ukernel_9x__neon_c8()
233 const int64x2_t vadjusted_product01 = vaddw_s32(vproduct01, vget_low_s32(vneg_mask_lo)); in xnn_qu8_avgpool_minmax_ukernel_9x__neon_c8()
234 const int64x2_t vadjusted_product23 = vaddw_s32(vproduct23, vget_high_s32(vneg_mask_lo)); in xnn_qu8_avgpool_minmax_ukernel_9x__neon_c8()
[all …]
9p8x-minmax-neon-c8.c
294 const int64x2_t vadjusted_product01 = vaddw_s32(vproduct01, vget_low_s32(vneg_mask_lo)); in xnn_qu8_avgpool_minmax_ukernel_9p8x__neon_c8()
296 const int64x2_t vadjusted_product45 = vaddw_s32(vproduct45, vget_low_s32(vneg_mask_hi)); in xnn_qu8_avgpool_minmax_ukernel_9p8x__neon_c8()
304 const int64x2_t vadjusted_product01 = vaddw_s32(vproduct01, vget_low_s32(vneg_mask_lo)); in xnn_qu8_avgpool_minmax_ukernel_9p8x__neon_c8()
305 const int64x2_t vadjusted_product23 = vaddw_s32(vproduct23, vget_high_s32(vneg_mask_lo)); in xnn_qu8_avgpool_minmax_ukernel_9p8x__neon_c8()
306 const int64x2_t vadjusted_product45 = vaddw_s32(vproduct45, vget_low_s32(vneg_mask_hi)); in xnn_qu8_avgpool_minmax_ukernel_9p8x__neon_c8()
307 const int64x2_t vadjusted_product67 = vaddw_s32(vproduct67, vget_high_s32(vneg_mask_hi)); in xnn_qu8_avgpool_minmax_ukernel_9p8x__neon_c8()
368 const int64x2_t vadjusted_product01 = vaddw_s32(vproduct01, vget_low_s32(vneg_mask_lo)); in xnn_qu8_avgpool_minmax_ukernel_9p8x__neon_c8()
370 const int64x2_t vadjusted_product45 = vaddw_s32(vproduct45, vget_low_s32(vneg_mask_hi)); in xnn_qu8_avgpool_minmax_ukernel_9p8x__neon_c8()
378 const int64x2_t vadjusted_product01 = vaddw_s32(vproduct01, vget_low_s32(vneg_mask_lo)); in xnn_qu8_avgpool_minmax_ukernel_9p8x__neon_c8()
379 const int64x2_t vadjusted_product23 = vaddw_s32(vproduct23, vget_high_s32(vneg_mask_lo)); in xnn_qu8_avgpool_minmax_ukernel_9p8x__neon_c8()
[all …]
/external/XNNPACK/src/qs8-gavgpool/
unipass-neon.c.in
94 …const int64x2_t vadjprod${ABC[C:C+2]} = vaddw_s32(vprod${ABC[C:C+2]}, vget_low_s32(vsgnacc${ABC[C:…
102 …const int64x2_t vadjprod${ABC[C:C+2]} = vaddw_s32(vprod${ABC[C:C+2]}, vget_low_s32(vsgnacc${ABC[C:…
103 …const int64x2_t vadjprod${ABC[C+2:C+4]} = vaddw_s32(vprod${ABC[C+2:C+4]}, vget_high_s32(vsgnacc${A…
191 …const int64x2_t vadjprod${ABC[0:2]} = vaddw_s32(vprod${ABC[0:2]}, vget_low_s32(vsgnacc${ABC[0:4]})…
193 …const int64x2_t vadjprod${ABC[4:6]} = vaddw_s32(vprod${ABC[4:6]}, vget_low_s32(vsgnacc${ABC[4:8]})…
201 …const int64x2_t vadjprod${ABC[0:2]} = vaddw_s32(vprod${ABC[0:2]}, vget_low_s32(vsgnacc${ABC[0:4]})…
202 …const int64x2_t vadjprod${ABC[2:4]} = vaddw_s32(vprod${ABC[2:4]}, vget_high_s32(vsgnacc${ABC[0:4]}…
203 …const int64x2_t vadjprod${ABC[4:6]} = vaddw_s32(vprod${ABC[4:6]}, vget_low_s32(vsgnacc${ABC[4:8]})…
204 …const int64x2_t vadjprod${ABC[6:8]} = vaddw_s32(vprod${ABC[6:8]}, vget_high_s32(vsgnacc${ABC[4:8]}…
multipass-neon.c.in
247 …const int64x2_t vadjprod${ABC[C:C+2]} = vaddw_s32(vprod${ABC[C:C+2]}, vget_low_s32(vsgnacc${ABC[C:…
255 …const int64x2_t vadjprod${ABC[C:C+2]} = vaddw_s32(vprod${ABC[C:C+2]}, vget_low_s32(vsgnacc${ABC[C:…
256 …const int64x2_t vadjprod${ABC[C+2:C+4]} = vaddw_s32(vprod${ABC[C+2:C+4]}, vget_high_s32(vsgnacc${A…
350 …const int64x2_t vadjprod${ABC[0:2]} = vaddw_s32(vprod${ABC[0:2]}, vget_low_s32(vsgnacc${ABC[0:4]})…
352 …const int64x2_t vadjprod${ABC[4:6]} = vaddw_s32(vprod${ABC[4:6]}, vget_low_s32(vsgnacc${ABC[4:8]})…
360 …const int64x2_t vadjprod${ABC[0:2]} = vaddw_s32(vprod${ABC[0:2]}, vget_low_s32(vsgnacc${ABC[0:4]})…
361 …const int64x2_t vadjprod${ABC[2:4]} = vaddw_s32(vprod${ABC[2:4]}, vget_high_s32(vsgnacc${ABC[0:4]}…
362 …const int64x2_t vadjprod${ABC[4:6]} = vaddw_s32(vprod${ABC[4:6]}, vget_low_s32(vsgnacc${ABC[4:8]})…
363 …const int64x2_t vadjprod${ABC[6:8]} = vaddw_s32(vprod${ABC[6:8]}, vget_high_s32(vsgnacc${ABC[4:8]}…
/external/libopus/silk/fixed/arm/
warped_autocorrelation_FIX_neon_intr.c
213 t_s64x2 = vaddw_s32( t_s64x2, vget_low_s32( t_s32x4 ) ); in silk_warped_autocorrelation_FIX_neon()
214 t_s64x2 = vaddw_s32( t_s64x2, vget_high_s32( t_s32x4 ) ); in silk_warped_autocorrelation_FIX_neon()
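The libopus hit uses vaddw_s32 differently: as a widening horizontal accumulator. A hedged sketch with illustrative names:

#include <arm_neon.h>

/* All four int32 lanes of v are folded into a pair of int64 accumulators,
 * widening the low and high halves with two vaddw_s32 calls. */
static int64x2_t accumulate_s32x4(int64x2_t acc, int32x4_t v) {
  acc = vaddw_s32(acc, vget_low_s32(v));   /* acc += sign_extend(v[0..1]) */
  acc = vaddw_s32(acc, vget_high_s32(v));  /* acc += sign_extend(v[2..3]) */
  return acc;
}

If only the final scalar sum matters, vpadalq_s32 folds the four lanes in one instruction; the per-lane partition of the partial sums differs, but the total is the same.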
/external/llvm-project/clang/test/CodeGen/
aarch64-neon-intrinsics.c
6732 return vaddw_s32(a, b); in test_vaddw_s32()
arm_neon_intrinsics.c
731 return vaddw_s32(a, b); in test_vaddw_s32()
/external/clang/test/CodeGen/
aarch64-neon-intrinsics.c
7413 return vaddw_s32(a, b); in test_vaddw_s32()
arm_neon_intrinsics.c
852 return vaddw_s32(a, b); in test_vaddw_s32()
/external/neon_2_sse/
NEON_2_SSE.h
473 _NEON2SSESTORAGE int64x2_t vaddw_s32(int64x2_t a, int32x2_t b); // VADDW.S32 q0,q0,d0
2950 _NEON2SSESTORAGE int64x2_t vaddw_s32(int64x2_t a, int32x2_t b); // VADDW.S32 q0,q0,d0
2951 _NEON2SSE_INLINE int64x2_t vaddw_s32(int64x2_t a, int32x2_t b) // VADDW.S32 q0,q0,d0 in vaddw_s32() function
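The NEON_2_SSE hits declare and define an SSE translation of the intrinsic. A hedged sketch of the obvious mapping (not the header's actual body):

#include <emmintrin.h>  /* SSE2: _mm_add_epi64 */
#include <smmintrin.h>  /* SSE4.1: _mm_cvtepi32_epi64 */

/* Assumed mapping: the two int32 lanes, kept in the low 64 bits of an
 * __m128i, are sign-extended to int64 and added lane-wise. Requires SSE4.1. */
static __m128i vaddw_s32_sse_sketch(__m128i a /* int64x2 */,
                                    __m128i b  /* int32x2 in low 64 bits */) {
  return _mm_add_epi64(a, _mm_cvtepi32_epi64(b));
}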