/external/XNNPACK/src/f32-dwconv2d-chw/gen/
D | 5x5p2-minmax-neon-4x4.c | in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_4x4():
      259  const float32x4_t vi4x6789 = vextq_f32(vi4x4567, vi4x89AB, 2);  (local)
      276  vo3p0 = vmlaq_lane_f32(vo3p0, vi4x6789, vget_high_f32(vw89AB), 0);
      280  vo2p0 = vmlaq_lane_f32(vo2p0, vi4x6789, vget_high_f32(vwCDEF), 1);
      284  vo1p0 = vmlaq_lane_f32(vo1p0, vi4x6789, vget_low_f32(vwKLMN), 0);
      288  vo0p0 = vmlaq_lane_f32(vo0p0, vi4x6789, vwOP, 1);
      477  const float32x4_t vi4x6789 = vextq_f32(vi4x4567, vi4x89AB, 2);  (local)
      494  vo3p0 = vmlaq_lane_f32(vo3p0, vi4x6789, vget_high_f32(vw89AB), 0);
      498  vo2p0 = vmlaq_lane_f32(vo2p0, vi4x6789, vget_high_f32(vwCDEF), 1);
      502  vo1p0 = vmlaq_lane_f32(vo1p0, vi4x6789, vget_low_f32(vwKLMN), 0);
      506  vo0p0 = vmlaq_lane_f32(vo0p0, vi4x6789, vwOP, 1);
      [all …]

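Every NEON entry in this listing repeats one pattern: vi4x6789 is input row 4 shifted right by two columns, assembled with vextq_f32 from the adjacent 4-wide windows vi4x4567 and vi4x89AB, and then multiply-accumulated into each output row against a single lane of the preloaded filter weights (vmlaq_lane_f32 on plain NEON, vfmaq_lane_f32 on NEON FMA). The sketch below is a minimal, self-contained illustration of that pattern, not code taken from the kernels; the input values and the two weights in vwOP are invented for the example.

    // Minimal sketch (assumed sample data): build the shifted window and
    // accumulate one filter tap, mirroring the kernel lines listed above.
    #include <arm_neon.h>
    #include <stdio.h>

    int main(void) {
      // Row 4 of the input, columns 4..11, as two adjacent 4-wide loads.
      const float row4[8] = {4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 9.0f, 10.0f, 11.0f};
      const float32x4_t vi4x4567 = vld1q_f32(row4);      // {4, 5, 6, 7}
      const float32x4_t vi4x89AB = vld1q_f32(row4 + 4);  // {8, 9, 10, 11}

      // vextq_f32(a, b, 2) drops the first two lanes of a and shifts in the
      // first two lanes of b: columns 6..9, i.e. {6, 7, 8, 9}.
      const float32x4_t vi4x6789 = vextq_f32(vi4x4567, vi4x89AB, 2);

      // One tap of the accumulation: acc += window * weight-lane.
      // vwOP stands for the last two filter weights (values invented here).
      const float wOP[2] = {0.5f, 2.0f};
      const float32x2_t vwOP = vld1_f32(wOP);
      float32x4_t vo0p0 = vdupq_n_f32(0.0f);
      vo0p0 = vmlaq_lane_f32(vo0p0, vi4x6789, vwOP, 1);  // += {6,7,8,9} * 2.0f

      float out[4];
      vst1q_f32(out, vo0p0);
      printf("%g %g %g %g\n", out[0], out[1], out[2], out[3]);  // 12 14 16 18
      return 0;
    }

On the neonfma variants the same line uses vfmaq_lane_f32, which fuses the multiply and add into a single rounding step; otherwise the structure is identical.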
D | 5x5p2-minmax-neon-4x4-acc2.c | in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_4x4_acc2():
      259  const float32x4_t vi4x6789 = vextq_f32(vi4x4567, vi4x89AB, 2);  (local)
      276  vo3p1 = vmlaq_lane_f32(vo3p1, vi4x6789, vget_high_f32(vw89AB), 0);
      280  vo2p0 = vmlaq_lane_f32(vo2p0, vi4x6789, vget_high_f32(vwCDEF), 1);
      284  vo1p1 = vmlaq_lane_f32(vo1p1, vi4x6789, vget_low_f32(vwKLMN), 0);
      288  vo0p0 = vmlaq_lane_f32(vo0p0, vi4x6789, vwOP, 1);
      481  const float32x4_t vi4x6789 = vextq_f32(vi4x4567, vi4x89AB, 2);  (local)
      498  vo3p1 = vmlaq_lane_f32(vo3p1, vi4x6789, vget_high_f32(vw89AB), 0);
      502  vo2p0 = vmlaq_lane_f32(vo2p0, vi4x6789, vget_high_f32(vwCDEF), 1);
      506  vo1p1 = vmlaq_lane_f32(vo1p1, vi4x6789, vget_low_f32(vwKLMN), 0);
      510  vo0p0 = vmlaq_lane_f32(vo0p0, vi4x6789, vwOP, 1);
      [all …]

D | 5x5p2-minmax-neonfma-4x4.c | in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_4x4():
      259  const float32x4_t vi4x6789 = vextq_f32(vi4x4567, vi4x89AB, 2);  (local)
      276  vo3p0 = vfmaq_lane_f32(vo3p0, vi4x6789, vget_high_f32(vw89AB), 0);
      280  vo2p0 = vfmaq_lane_f32(vo2p0, vi4x6789, vget_high_f32(vwCDEF), 1);
      284  vo1p0 = vfmaq_lane_f32(vo1p0, vi4x6789, vget_low_f32(vwKLMN), 0);
      288  vo0p0 = vfmaq_lane_f32(vo0p0, vi4x6789, vwOP, 1);
      477  const float32x4_t vi4x6789 = vextq_f32(vi4x4567, vi4x89AB, 2);  (local)
      494  vo3p0 = vfmaq_lane_f32(vo3p0, vi4x6789, vget_high_f32(vw89AB), 0);
      498  vo2p0 = vfmaq_lane_f32(vo2p0, vi4x6789, vget_high_f32(vwCDEF), 1);
      502  vo1p0 = vfmaq_lane_f32(vo1p0, vi4x6789, vget_low_f32(vwKLMN), 0);
      506  vo0p0 = vfmaq_lane_f32(vo0p0, vi4x6789, vwOP, 1);
      [all …]

D | 5x5p2-minmax-neonfma-4x4-acc2.c | in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_4x4_acc2():
      259  const float32x4_t vi4x6789 = vextq_f32(vi4x4567, vi4x89AB, 2);  (local)
      276  vo3p1 = vfmaq_lane_f32(vo3p1, vi4x6789, vget_high_f32(vw89AB), 0);
      280  vo2p0 = vfmaq_lane_f32(vo2p0, vi4x6789, vget_high_f32(vwCDEF), 1);
      284  vo1p1 = vfmaq_lane_f32(vo1p1, vi4x6789, vget_low_f32(vwKLMN), 0);
      288  vo0p0 = vfmaq_lane_f32(vo0p0, vi4x6789, vwOP, 1);
      481  const float32x4_t vi4x6789 = vextq_f32(vi4x4567, vi4x89AB, 2);  (local)
      498  vo3p1 = vfmaq_lane_f32(vo3p1, vi4x6789, vget_high_f32(vw89AB), 0);
      502  vo2p0 = vfmaq_lane_f32(vo2p0, vi4x6789, vget_high_f32(vwCDEF), 1);
      506  vo1p1 = vfmaq_lane_f32(vo1p1, vi4x6789, vget_low_f32(vwKLMN), 0);
      510  vo0p0 = vfmaq_lane_f32(vo0p0, vi4x6789, vwOP, 1);
      [all …]

D | 5x5p2-minmax-neonfma-1x4-acc2.c | in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_1x4_acc2():
      157  const float32x4_t vi4x6789 = vextq_f32(vi4x4567, vi4x89AB, 2);  (local)
      168  vo0p0 = vfmaq_lane_f32(vo0p0, vi4x6789, vwOP, 1);
      265  const float32x4_t vi4x6789 = vextq_f32(vi4x4567, vi4x89AB, 2);  (local)
      276  vo0p0 = vfmaq_lane_f32(vo0p0, vi4x6789, vwOP, 1);
      362  const float32x4_t vi4x6789 = vextq_f32(vi4x5678, vzero, 1);  (local)
      372  vo0p0 = vfmaq_lane_f32(vo0p0, vi4x6789, vwOP, 1);

D | 5x5p2-minmax-neonfma-1x4-acc3.c | in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_1x4_acc3():
      157  const float32x4_t vi4x6789 = vextq_f32(vi4x4567, vi4x89AB, 2);  (local)
      168  vo0p2 = vfmaq_lane_f32(vo0p2, vi4x6789, vwOP, 1);
      266  const float32x4_t vi4x6789 = vextq_f32(vi4x4567, vi4x89AB, 2);  (local)
      277  vo0p2 = vfmaq_lane_f32(vo0p2, vi4x6789, vwOP, 1);
      364  const float32x4_t vi4x6789 = vextq_f32(vi4x5678, vzero, 1);  (local)
      374  vo0p2 = vfmaq_lane_f32(vo0p2, vi4x6789, vwOP, 1);

D | 5x5p2-minmax-neon-1x4.c | in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_1x4():
      157  const float32x4_t vi4x6789 = vextq_f32(vi4x4567, vi4x89AB, 2);  (local)
      168  vo0p0 = vmlaq_lane_f32(vo0p0, vi4x6789, vwOP, 1);
      264  const float32x4_t vi4x6789 = vextq_f32(vi4x4567, vi4x89AB, 2);  (local)
      275  vo0p0 = vmlaq_lane_f32(vo0p0, vi4x6789, vwOP, 1);
      360  const float32x4_t vi4x6789 = vextq_f32(vi4x5678, vzero, 1);  (local)
      370  vo0p0 = vmlaq_lane_f32(vo0p0, vi4x6789, vwOP, 1);

D | 5x5p2-minmax-neonfma-1x4.c | in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_1x4():
      157  const float32x4_t vi4x6789 = vextq_f32(vi4x4567, vi4x89AB, 2);  (local)
      168  vo0p0 = vfmaq_lane_f32(vo0p0, vi4x6789, vwOP, 1);
      264  const float32x4_t vi4x6789 = vextq_f32(vi4x4567, vi4x89AB, 2);  (local)
      275  vo0p0 = vfmaq_lane_f32(vo0p0, vi4x6789, vwOP, 1);
      360  const float32x4_t vi4x6789 = vextq_f32(vi4x5678, vzero, 1);  (local)
      370  vo0p0 = vfmaq_lane_f32(vo0p0, vi4x6789, vwOP, 1);

D | 5x5p2-minmax-neon-1x4-acc3.c | in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_1x4_acc3():
      157  const float32x4_t vi4x6789 = vextq_f32(vi4x4567, vi4x89AB, 2);  (local)
      168  vo0p2 = vmlaq_lane_f32(vo0p2, vi4x6789, vwOP, 1);
      266  const float32x4_t vi4x6789 = vextq_f32(vi4x4567, vi4x89AB, 2);  (local)
      277  vo0p2 = vmlaq_lane_f32(vo0p2, vi4x6789, vwOP, 1);
      364  const float32x4_t vi4x6789 = vextq_f32(vi4x5678, vzero, 1);  (local)
      374  vo0p2 = vmlaq_lane_f32(vo0p2, vi4x6789, vwOP, 1);

D | 5x5p2-minmax-neon-1x4-acc2.c | in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_1x4_acc2():
      157  const float32x4_t vi4x6789 = vextq_f32(vi4x4567, vi4x89AB, 2);  (local)
      168  vo0p0 = vmlaq_lane_f32(vo0p0, vi4x6789, vwOP, 1);
      265  const float32x4_t vi4x6789 = vextq_f32(vi4x4567, vi4x89AB, 2);  (local)
      276  vo0p0 = vmlaq_lane_f32(vo0p0, vi4x6789, vwOP, 1);
      362  const float32x4_t vi4x6789 = vextq_f32(vi4x5678, vzero, 1);  (local)
      372  vo0p0 = vmlaq_lane_f32(vo0p0, vi4x6789, vwOP, 1);

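The single-row kernels above (and the wider variants further down) also show the right-edge form of the same window: once the row has no vi4x89AB left to shift in, vi4x6789 is formed from vi4x5678 and a zero vector with a one-lane shift, so the lanes past the end of the row become zero padding. A small sketch of just that step, again with invented input values:

    #include <arm_neon.h>
    #include <stdio.h>

    int main(void) {
      // Last loaded window of the row: columns 5..8 (values invented).
      const float tail[4] = {5.0f, 6.0f, 7.0f, 8.0f};
      const float32x4_t vi4x5678 = vld1q_f32(tail);
      const float32x4_t vzero = vdupq_n_f32(0.0f);

      // Shift by one lane and pull zeros in from the right: {6, 7, 8, 0}.
      // The zero lanes add nothing to the multiply-accumulate, which lets the
      // kernels reuse the same tap computation at the right border of the row.
      const float32x4_t vi4x6789 = vextq_f32(vi4x5678, vzero, 1);

      float out[4];
      vst1q_f32(out, vi4x6789);
      printf("%g %g %g %g\n", out[0], out[1], out[2], out[3]);  // 6 7 8 0
      return 0;
    }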
D | 5x5p2-minmax-neon-2x4-acc2.c | in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_2x4_acc2():
      191  const float32x4_t vi4x6789 = vextq_f32(vi4x4567, vi4x89AB, 2);  (local)
      206  vo1p1 = vmlaq_lane_f32(vo1p1, vi4x6789, vget_low_f32(vwKLMN), 0);
      208  vo0p0 = vmlaq_lane_f32(vo0p0, vi4x6789, vwOP, 1);
      337  const float32x4_t vi4x6789 = vextq_f32(vi4x4567, vi4x89AB, 2);  (local)
      352  vo1p1 = vmlaq_lane_f32(vo1p1, vi4x6789, vget_low_f32(vwKLMN), 0);
      354  vo0p0 = vmlaq_lane_f32(vo0p0, vi4x6789, vwOP, 1);
      470  const float32x4_t vi4x6789 = vextq_f32(vi4x5678, vzero, 1);  (local)
      483  vo1p1 = vmlaq_lane_f32(vo1p1, vi4x6789, vget_low_f32(vwKLMN), 0);
      485  vo0p0 = vmlaq_lane_f32(vo0p0, vi4x6789, vwOP, 1);

D | 5x5p2-minmax-neonfma-2x4.c | in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_2x4():
      191  const float32x4_t vi4x6789 = vextq_f32(vi4x4567, vi4x89AB, 2);  (local)
      206  vo1p0 = vfmaq_lane_f32(vo1p0, vi4x6789, vget_low_f32(vwKLMN), 0);
      208  vo0p0 = vfmaq_lane_f32(vo0p0, vi4x6789, vwOP, 1);
      335  const float32x4_t vi4x6789 = vextq_f32(vi4x4567, vi4x89AB, 2);  (local)
      350  vo1p0 = vfmaq_lane_f32(vo1p0, vi4x6789, vget_low_f32(vwKLMN), 0);
      352  vo0p0 = vfmaq_lane_f32(vo0p0, vi4x6789, vwOP, 1);
      466  const float32x4_t vi4x6789 = vextq_f32(vi4x5678, vzero, 1);  (local)
      479  vo1p0 = vfmaq_lane_f32(vo1p0, vi4x6789, vget_low_f32(vwKLMN), 0);
      481  vo0p0 = vfmaq_lane_f32(vo0p0, vi4x6789, vwOP, 1);

D | 5x5p2-minmax-neon-2x4.c | in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_2x4():
      191  const float32x4_t vi4x6789 = vextq_f32(vi4x4567, vi4x89AB, 2);  (local)
      206  vo1p0 = vmlaq_lane_f32(vo1p0, vi4x6789, vget_low_f32(vwKLMN), 0);
      208  vo0p0 = vmlaq_lane_f32(vo0p0, vi4x6789, vwOP, 1);
      335  const float32x4_t vi4x6789 = vextq_f32(vi4x4567, vi4x89AB, 2);  (local)
      350  vo1p0 = vmlaq_lane_f32(vo1p0, vi4x6789, vget_low_f32(vwKLMN), 0);
      352  vo0p0 = vmlaq_lane_f32(vo0p0, vi4x6789, vwOP, 1);
      466  const float32x4_t vi4x6789 = vextq_f32(vi4x5678, vzero, 1);  (local)
      479  vo1p0 = vmlaq_lane_f32(vo1p0, vi4x6789, vget_low_f32(vwKLMN), 0);
      481  vo0p0 = vmlaq_lane_f32(vo0p0, vi4x6789, vwOP, 1);

D | 5x5p2-minmax-neonfma-2x4-acc2.c | in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_2x4_acc2():
      191  const float32x4_t vi4x6789 = vextq_f32(vi4x4567, vi4x89AB, 2);  (local)
      206  vo1p1 = vfmaq_lane_f32(vo1p1, vi4x6789, vget_low_f32(vwKLMN), 0);
      208  vo0p0 = vfmaq_lane_f32(vo0p0, vi4x6789, vwOP, 1);
      337  const float32x4_t vi4x6789 = vextq_f32(vi4x4567, vi4x89AB, 2);  (local)
      352  vo1p1 = vfmaq_lane_f32(vo1p1, vi4x6789, vget_low_f32(vwKLMN), 0);
      354  vo0p0 = vfmaq_lane_f32(vo0p0, vi4x6789, vwOP, 1);
      470  const float32x4_t vi4x6789 = vextq_f32(vi4x5678, vzero, 1);  (local)
      483  vo1p1 = vfmaq_lane_f32(vo1p1, vi4x6789, vget_low_f32(vwKLMN), 0);
      485  vo0p0 = vfmaq_lane_f32(vo0p0, vi4x6789, vwOP, 1);

D | 5x5p2-minmax-neonfma-3x4.c | in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_3x4():
      225  const float32x4_t vi4x6789 = vextq_f32(vi4x4567, vi4x89AB, 2);  (local)
      242  vo2p0 = vfmaq_lane_f32(vo2p0, vi4x6789, vget_high_f32(vwCDEF), 1);
      245  vo1p0 = vfmaq_lane_f32(vo1p0, vi4x6789, vget_low_f32(vwKLMN), 0);
      248  vo0p0 = vfmaq_lane_f32(vo0p0, vi4x6789, vwOP, 1);
      406  const float32x4_t vi4x6789 = vextq_f32(vi4x4567, vi4x89AB, 2);  (local)
      423  vo2p0 = vfmaq_lane_f32(vo2p0, vi4x6789, vget_high_f32(vwCDEF), 1);
      426  vo1p0 = vfmaq_lane_f32(vo1p0, vi4x6789, vget_low_f32(vwKLMN), 0);
      429  vo0p0 = vfmaq_lane_f32(vo0p0, vi4x6789, vwOP, 1);
      572  const float32x4_t vi4x6789 = vextq_f32(vi4x5678, vzero, 1);  (local)
      586  vo2p0 = vfmaq_lane_f32(vo2p0, vi4x6789, vget_high_f32(vwCDEF), 1);
      [all …]

D | 5x5p2-minmax-wasmsimd-arm-loadsplat-3x4.c | in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_3x4():
      254  const v128_t vi4x6789 = wasm_v32x4_shuffle(vi4x4567, vi4x89AB, 2, 3, 4, 5);  (local)
      271  vo2p0 = wasm_f32x4_add(vo2p0, wasm_f32x4_mul(vi4x6789, vk24));
      274  vo1p0 = wasm_f32x4_add(vo1p0, wasm_f32x4_mul(vi4x6789, vk34));
      277  vo0p0 = wasm_f32x4_add(vo0p0, wasm_f32x4_mul(vi4x6789, vk44));
      434  const v128_t vi4x6789 = wasm_v32x4_shuffle(vi4x4567, vi4x89AB, 2, 3, 4, 5);  (local)
      451  vo2p0 = wasm_f32x4_add(vo2p0, wasm_f32x4_mul(vi4x6789, vk24));
      454  vo1p0 = wasm_f32x4_add(vo1p0, wasm_f32x4_mul(vi4x6789, vk34));
      457  vo0p0 = wasm_f32x4_add(vo0p0, wasm_f32x4_mul(vi4x6789, vk44));
      598  const v128_t vi4x6789 = wasm_v32x4_shuffle(vi4x5678, vzero, 1, 2, 3, 4);  (local)
      612  vo2p0 = wasm_f32x4_add(vo2p0, wasm_f32x4_mul(vi4x6789, vk24));
      [all …]

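The wasmsimd variants build the same shifted window with wasm_v32x4_shuffle, whose lane indices 0..3 select from the first operand and 4..7 from the second, and they accumulate with a separate wasm_f32x4_mul and wasm_f32x4_add rather than a fused multiply-add. The vk24/vk34/vk44 operands are presumably the pre-splatted ("loadsplat") filter taps; the sketch below is a standalone illustration with one invented tap value and sample inputs, not code from the kernels.

    #include <wasm_simd128.h>
    #include <stdio.h>

    int main(void) {
      // Row 4, columns 4..11, as two adjacent 4-wide loads (values invented).
      const float row4[8] = {4.0f, 5.0f, 6.0f, 7.0f, 8.0f, 9.0f, 10.0f, 11.0f};
      const v128_t vi4x4567 = wasm_v128_load(row4);      // {4, 5, 6, 7}
      const v128_t vi4x89AB = wasm_v128_load(row4 + 4);  // {8, 9, 10, 11}

      // Indices (2, 3, 4, 5) take lanes 2..3 of the first vector and lanes
      // 0..1 of the second: the window shifted right by two, {6, 7, 8, 9}.
      const v128_t vi4x6789 = wasm_v32x4_shuffle(vi4x4567, vi4x89AB, 2, 3, 4, 5);

      // Accumulate one splatted filter tap: vo0p0 += vi4x6789 * vk44.
      const v128_t vk44 = wasm_f32x4_splat(2.0f);  // invented tap value
      v128_t vo0p0 = wasm_f32x4_splat(0.0f);
      vo0p0 = wasm_f32x4_add(vo0p0, wasm_f32x4_mul(vi4x6789, vk44));

      float out[4];
      wasm_v128_store(out, vo0p0);
      printf("%g %g %g %g\n", out[0], out[1], out[2], out[3]);  // 12 14 16 18
      return 0;
    }

Such a sketch compiles with a SIMD-enabled wasm toolchain, for example emcc with -msimd128.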
D | 5x5p2-minmax-wasmsimd-arm-loadsplat-3x4-acc2.c | in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_3x4_acc2():
      254  const v128_t vi4x6789 = wasm_v32x4_shuffle(vi4x4567, vi4x89AB, 2, 3, 4, 5);  (local)
      271  vo2p0 = wasm_f32x4_add(vo2p0, wasm_f32x4_mul(vi4x6789, vk24));
      274  vo1p1 = wasm_f32x4_add(vo1p1, wasm_f32x4_mul(vi4x6789, vk34));
      277  vo0p0 = wasm_f32x4_add(vo0p0, wasm_f32x4_mul(vi4x6789, vk44));
      437  const v128_t vi4x6789 = wasm_v32x4_shuffle(vi4x4567, vi4x89AB, 2, 3, 4, 5);  (local)
      454  vo2p0 = wasm_f32x4_add(vo2p0, wasm_f32x4_mul(vi4x6789, vk24));
      457  vo1p1 = wasm_f32x4_add(vo1p1, wasm_f32x4_mul(vi4x6789, vk34));
      460  vo0p0 = wasm_f32x4_add(vo0p0, wasm_f32x4_mul(vi4x6789, vk44));
      604  const v128_t vi4x6789 = wasm_v32x4_shuffle(vi4x5678, vzero, 1, 2, 3, 4);  (local)
      618  vo2p0 = wasm_f32x4_add(vo2p0, wasm_f32x4_mul(vi4x6789, vk24));
      [all …]

D | 5x5p2-minmax-wasmsimd-x86-loadsplat-3x4.c | in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_x86_loadsplat_3x4():
      254  const v128_t vi4x6789 = wasm_v32x4_shuffle(vi4x4567, vi4x89AB, 2, 3, 4, 5);  (local)
      271  vo2p0 = wasm_f32x4_add(vo2p0, wasm_f32x4_mul(vi4x6789, vk24));
      274  vo1p0 = wasm_f32x4_add(vo1p0, wasm_f32x4_mul(vi4x6789, vk34));
      277  vo0p0 = wasm_f32x4_add(vo0p0, wasm_f32x4_mul(vi4x6789, vk44));
      434  const v128_t vi4x6789 = wasm_v32x4_shuffle(vi4x4567, vi4x89AB, 2, 3, 4, 5);  (local)
      451  vo2p0 = wasm_f32x4_add(vo2p0, wasm_f32x4_mul(vi4x6789, vk24));
      454  vo1p0 = wasm_f32x4_add(vo1p0, wasm_f32x4_mul(vi4x6789, vk34));
      457  vo0p0 = wasm_f32x4_add(vo0p0, wasm_f32x4_mul(vi4x6789, vk44));
      598  const v128_t vi4x6789 = wasm_v32x4_shuffle(vi4x5678, vzero, 1, 2, 3, 4);  (local)
      612  vo2p0 = wasm_f32x4_add(vo2p0, wasm_f32x4_mul(vi4x6789, vk24));
      [all …]

D | 5x5p2-minmax-neon-3x4-acc2.c | in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_3x4_acc2():
      225  const float32x4_t vi4x6789 = vextq_f32(vi4x4567, vi4x89AB, 2);  (local)
      242  vo2p0 = vmlaq_lane_f32(vo2p0, vi4x6789, vget_high_f32(vwCDEF), 1);
      245  vo1p1 = vmlaq_lane_f32(vo1p1, vi4x6789, vget_low_f32(vwKLMN), 0);
      248  vo0p0 = vmlaq_lane_f32(vo0p0, vi4x6789, vwOP, 1);
      409  const float32x4_t vi4x6789 = vextq_f32(vi4x4567, vi4x89AB, 2);  (local)
      426  vo2p0 = vmlaq_lane_f32(vo2p0, vi4x6789, vget_high_f32(vwCDEF), 1);
      429  vo1p1 = vmlaq_lane_f32(vo1p1, vi4x6789, vget_low_f32(vwKLMN), 0);
      432  vo0p0 = vmlaq_lane_f32(vo0p0, vi4x6789, vwOP, 1);
      578  const float32x4_t vi4x6789 = vextq_f32(vi4x5678, vzero, 1);  (local)
      592  vo2p0 = vmlaq_lane_f32(vo2p0, vi4x6789, vget_high_f32(vwCDEF), 1);
      [all …]

D | 5x5p2-minmax-neon-3x4.c | in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_3x4():
      225  const float32x4_t vi4x6789 = vextq_f32(vi4x4567, vi4x89AB, 2);  (local)
      242  vo2p0 = vmlaq_lane_f32(vo2p0, vi4x6789, vget_high_f32(vwCDEF), 1);
      245  vo1p0 = vmlaq_lane_f32(vo1p0, vi4x6789, vget_low_f32(vwKLMN), 0);
      248  vo0p0 = vmlaq_lane_f32(vo0p0, vi4x6789, vwOP, 1);
      406  const float32x4_t vi4x6789 = vextq_f32(vi4x4567, vi4x89AB, 2);  (local)
      423  vo2p0 = vmlaq_lane_f32(vo2p0, vi4x6789, vget_high_f32(vwCDEF), 1);
      426  vo1p0 = vmlaq_lane_f32(vo1p0, vi4x6789, vget_low_f32(vwKLMN), 0);
      429  vo0p0 = vmlaq_lane_f32(vo0p0, vi4x6789, vwOP, 1);
      572  const float32x4_t vi4x6789 = vextq_f32(vi4x5678, vzero, 1);  (local)
      586  vo2p0 = vmlaq_lane_f32(vo2p0, vi4x6789, vget_high_f32(vwCDEF), 1);
      [all …]

D | 5x5p2-minmax-neonfma-3x4-acc2.c | in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_3x4_acc2():
      225  const float32x4_t vi4x6789 = vextq_f32(vi4x4567, vi4x89AB, 2);  (local)
      242  vo2p0 = vfmaq_lane_f32(vo2p0, vi4x6789, vget_high_f32(vwCDEF), 1);
      245  vo1p1 = vfmaq_lane_f32(vo1p1, vi4x6789, vget_low_f32(vwKLMN), 0);
      248  vo0p0 = vfmaq_lane_f32(vo0p0, vi4x6789, vwOP, 1);
      409  const float32x4_t vi4x6789 = vextq_f32(vi4x4567, vi4x89AB, 2);  (local)
      426  vo2p0 = vfmaq_lane_f32(vo2p0, vi4x6789, vget_high_f32(vwCDEF), 1);
      429  vo1p1 = vfmaq_lane_f32(vo1p1, vi4x6789, vget_low_f32(vwKLMN), 0);
      432  vo0p0 = vfmaq_lane_f32(vo0p0, vi4x6789, vwOP, 1);
      578  const float32x4_t vi4x6789 = vextq_f32(vi4x5678, vzero, 1);  (local)
      592  vo2p0 = vfmaq_lane_f32(vo2p0, vi4x6789, vget_high_f32(vwCDEF), 1);
      [all …]

D | 5x5p2-minmax-wasmsimd-x86-loadsplat-5x4.c | in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_x86_loadsplat_5x4():
      322  const v128_t vi4x6789 = wasm_v32x4_shuffle(vi4x4567, vi4x89AB, 2, 3, 4, 5);  (local)
      337  vo4p0 = wasm_f32x4_add(vo4p0, wasm_f32x4_mul(vi4x6789, vk04));
      342  vo3p0 = wasm_f32x4_add(vo3p0, wasm_f32x4_mul(vi4x6789, vk14));
      347  vo2p0 = wasm_f32x4_add(vo2p0, wasm_f32x4_mul(vi4x6789, vk24));
      352  vo1p0 = wasm_f32x4_add(vo1p0, wasm_f32x4_mul(vi4x6789, vk34));
      357  vo0p0 = wasm_f32x4_add(vo0p0, wasm_f32x4_mul(vi4x6789, vk44));
      576  const v128_t vi4x6789 = wasm_v32x4_shuffle(vi4x4567, vi4x89AB, 2, 3, 4, 5);  (local)
      591  vo4p0 = wasm_f32x4_add(vo4p0, wasm_f32x4_mul(vi4x6789, vk04));
      596  vo3p0 = wasm_f32x4_add(vo3p0, wasm_f32x4_mul(vi4x6789, vk14));
      601  vo2p0 = wasm_f32x4_add(vo2p0, wasm_f32x4_mul(vi4x6789, vk24));
      [all …]

D | 5x5p2-minmax-neonfma-5x4.c | in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neonfma_5x4():
      293  const float32x4_t vi4x6789 = vextq_f32(vi4x4567, vi4x89AB, 2);  (local)
      308  vo4p0 = vfmaq_lane_f32(vo4p0, vi4x6789, vget_low_f32(vw4567), 1);
      313  vo3p0 = vfmaq_lane_f32(vo3p0, vi4x6789, vget_high_f32(vw89AB), 0);
      318  vo2p0 = vfmaq_lane_f32(vo2p0, vi4x6789, vget_high_f32(vwCDEF), 1);
      323  vo1p0 = vfmaq_lane_f32(vo1p0, vi4x6789, vget_low_f32(vwKLMN), 0);
      328  vo0p0 = vfmaq_lane_f32(vo0p0, vi4x6789, vwOP, 1);
      548  const float32x4_t vi4x6789 = vextq_f32(vi4x4567, vi4x89AB, 2);  (local)
      563  vo4p0 = vfmaq_lane_f32(vo4p0, vi4x6789, vget_low_f32(vw4567), 1);
      568  vo3p0 = vfmaq_lane_f32(vo3p0, vi4x6789, vget_high_f32(vw89AB), 0);
      573  vo2p0 = vfmaq_lane_f32(vo2p0, vi4x6789, vget_high_f32(vwCDEF), 1);
      [all …]

D | 5x5p2-minmax-neon-5x4.c | in xnn_f32_dwconv2d_chw_ukernel_5x5p2__neon_5x4():
      293  const float32x4_t vi4x6789 = vextq_f32(vi4x4567, vi4x89AB, 2);  (local)
      308  vo4p0 = vmlaq_lane_f32(vo4p0, vi4x6789, vget_low_f32(vw4567), 1);
      313  vo3p0 = vmlaq_lane_f32(vo3p0, vi4x6789, vget_high_f32(vw89AB), 0);
      318  vo2p0 = vmlaq_lane_f32(vo2p0, vi4x6789, vget_high_f32(vwCDEF), 1);
      323  vo1p0 = vmlaq_lane_f32(vo1p0, vi4x6789, vget_low_f32(vwKLMN), 0);
      328  vo0p0 = vmlaq_lane_f32(vo0p0, vi4x6789, vwOP, 1);
      548  const float32x4_t vi4x6789 = vextq_f32(vi4x4567, vi4x89AB, 2);  (local)
      563  vo4p0 = vmlaq_lane_f32(vo4p0, vi4x6789, vget_low_f32(vw4567), 1);
      568  vo3p0 = vmlaq_lane_f32(vo3p0, vi4x6789, vget_high_f32(vw89AB), 0);
      573  vo2p0 = vmlaq_lane_f32(vo2p0, vi4x6789, vget_high_f32(vwCDEF), 1);
      [all …]

D | 5x5p2-minmax-wasmsimd-arm-loadsplat-5x4.c | in xnn_f32_dwconv2d_chw_ukernel_5x5p2__wasmsimd_arm_loadsplat_5x4():
      322  const v128_t vi4x6789 = wasm_v32x4_shuffle(vi4x4567, vi4x89AB, 2, 3, 4, 5);  (local)
      337  vo4p0 = wasm_f32x4_add(vo4p0, wasm_f32x4_mul(vi4x6789, vk04));
      342  vo3p0 = wasm_f32x4_add(vo3p0, wasm_f32x4_mul(vi4x6789, vk14));
      347  vo2p0 = wasm_f32x4_add(vo2p0, wasm_f32x4_mul(vi4x6789, vk24));
      352  vo1p0 = wasm_f32x4_add(vo1p0, wasm_f32x4_mul(vi4x6789, vk34));
      357  vo0p0 = wasm_f32x4_add(vo0p0, wasm_f32x4_mul(vi4x6789, vk44));
      576  const v128_t vi4x6789 = wasm_v32x4_shuffle(vi4x4567, vi4x89AB, 2, 3, 4, 5);  (local)
      591  vo4p0 = wasm_f32x4_add(vo4p0, wasm_f32x4_mul(vi4x6789, vk04));
      596  vo3p0 = wasm_f32x4_add(vo3p0, wasm_f32x4_mul(vi4x6789, vk14));
      601  vo2p0 = wasm_f32x4_add(vo2p0, wasm_f32x4_mul(vi4x6789, vk24));
      [all …]