
Searched refs:vk21c0 (Results 1 – 4 of 4) sorted by relevance

/external/XNNPACK/src/f32-conv-hwc2chw/
3x3s2p1c3x4-neonfma-2x2.c (all hits in xnn_f32_conv_hwc2chw_ukernel_3x3s2p1c3x4__neonfma_2x2()):
  200  const float32x4_t vk21c0 = vld1q_f32(w + 48);   [local declaration]
  202  vo0x0 = vfmaq_laneq_f32(vo0x0, vk21c0, vi2x1, 0);
  203  vo1x0 = vfmaq_laneq_f32(vo1x0, vk21c0, vi4x1, 0);
  204  vo0x1 = vfmaq_laneq_f32(vo0x1, vk21c0, vi2x2, 2);
  205  vo1x1 = vfmaq_laneq_f32(vo1x1, vk21c0, vi4x2, 2);
  466  const float32x4_t vk21c0 = vld1q_f32(w + 48);   [local declaration]
  468  vo0x0 = vfmaq_laneq_f32(vo0x0, vk21c0, vi2x1, 0);
  469  vo1x0 = vfmaq_laneq_f32(vo1x0, vk21c0, vi4x1, 0);
  471  vo0x1 = vfmaq_laneq_f32(vo0x1, vk21c0, vi2x2, 2);
  472  vo1x1 = vfmaq_laneq_f32(vo1x1, vk21c0, vi4x2, 2);

3x3s2p1c3x4-neon-2x2.c (all hits in xnn_f32_conv_hwc2chw_ukernel_3x3s2p1c3x4__neon_2x2()):
  200  const float32x4_t vk21c0 = vld1q_f32(w + 48);   [local declaration]
  202  vo0x0 = vmlaq_lane_f32(vo0x0, vk21c0, vget_low_f32(vi2x1), 0);
  203  vo1x0 = vmlaq_lane_f32(vo1x0, vk21c0, vget_low_f32(vi4x1), 0);
  204  vo0x1 = vmlaq_lane_f32(vo0x1, vk21c0, vget_high_f32(vi2x2), 0);
  205  vo1x1 = vmlaq_lane_f32(vo1x1, vk21c0, vget_high_f32(vi4x2), 0);
  464  const float32x4_t vk21c0 = vld1q_f32(w + 48);   [local declaration]
  466  vo0x0 = vmlaq_lane_f32(vo0x0, vk21c0, vget_low_f32(vi2x1), 0);
  467  vo1x0 = vmlaq_lane_f32(vo1x0, vk21c0, vget_low_f32(vi4x1), 0);
  469  vo0x1 = vmlaq_lane_f32(vo0x1, vk21c0, vget_high_f32(vi2x2), 0);
  470  vo1x1 = vmlaq_lane_f32(vo1x1, vk21c0, vget_high_f32(vi4x2), 0);

3x3s2p1c3x4-wasmsimd-2x2.c (all hits in xnn_f32_conv_hwc2chw_ukernel_3x3s2p1c3x4__wasmsimd_2x2()):
  201  const v128_t vk21c0 = wasm_v128_load(w + 48);   [local declaration]
  203  …vo0x0 = wasm_f32x4_add(vo0x0, wasm_f32x4_mul(vk21c0, wasm_v32x4_shuffle(vi2x1, vi2x1, 0, 0, 0, 0))…
  204  …vo1x0 = wasm_f32x4_add(vo1x0, wasm_f32x4_mul(vk21c0, wasm_v32x4_shuffle(vi4x1, vi4x1, 0, 0, 0, 0))…
  205  …vo0x1 = wasm_f32x4_add(vo0x1, wasm_f32x4_mul(vk21c0, wasm_v32x4_shuffle(vi2x2, vi2x2, 2, 2, 2, 2))…
  206  …vo1x1 = wasm_f32x4_add(vo1x1, wasm_f32x4_mul(vk21c0, wasm_v32x4_shuffle(vi4x2, vi4x2, 2, 2, 2, 2))…
  466  const v128_t vk21c0 = wasm_v128_load(w + 48);   [local declaration]
  468  …vo0x0 = wasm_f32x4_add(vo0x0, wasm_f32x4_mul(vk21c0, wasm_v32x4_shuffle(vi2x1, vi2x1, 0, 0, 0, 0))…
  469  …vo1x0 = wasm_f32x4_add(vo1x0, wasm_f32x4_mul(vk21c0, wasm_v32x4_shuffle(vi4x1, vi4x1, 0, 0, 0, 0))…
  471  …vo0x1 = wasm_f32x4_add(vo0x1, wasm_f32x4_mul(vk21c0, wasm_v32x4_shuffle(vi2x2, vi2x2, 2, 2, 2, 2))…
  472  …vo1x1 = wasm_f32x4_add(vo1x1, wasm_f32x4_mul(vk21c0, wasm_v32x4_shuffle(vi4x2, vi4x2, 2, 2, 2, 2))…

3x3s2p1c3x4-sse-2x2.c (all hits in xnn_f32_conv_hwc2chw_ukernel_3x3s2p1c3x4__sse_2x2()):
  200  const __m128 vk21c0 = _mm_load_ps(w + 48);   [local declaration]
  202  …vo0x0 = _mm_add_ps(vo0x0, _mm_mul_ps(vk21c0, _mm_shuffle_ps(vi2x1, vi2x1, _MM_SHUFFLE(0, 0, 0, 0))…
  203  …vo1x0 = _mm_add_ps(vo1x0, _mm_mul_ps(vk21c0, _mm_shuffle_ps(vi4x1, vi4x1, _MM_SHUFFLE(0, 0, 0, 0))…
  204  …vo0x1 = _mm_add_ps(vo0x1, _mm_mul_ps(vk21c0, _mm_shuffle_ps(vi2x2, vi2x2, _MM_SHUFFLE(2, 2, 2, 2))…
  205  …vo1x1 = _mm_add_ps(vo1x1, _mm_mul_ps(vk21c0, _mm_shuffle_ps(vi4x2, vi4x2, _MM_SHUFFLE(2, 2, 2, 2))…
  466  const __m128 vk21c0 = _mm_load_ps(w + 48);   [local declaration]
  468  …vo0x0 = _mm_add_ps(vo0x0, _mm_mul_ps(vk21c0, _mm_shuffle_ps(vi2x1, vi2x1, _MM_SHUFFLE(0, 0, 0, 0))…
  469  …vo1x0 = _mm_add_ps(vo1x0, _mm_mul_ps(vk21c0, _mm_shuffle_ps(vi4x1, vi4x1, _MM_SHUFFLE(0, 0, 0, 0))…
  471  …vo0x1 = _mm_add_ps(vo0x1, _mm_mul_ps(vk21c0, _mm_shuffle_ps(vi2x2, vi2x2, _MM_SHUFFLE(2, 2, 2, 2))…
  472  …vo1x1 = _mm_add_ps(vo1x1, _mm_mul_ps(vk21c0, _mm_shuffle_ps(vi4x2, vi4x2, _MM_SHUFFLE(2, 2, 2, 2))…
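
Taken together, the four hits show the same step of XNNPACK's 3x3, stride-2, pad-1, 3-channel-to-4-channel HWC-to-CHW convolution microkernel, specialized for NEON-FMA, plain NEON, WAsm SIMD, and SSE. In each back-end, vk21c0 is a 4-wide weight vector loaded from w + 48 (by the identifier naming, presumably the weights of kernel row 2, column 1, input channel 0 for four output channels); every use multiplies it by a single broadcast input sample (lane 0 of vi2x1/vi4x1 or lane 2 of vi2x2/vi4x2) and accumulates into one of the four output accumulators vo0x0, vo1x0, vo0x1, vo1x1. A minimal scalar sketch of that step, with hypothetical names that are not taken from the XNNPACK sources:

#include <stddef.h>

/* Scalar sketch of the pattern seen in every hit above: one 4-wide weight
 * vector (e.g. vk21c0, the weights of a single kernel tap across 4 output
 * channels) is multiplied by one broadcast input sample and accumulated
 * into a 4-wide output accumulator.  The "row 2, column 1, input channel 0"
 * reading of vk21c0 is an assumption based on the identifier, not on docs. */
void accumulate_tap(float vo[4],        /* 4 output-channel accumulators, e.g. vo0x0 */
                    const float vk[4],  /* 4 output-channel weights, e.g. vk21c0 at w + 48 */
                    float vi)           /* one input sample, broadcast to all lanes */
{
  for (size_t c = 0; c < 4; c++) {
    vo[c] += vk[c] * vi;  /* same math as vfmaq_laneq_f32 / _mm_add_ps(_mm_mul_ps(...)) */
  }
}

The back-ends differ only in how they express the broadcast multiply-accumulate: NEON-FMA folds it into a single vfmaq_laneq_f32, plain NEON uses vmlaq_lane_f32 on vget_low_f32/vget_high_f32 halves, and the WAsm SIMD and SSE variants first build the broadcast with a shuffle and then use separate multiply and add intrinsics.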