/external/XNNPACK/src/f32-vbinary/gen/

vminc-wasmsimd-x86-x16.c
  35:  const v128_t va89AB = wasm_v128_load(a + 8);              in xnn_f32_vminc_ukernel__wasmsimd_x86_x16() (local)
  41:  const v128_t vm89AB = wasm_f32x4_lt(va89AB, vb);          in xnn_f32_vminc_ukernel__wasmsimd_x86_x16()
  46:  v128_t vy89AB = wasm_v128_bitselect(va89AB, vb, vm89AB);  in xnn_f32_vminc_ukernel__wasmsimd_x86_x16()

vmaxc-wasmsimd-x86-x16.c
  35:  const v128_t va89AB = wasm_v128_load(a + 8);              in xnn_f32_vmaxc_ukernel__wasmsimd_x86_x16() (local)
  41:  const v128_t vm89AB = wasm_f32x4_le(va89AB, vb);          in xnn_f32_vmaxc_ukernel__wasmsimd_x86_x16()
  46:  v128_t vy89AB = wasm_v128_bitselect(vb, va89AB, vm89AB);  in xnn_f32_vmaxc_ukernel__wasmsimd_x86_x16()

vmax-wasmsimd-x86-x16.c
  35:  const v128_t va89AB = wasm_v128_load(a + 8);                  in xnn_f32_vmax_ukernel__wasmsimd_x86_x16() (local)
  47:  const v128_t vm89AB = wasm_f32x4_le(va89AB, vb89AB);          in xnn_f32_vmax_ukernel__wasmsimd_x86_x16()
  52:  v128_t vy89AB = wasm_v128_bitselect(vb89AB, va89AB, vm89AB);  in xnn_f32_vmax_ukernel__wasmsimd_x86_x16()

vmin-wasmsimd-x86-x16.c
  35:  const v128_t va89AB = wasm_v128_load(a + 8);                  in xnn_f32_vmin_ukernel__wasmsimd_x86_x16() (local)
  47:  const v128_t vm89AB = wasm_f32x4_lt(va89AB, vb89AB);          in xnn_f32_vmin_ukernel__wasmsimd_x86_x16()
  52:  v128_t vy89AB = wasm_v128_bitselect(va89AB, vb89AB, vm89AB);  in xnn_f32_vmin_ukernel__wasmsimd_x86_x16()
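The *-x86-* kernels above do not call wasm_f32x4_min/wasm_f32x4_max directly: they build the
result from a comparison (wasm_f32x4_lt / wasm_f32x4_le) followed by wasm_v128_bitselect,
presumably because that lowers to a cheap compare-and-blend sequence on x86, whereas the fully
IEEE-style f32x4.min/f32x4.max need extra NaN and signed-zero handling there. A minimal sketch
of the idiom, assuming compilation to WebAssembly with -msimd128; the helper names are
illustrative, not the generated kernels' code:

    #include <wasm_simd128.h>

    // min: pick a where a < b, otherwise b (b also wins for NaN lanes,
    // matching a compare-and-blend lowering).
    static inline v128_t f32x4_min_x86_style(v128_t va, v128_t vb) {
      const v128_t vm = wasm_f32x4_lt(va, vb);   // per-lane mask: a < b
      return wasm_v128_bitselect(va, vb, vm);    // mask ? a : b
    }

    // max: pick b where a <= b, otherwise a.
    static inline v128_t f32x4_max_x86_style(v128_t va, v128_t vb) {
      const v128_t vm = wasm_f32x4_le(va, vb);   // per-lane mask: a <= b
      return wasm_v128_bitselect(vb, va, vm);    // mask ? b : a
    }

The *-arm-* variants further down instead call wasm_f32x4_min/wasm_f32x4_max in a single step,
which ARM can implement directly with its native fmin/fmax instructions.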
vmulc-wasmsimd-x16.c
  35:  const v128_t va89AB = wasm_v128_load(a + 8);   in xnn_f32_vmulc_ukernel__wasmsimd_x16() (local)
  41:  v128_t vy89AB = wasm_f32x4_mul(va89AB, vb);    in xnn_f32_vmulc_ukernel__wasmsimd_x16()

vmaxc-wasmsimd-arm-x16.c
  35:  const v128_t va89AB = wasm_v128_load(a + 8);   in xnn_f32_vmaxc_ukernel__wasmsimd_arm_x16() (local)
  41:  v128_t vy89AB = wasm_f32x4_max(va89AB, vb);    in xnn_f32_vmaxc_ukernel__wasmsimd_arm_x16()

vrsubc-wasmsimd-x16.c
  35:  const v128_t va89AB = wasm_v128_load(a + 8);   in xnn_f32_vrsubc_ukernel__wasmsimd_x16() (local)
  41:  v128_t vy89AB = wasm_f32x4_sub(vb, va89AB);    in xnn_f32_vrsubc_ukernel__wasmsimd_x16()

vminc-wasmsimd-arm-x16.c
  35:  const v128_t va89AB = wasm_v128_load(a + 8);   in xnn_f32_vminc_ukernel__wasmsimd_arm_x16() (local)
  41:  v128_t vy89AB = wasm_f32x4_min(va89AB, vb);    in xnn_f32_vminc_ukernel__wasmsimd_arm_x16()

vdivc-wasmsimd-x16.c
  35:  const v128_t va89AB = wasm_v128_load(a + 8);   in xnn_f32_vdivc_ukernel__wasmsimd_x16() (local)
  41:  v128_t vy89AB = wasm_f32x4_div(va89AB, vb);    in xnn_f32_vdivc_ukernel__wasmsimd_x16()

vsubc-wasmsimd-x16.c
  35:  const v128_t va89AB = wasm_v128_load(a + 8);   in xnn_f32_vsubc_ukernel__wasmsimd_x16() (local)
  41:  v128_t vy89AB = wasm_f32x4_sub(va89AB, vb);    in xnn_f32_vsubc_ukernel__wasmsimd_x16()

vaddc-wasmsimd-x16.c
  35:  const v128_t va89AB = wasm_v128_load(a + 8);   in xnn_f32_vaddc_ukernel__wasmsimd_x16() (local)
  41:  v128_t vy89AB = wasm_f32x4_add(va89AB, vb);    in xnn_f32_vaddc_ukernel__wasmsimd_x16()

vrdivc-wasmsimd-x16.c
  35:  const v128_t va89AB = wasm_v128_load(a + 8);   in xnn_f32_vrdivc_ukernel__wasmsimd_x16() (local)
  41:  v128_t vy89AB = wasm_f32x4_div(vb, va89AB);    in xnn_f32_vrdivc_ukernel__wasmsimd_x16()
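The v*c kernels in this group apply one scalar operand b against every element of a; the
r-prefixed vrsubc/vrdivc simply swap the operand order, as the wasm_f32x4_sub(vb, va89AB) and
wasm_f32x4_div(vb, va89AB) matches show. The matched lines cover only the lane-8..11 slice of a
16-floats-per-iteration main loop; the broadcast of b and the loop structure fall on lines that
did not match. A rough sketch of that overall shape, assuming the usual layout of these
generated kernels (the signature and names here are illustrative, not the real
xnn_f32_vaddc_ukernel__wasmsimd_x16):

    #include <stddef.h>
    #include <wasm_simd128.h>

    // Illustrative constant-operand kernel: y[i] = a[i] + *b, n given in bytes.
    void f32_vaddc_x16_sketch(size_t n, const float* a, const float* b, float* y) {
      const v128_t vb = wasm_v128_load32_splat(b);   // broadcast the scalar operand once
      for (; n >= 16 * sizeof(float); n -= 16 * sizeof(float)) {
        const v128_t va0123 = wasm_v128_load(a);
        const v128_t va4567 = wasm_v128_load(a + 4);
        const v128_t va89AB = wasm_v128_load(a + 8);   // the slice matched above
        const v128_t vaCDEF = wasm_v128_load(a + 12);
        a += 16;

        wasm_v128_store(y,      wasm_f32x4_add(va0123, vb));
        wasm_v128_store(y + 4,  wasm_f32x4_add(va4567, vb));
        wasm_v128_store(y + 8,  wasm_f32x4_add(va89AB, vb));
        wasm_v128_store(y + 12, wasm_f32x4_add(vaCDEF, vb));
        y += 16;
      }
      // remainder elements (fewer than 16) are handled by narrower tails in the real kernels
    }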
vsub-wasmsimd-x16.c
  35:  const v128_t va89AB = wasm_v128_load(a + 8);     in xnn_f32_vsub_ukernel__wasmsimd_x16() (local)
  47:  v128_t vy89AB = wasm_f32x4_sub(va89AB, vb89AB);  in xnn_f32_vsub_ukernel__wasmsimd_x16()

vsqrdiffc-wasmsimd-x16.c
  35:  const v128_t va89AB = wasm_v128_load(a + 8);     in xnn_f32_vsqrdiffc_ukernel__wasmsimd_x16() (local)
  41:  v128_t vy89AB = wasm_f32x4_sub(va89AB, vb);      in xnn_f32_vsqrdiffc_ukernel__wasmsimd_x16()
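Note that vsqrdiffc computes a squared difference, (a - b)^2; only the subtraction references
va89AB, so the subsequent squaring of vy89AB presumably sits on a later line that did not match
this query.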
vmul-wasmsimd-x16.c
  35:  const v128_t va89AB = wasm_v128_load(a + 8);     in xnn_f32_vmul_ukernel__wasmsimd_x16() (local)
  47:  v128_t vy89AB = wasm_f32x4_mul(va89AB, vb89AB);  in xnn_f32_vmul_ukernel__wasmsimd_x16()

vadd-wasmsimd-x16.c
  35:  const v128_t va89AB = wasm_v128_load(a + 8);     in xnn_f32_vadd_ukernel__wasmsimd_x16() (local)
  47:  v128_t vy89AB = wasm_f32x4_add(va89AB, vb89AB);  in xnn_f32_vadd_ukernel__wasmsimd_x16()

vmax-wasmsimd-arm-x16.c
  35:  const v128_t va89AB = wasm_v128_load(a + 8);     in xnn_f32_vmax_ukernel__wasmsimd_arm_x16() (local)
  47:  v128_t vy89AB = wasm_f32x4_max(va89AB, vb89AB);  in xnn_f32_vmax_ukernel__wasmsimd_arm_x16()

vmin-wasmsimd-arm-x16.c
  35:  const v128_t va89AB = wasm_v128_load(a + 8);     in xnn_f32_vmin_ukernel__wasmsimd_arm_x16() (local)
  47:  v128_t vy89AB = wasm_f32x4_min(va89AB, vb89AB);  in xnn_f32_vmin_ukernel__wasmsimd_arm_x16()

vdiv-wasmsimd-x16.c
  35:  const v128_t va89AB = wasm_v128_load(a + 8);     in xnn_f32_vdiv_ukernel__wasmsimd_x16() (local)
  47:  v128_t vy89AB = wasm_f32x4_div(va89AB, vb89AB);  in xnn_f32_vdiv_ukernel__wasmsimd_x16()
vmulc-relu-wasmsimd-x16.c
  36:  const v128_t va89AB = wasm_v128_load(a + 8);   in xnn_f32_vmulc_relu_ukernel__wasmsimd_x16() (local)
  42:  v128_t vy89AB = wasm_f32x4_mul(va89AB, vb);    in xnn_f32_vmulc_relu_ukernel__wasmsimd_x16()

vdivc-relu-wasmsimd-x16.c
  36:  const v128_t va89AB = wasm_v128_load(a + 8);   in xnn_f32_vdivc_relu_ukernel__wasmsimd_x16() (local)
  42:  v128_t vy89AB = wasm_f32x4_div(va89AB, vb);    in xnn_f32_vdivc_relu_ukernel__wasmsimd_x16()

vrsubc-relu-wasmsimd-x16.c
  36:  const v128_t va89AB = wasm_v128_load(a + 8);   in xnn_f32_vrsubc_relu_ukernel__wasmsimd_x16() (local)
  42:  v128_t vy89AB = wasm_f32x4_sub(vb, va89AB);    in xnn_f32_vrsubc_relu_ukernel__wasmsimd_x16()

vaddc-relu-wasmsimd-x16.c
  36:  const v128_t va89AB = wasm_v128_load(a + 8);   in xnn_f32_vaddc_relu_ukernel__wasmsimd_x16() (local)
  42:  v128_t vy89AB = wasm_f32x4_add(va89AB, vb);    in xnn_f32_vaddc_relu_ukernel__wasmsimd_x16()

vrdivc-relu-wasmsimd-x16.c
  36:  const v128_t va89AB = wasm_v128_load(a + 8);   in xnn_f32_vrdivc_relu_ukernel__wasmsimd_x16() (local)
  42:  v128_t vy89AB = wasm_f32x4_div(vb, va89AB);    in xnn_f32_vrdivc_relu_ukernel__wasmsimd_x16()

vsubc-relu-wasmsimd-x16.c
  36:  const v128_t va89AB = wasm_v128_load(a + 8);   in xnn_f32_vsubc_relu_ukernel__wasmsimd_x16() (local)
  42:  v128_t vy89AB = wasm_f32x4_sub(va89AB, vb);    in xnn_f32_vsubc_relu_ukernel__wasmsimd_x16()
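The -relu- variants at the end of the list perform the same arithmetic as their plain
counterparts (note the matched line numbers shift from 35/41 to 36/42) and then clamp the result
at zero before storing it. The clamp itself is on lines that did not match this query, so the
exact instruction is not shown; purely as an illustration of the effect, not the generated
kernels' code, a per-lane relu can be written as:

    #include <wasm_simd128.h>

    // Clamp negative lanes to 0.0f; illustrative only -- the generated relu
    // kernels apply an equivalent clamp on lines not matched above.
    static inline v128_t f32x4_relu_sketch(v128_t vy) {
      const v128_t vzero = wasm_f32x4_const_splat(0.0f);
      return wasm_f32x4_max(vy, vzero);
    }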