
Searched refs:vacc02 (Results 1 – 25 of 43) sorted by relevance

/external/XNNPACK/src/f32-gemm/gen/
1x4-minmax-scalar.c
45 float vacc02 = w[2]; in xnn_f32_gemm_minmax_ukernel_1x4__scalar() local
61 vacc02 += va0 * vb2; in xnn_f32_gemm_minmax_ukernel_1x4__scalar()
69 vacc02 = math_max_f32(vacc02, vmin); in xnn_f32_gemm_minmax_ukernel_1x4__scalar()
74 vacc02 = math_min_f32(vacc02, vmax); in xnn_f32_gemm_minmax_ukernel_1x4__scalar()
80 c0[2] = vacc02; in xnn_f32_gemm_minmax_ukernel_1x4__scalar()
91 vacc00 = vacc02; in xnn_f32_gemm_minmax_ukernel_1x4__scalar()
1x4-minmax-wasm.c
45 float vacc02 = w[2]; in xnn_f32_gemm_minmax_ukernel_1x4__wasm() local
61 vacc02 += va0 * vb2; in xnn_f32_gemm_minmax_ukernel_1x4__wasm()
69 vacc02 = __builtin_wasm_max_f32(vacc02, vmin); in xnn_f32_gemm_minmax_ukernel_1x4__wasm()
74 vacc02 = __builtin_wasm_min_f32(vacc02, vmax); in xnn_f32_gemm_minmax_ukernel_1x4__wasm()
80 c0[2] = vacc02; in xnn_f32_gemm_minmax_ukernel_1x4__wasm()
91 vacc00 = vacc02; in xnn_f32_gemm_minmax_ukernel_1x4__wasm()
2x4-minmax-scalar.c
51 float vacc02 = w[2]; in xnn_f32_gemm_minmax_ukernel_2x4__scalar() local
56 float vacc12 = vacc02; in xnn_f32_gemm_minmax_ukernel_2x4__scalar()
72 vacc02 += va0 * vb2; in xnn_f32_gemm_minmax_ukernel_2x4__scalar()
84 vacc02 = math_max_f32(vacc02, vmin); in xnn_f32_gemm_minmax_ukernel_2x4__scalar()
93 vacc02 = math_min_f32(vacc02, vmax); in xnn_f32_gemm_minmax_ukernel_2x4__scalar()
108 c0[2] = vacc02; in xnn_f32_gemm_minmax_ukernel_2x4__scalar()
124 vacc00 = vacc02; in xnn_f32_gemm_minmax_ukernel_2x4__scalar()
2x4-minmax-wasm.c
51 float vacc02 = w[2]; in xnn_f32_gemm_minmax_ukernel_2x4__wasm() local
56 float vacc12 = vacc02; in xnn_f32_gemm_minmax_ukernel_2x4__wasm()
72 vacc02 += va0 * vb2; in xnn_f32_gemm_minmax_ukernel_2x4__wasm()
84 vacc02 = __builtin_wasm_max_f32(vacc02, vmin); in xnn_f32_gemm_minmax_ukernel_2x4__wasm()
93 vacc02 = __builtin_wasm_min_f32(vacc02, vmax); in xnn_f32_gemm_minmax_ukernel_2x4__wasm()
108 c0[2] = vacc02; in xnn_f32_gemm_minmax_ukernel_2x4__wasm()
124 vacc00 = vacc02; in xnn_f32_gemm_minmax_ukernel_2x4__wasm()
1x4-relu-wasm.c
43 float vacc02 = w[2]; in xnn_f32_gemm_relu_ukernel_1x4__wasm() local
59 vacc02 += va0 * vb2; in xnn_f32_gemm_relu_ukernel_1x4__wasm()
67 vacc02 = __builtin_wasm_max_f32(vacc02, 0.0f); in xnn_f32_gemm_relu_ukernel_1x4__wasm()
73 c0[2] = vacc02; in xnn_f32_gemm_relu_ukernel_1x4__wasm()
84 vacc00 = vacc02; in xnn_f32_gemm_relu_ukernel_1x4__wasm()
1x4-relu-scalar.c
43 float vacc02 = w[2]; in xnn_f32_gemm_relu_ukernel_1x4__scalar() local
59 vacc02 += va0 * vb2; in xnn_f32_gemm_relu_ukernel_1x4__scalar()
67 vacc02 = math_max_f32(vacc02, 0.0f); in xnn_f32_gemm_relu_ukernel_1x4__scalar()
73 c0[2] = vacc02; in xnn_f32_gemm_relu_ukernel_1x4__scalar()
84 vacc00 = vacc02; in xnn_f32_gemm_relu_ukernel_1x4__scalar()
4x4-minmax-scalar.c
63 float vacc02 = w[2]; in xnn_f32_gemm_minmax_ukernel_4x4__scalar() local
68 float vacc12 = vacc02; in xnn_f32_gemm_minmax_ukernel_4x4__scalar()
72 float vacc22 = vacc02; in xnn_f32_gemm_minmax_ukernel_4x4__scalar()
76 float vacc32 = vacc02; in xnn_f32_gemm_minmax_ukernel_4x4__scalar()
94 vacc02 += va0 * vb2; in xnn_f32_gemm_minmax_ukernel_4x4__scalar()
114 vacc02 = math_max_f32(vacc02, vmin); in xnn_f32_gemm_minmax_ukernel_4x4__scalar()
131 vacc02 = math_min_f32(vacc02, vmax); in xnn_f32_gemm_minmax_ukernel_4x4__scalar()
164 c0[2] = vacc02; in xnn_f32_gemm_minmax_ukernel_4x4__scalar()
190 vacc00 = vacc02; in xnn_f32_gemm_minmax_ukernel_4x4__scalar()
4x4-minmax-wasm.c
63 float vacc02 = w[2]; in xnn_f32_gemm_minmax_ukernel_4x4__wasm() local
68 float vacc12 = vacc02; in xnn_f32_gemm_minmax_ukernel_4x4__wasm()
72 float vacc22 = vacc02; in xnn_f32_gemm_minmax_ukernel_4x4__wasm()
76 float vacc32 = vacc02; in xnn_f32_gemm_minmax_ukernel_4x4__wasm()
94 vacc02 += va0 * vb2; in xnn_f32_gemm_minmax_ukernel_4x4__wasm()
114 vacc02 = __builtin_wasm_max_f32(vacc02, vmin); in xnn_f32_gemm_minmax_ukernel_4x4__wasm()
131 vacc02 = __builtin_wasm_min_f32(vacc02, vmax); in xnn_f32_gemm_minmax_ukernel_4x4__wasm()
164 c0[2] = vacc02; in xnn_f32_gemm_minmax_ukernel_4x4__wasm()
190 vacc00 = vacc02; in xnn_f32_gemm_minmax_ukernel_4x4__wasm()
2x4-relu-scalar.c
49 float vacc02 = w[2]; in xnn_f32_gemm_relu_ukernel_2x4__scalar() local
54 float vacc12 = vacc02; in xnn_f32_gemm_relu_ukernel_2x4__scalar()
70 vacc02 += va0 * vb2; in xnn_f32_gemm_relu_ukernel_2x4__scalar()
82 vacc02 = math_max_f32(vacc02, 0.0f); in xnn_f32_gemm_relu_ukernel_2x4__scalar()
97 c0[2] = vacc02; in xnn_f32_gemm_relu_ukernel_2x4__scalar()
113 vacc00 = vacc02; in xnn_f32_gemm_relu_ukernel_2x4__scalar()
2x4-relu-wasm.c
49 float vacc02 = w[2]; in xnn_f32_gemm_relu_ukernel_2x4__wasm() local
54 float vacc12 = vacc02; in xnn_f32_gemm_relu_ukernel_2x4__wasm()
70 vacc02 += va0 * vb2; in xnn_f32_gemm_relu_ukernel_2x4__wasm()
82 vacc02 = __builtin_wasm_max_f32(vacc02, 0.0f); in xnn_f32_gemm_relu_ukernel_2x4__wasm()
97 c0[2] = vacc02; in xnn_f32_gemm_relu_ukernel_2x4__wasm()
113 vacc00 = vacc02; in xnn_f32_gemm_relu_ukernel_2x4__wasm()
4x4-relu-wasm.c
61 float vacc02 = w[2]; in xnn_f32_gemm_relu_ukernel_4x4__wasm() local
66 float vacc12 = vacc02; in xnn_f32_gemm_relu_ukernel_4x4__wasm()
70 float vacc22 = vacc02; in xnn_f32_gemm_relu_ukernel_4x4__wasm()
74 float vacc32 = vacc02; in xnn_f32_gemm_relu_ukernel_4x4__wasm()
92 vacc02 += va0 * vb2; in xnn_f32_gemm_relu_ukernel_4x4__wasm()
112 vacc02 = __builtin_wasm_max_f32(vacc02, 0.0f); in xnn_f32_gemm_relu_ukernel_4x4__wasm()
145 c0[2] = vacc02; in xnn_f32_gemm_relu_ukernel_4x4__wasm()
171 vacc00 = vacc02; in xnn_f32_gemm_relu_ukernel_4x4__wasm()
4x4-relu-scalar.c
61 float vacc02 = w[2]; in xnn_f32_gemm_relu_ukernel_4x4__scalar() local
66 float vacc12 = vacc02; in xnn_f32_gemm_relu_ukernel_4x4__scalar()
70 float vacc22 = vacc02; in xnn_f32_gemm_relu_ukernel_4x4__scalar()
74 float vacc32 = vacc02; in xnn_f32_gemm_relu_ukernel_4x4__scalar()
92 vacc02 += va0 * vb2; in xnn_f32_gemm_relu_ukernel_4x4__scalar()
112 vacc02 = math_max_f32(vacc02, 0.0f); in xnn_f32_gemm_relu_ukernel_4x4__scalar()
145 c0[2] = vacc02; in xnn_f32_gemm_relu_ukernel_4x4__scalar()
171 vacc00 = vacc02; in xnn_f32_gemm_relu_ukernel_4x4__scalar()
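
All of the f32-gemm/gen hits above trace one accumulator through its lifecycle: vacc02 is the running dot product for output row 0, column 2. It is seeded from the packed weights (w[2]), updated once per reduction step with va0 * vb2, clamped by the min/max or relu epilogue, and stored to c0[2]; the trailing "vacc00 = vacc02" hits come from the column-remainder epilogue that shifts surviving accumulators down when fewer than four output columns remain. The wasm files differ only in clamping with __builtin_wasm_max_f32/__builtin_wasm_min_f32. The sketch below reconstructs that flow for a 1x4 scalar min/max kernel; it is a simplified illustration assembled from the quoted lines, not the verbatim XNNPACK source, and the function signature, loop bounds, and helper definitions are assumptions.

  #include <stddef.h>

  static float math_max_f32(float a, float b) { return a > b ? a : b; }
  static float math_min_f32(float a, float b) { return a < b ? a : b; }

  /* Hypothetical, simplified 1x4 scalar min/max GEMM microkernel body.
     vaccMN holds the accumulator for output row M, column N; vacc02 is
     the identifier this search matched. */
  void gemm_1x4_minmax_sketch(size_t kc, const float* a0, const float* w,
                              float* c0, float vmin, float vmax)
  {
    /* Seed the accumulators from the packed bias at the head of w. */
    float vacc00 = w[0];
    float vacc01 = w[1];
    float vacc02 = w[2];
    float vacc03 = w[3];
    w += 4;

    /* One multiply-accumulate per output column for every element of
       row 0 of A. */
    for (size_t k = 0; k < kc; k++) {
      const float va0 = a0[k];
      const float vb0 = w[0];
      const float vb1 = w[1];
      const float vb2 = w[2];
      const float vb3 = w[3];
      w += 4;
      vacc00 += va0 * vb0;
      vacc01 += va0 * vb1;
      vacc02 += va0 * vb2;
      vacc03 += va0 * vb3;
    }

    /* min/max epilogue: clamp to [vmin, vmax]. The relu variants apply
       math_max_f32(vacc02, 0.0f) and skip the upper bound. */
    vacc00 = math_min_f32(math_max_f32(vacc00, vmin), vmax);
    vacc01 = math_min_f32(math_max_f32(vacc01, vmin), vmax);
    vacc02 = math_min_f32(math_max_f32(vacc02, vmin), vmax);
    vacc03 = math_min_f32(math_max_f32(vacc03, vmin), vmax);

    c0[0] = vacc00;
    c0[1] = vacc01;
    c0[2] = vacc02;
    c0[3] = vacc03;
  }
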
/external/XNNPACK/src/f32-gemm/gen-inc/
1x4inc-minmax-scalar.c
47 float vacc02 = acc[2]; in xnn_f32_gemminc_minmax_ukernel_1x4__scalar() local
63 vacc02 += va0 * vb2; in xnn_f32_gemminc_minmax_ukernel_1x4__scalar()
71 vacc02 = math_max_f32(vacc02, vmin); in xnn_f32_gemminc_minmax_ukernel_1x4__scalar()
76 vacc02 = math_min_f32(vacc02, vmax); in xnn_f32_gemminc_minmax_ukernel_1x4__scalar()
82 c0[2] = vacc02; in xnn_f32_gemminc_minmax_ukernel_1x4__scalar()
93 vacc00 = vacc02; in xnn_f32_gemminc_minmax_ukernel_1x4__scalar()
1x4inc-minmax-wasm.c
47 float vacc02 = acc[2]; in xnn_f32_gemminc_minmax_ukernel_1x4__wasm() local
63 vacc02 += va0 * vb2; in xnn_f32_gemminc_minmax_ukernel_1x4__wasm()
71 vacc02 = __builtin_wasm_max_f32(vacc02, vmin); in xnn_f32_gemminc_minmax_ukernel_1x4__wasm()
76 vacc02 = __builtin_wasm_min_f32(vacc02, vmax); in xnn_f32_gemminc_minmax_ukernel_1x4__wasm()
82 c0[2] = vacc02; in xnn_f32_gemminc_minmax_ukernel_1x4__wasm()
93 vacc00 = vacc02; in xnn_f32_gemminc_minmax_ukernel_1x4__wasm()
2x4inc-minmax-scalar.c
53 float vacc02 = acc[2]; in xnn_f32_gemminc_minmax_ukernel_2x4__scalar() local
74 vacc02 += va0 * vb2; in xnn_f32_gemminc_minmax_ukernel_2x4__scalar()
86 vacc02 = math_max_f32(vacc02, vmin); in xnn_f32_gemminc_minmax_ukernel_2x4__scalar()
95 vacc02 = math_min_f32(vacc02, vmax); in xnn_f32_gemminc_minmax_ukernel_2x4__scalar()
110 c0[2] = vacc02; in xnn_f32_gemminc_minmax_ukernel_2x4__scalar()
126 vacc00 = vacc02; in xnn_f32_gemminc_minmax_ukernel_2x4__scalar()
2x4inc-minmax-wasm.c
53 float vacc02 = acc[2]; in xnn_f32_gemminc_minmax_ukernel_2x4__wasm() local
74 vacc02 += va0 * vb2; in xnn_f32_gemminc_minmax_ukernel_2x4__wasm()
86 vacc02 = __builtin_wasm_max_f32(vacc02, vmin); in xnn_f32_gemminc_minmax_ukernel_2x4__wasm()
95 vacc02 = __builtin_wasm_min_f32(vacc02, vmax); in xnn_f32_gemminc_minmax_ukernel_2x4__wasm()
110 c0[2] = vacc02; in xnn_f32_gemminc_minmax_ukernel_2x4__wasm()
126 vacc00 = vacc02; in xnn_f32_gemminc_minmax_ukernel_2x4__wasm()
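
The f32-gemm/gen-inc hits above differ from the gen hits in one respect visible in the quoted lines: the gemminc kernels seed vacc02 from a caller-supplied accumulation buffer (acc[2]) rather than from the packed bias (w[2]), so they can resume a partial sum. A minimal illustration of just that difference; the function names are hypothetical:

  /* gemm:    start the row-0, column-2 accumulator from the packed bias. */
  static float seed_vacc02_gemm(const float* w) { return w[2]; }

  /* gemminc: resume the accumulator from a previously computed partial sum. */
  static float seed_vacc02_gemminc(const float* acc) { return acc[2]; }
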
/external/XNNPACK/src/f32-igemm/gen/
1x4-minmax-wasm.c
49 float vacc02 = w[2]; in xnn_f32_igemm_minmax_ukernel_1x4__wasm() local
74 vacc02 += va0 * vb2; in xnn_f32_igemm_minmax_ukernel_1x4__wasm()
84 vacc02 = __builtin_wasm_max_f32(vacc02, vmin); in xnn_f32_igemm_minmax_ukernel_1x4__wasm()
89 vacc02 = __builtin_wasm_min_f32(vacc02, vmax); in xnn_f32_igemm_minmax_ukernel_1x4__wasm()
95 c0[2] = vacc02; in xnn_f32_igemm_minmax_ukernel_1x4__wasm()
105 vacc00 = vacc02; in xnn_f32_igemm_minmax_ukernel_1x4__wasm()
1x4-minmax-scalar.c
49 float vacc02 = w[2]; in xnn_f32_igemm_minmax_ukernel_1x4__scalar() local
74 vacc02 += va0 * vb2; in xnn_f32_igemm_minmax_ukernel_1x4__scalar()
84 vacc02 = math_max_f32(vacc02, vmin); in xnn_f32_igemm_minmax_ukernel_1x4__scalar()
89 vacc02 = math_min_f32(vacc02, vmax); in xnn_f32_igemm_minmax_ukernel_1x4__scalar()
95 c0[2] = vacc02; in xnn_f32_igemm_minmax_ukernel_1x4__scalar()
105 vacc00 = vacc02; in xnn_f32_igemm_minmax_ukernel_1x4__scalar()
2x4-minmax-wasm.c
53 float vacc02 = w[2]; in xnn_f32_igemm_minmax_ukernel_2x4__wasm() local
57 float vacc12 = vacc02; in xnn_f32_igemm_minmax_ukernel_2x4__wasm()
88 vacc02 += va0 * vb2; in xnn_f32_igemm_minmax_ukernel_2x4__wasm()
102 vacc02 = __builtin_wasm_max_f32(vacc02, vmin); in xnn_f32_igemm_minmax_ukernel_2x4__wasm()
111 vacc02 = __builtin_wasm_min_f32(vacc02, vmax); in xnn_f32_igemm_minmax_ukernel_2x4__wasm()
126 c0[2] = vacc02; in xnn_f32_igemm_minmax_ukernel_2x4__wasm()
140 vacc00 = vacc02; in xnn_f32_igemm_minmax_ukernel_2x4__wasm()
2x4-minmax-scalar.c
53 float vacc02 = w[2]; in xnn_f32_igemm_minmax_ukernel_2x4__scalar() local
57 float vacc12 = vacc02; in xnn_f32_igemm_minmax_ukernel_2x4__scalar()
88 vacc02 += va0 * vb2; in xnn_f32_igemm_minmax_ukernel_2x4__scalar()
102 vacc02 = math_max_f32(vacc02, vmin); in xnn_f32_igemm_minmax_ukernel_2x4__scalar()
111 vacc02 = math_min_f32(vacc02, vmax); in xnn_f32_igemm_minmax_ukernel_2x4__scalar()
126 c0[2] = vacc02; in xnn_f32_igemm_minmax_ukernel_2x4__scalar()
140 vacc00 = vacc02; in xnn_f32_igemm_minmax_ukernel_2x4__scalar()
4x4-minmax-scalar.c
61 float vacc02 = w[2]; in xnn_f32_igemm_minmax_ukernel_4x4__scalar() local
65 float vacc12 = vacc02; in xnn_f32_igemm_minmax_ukernel_4x4__scalar()
69 float vacc22 = vacc02; in xnn_f32_igemm_minmax_ukernel_4x4__scalar()
73 float vacc32 = vacc02; in xnn_f32_igemm_minmax_ukernel_4x4__scalar()
116 vacc02 += va0 * vb2; in xnn_f32_igemm_minmax_ukernel_4x4__scalar()
138 vacc02 = math_max_f32(vacc02, vmin); in xnn_f32_igemm_minmax_ukernel_4x4__scalar()
155 vacc02 = math_min_f32(vacc02, vmax); in xnn_f32_igemm_minmax_ukernel_4x4__scalar()
188 c0[2] = vacc02; in xnn_f32_igemm_minmax_ukernel_4x4__scalar()
210 vacc00 = vacc02; in xnn_f32_igemm_minmax_ukernel_4x4__scalar()
4x4-minmax-wasm.c
61 float vacc02 = w[2]; in xnn_f32_igemm_minmax_ukernel_4x4__wasm() local
65 float vacc12 = vacc02; in xnn_f32_igemm_minmax_ukernel_4x4__wasm()
69 float vacc22 = vacc02; in xnn_f32_igemm_minmax_ukernel_4x4__wasm()
73 float vacc32 = vacc02; in xnn_f32_igemm_minmax_ukernel_4x4__wasm()
116 vacc02 += va0 * vb2; in xnn_f32_igemm_minmax_ukernel_4x4__wasm()
138 vacc02 = __builtin_wasm_max_f32(vacc02, vmin); in xnn_f32_igemm_minmax_ukernel_4x4__wasm()
155 vacc02 = __builtin_wasm_min_f32(vacc02, vmax); in xnn_f32_igemm_minmax_ukernel_4x4__wasm()
188 c0[2] = vacc02; in xnn_f32_igemm_minmax_ukernel_4x4__wasm()
210 vacc00 = vacc02; in xnn_f32_igemm_minmax_ukernel_4x4__wasm()
1x4-relu-scalar.c
47 float vacc02 = w[2]; in xnn_f32_igemm_relu_ukernel_1x4__scalar() local
72 vacc02 += va0 * vb2; in xnn_f32_igemm_relu_ukernel_1x4__scalar()
82 vacc02 = math_max_f32(vacc02, 0.0f); in xnn_f32_igemm_relu_ukernel_1x4__scalar()
88 c0[2] = vacc02; in xnn_f32_igemm_relu_ukernel_1x4__scalar()
98 vacc00 = vacc02; in xnn_f32_igemm_relu_ukernel_1x4__scalar()
1x4-relu-wasm.c
47 float vacc02 = w[2]; in xnn_f32_igemm_relu_ukernel_1x4__wasm() local
72 vacc02 += va0 * vb2; in xnn_f32_igemm_relu_ukernel_1x4__wasm()
82 vacc02 = __builtin_wasm_max_f32(vacc02, 0.0f); in xnn_f32_igemm_relu_ukernel_1x4__wasm()
88 c0[2] = vacc02; in xnn_f32_igemm_relu_ukernel_1x4__wasm()
98 vacc00 = vacc02; in xnn_f32_igemm_relu_ukernel_1x4__wasm()
2x4-relu-scalar.c
51 float vacc02 = w[2]; in xnn_f32_igemm_relu_ukernel_2x4__scalar() local
55 float vacc12 = vacc02; in xnn_f32_igemm_relu_ukernel_2x4__scalar()
86 vacc02 += va0 * vb2; in xnn_f32_igemm_relu_ukernel_2x4__scalar()
100 vacc02 = math_max_f32(vacc02, 0.0f); in xnn_f32_igemm_relu_ukernel_2x4__scalar()
115 c0[2] = vacc02; in xnn_f32_igemm_relu_ukernel_2x4__scalar()
129 vacc00 = vacc02; in xnn_f32_igemm_relu_ukernel_2x4__scalar()
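
The f32-igemm hits show the same vacc02 lifecycle as the direct gemm kernels; the indirect-GEMM variants read row 0 of A through a buffer of row pointers, one pointer per pass over the packed weights, which is why the same statements sit at higher line numbers. The outline below is a rough sketch of that outer structure under that assumption, reduced to the single accumulator this search matched; the signature and loop shape are illustrative, not the exact XNNPACK prototype.

  #include <stddef.h>

  /* Hypothetical outline of an indirect (igemm) reduction for row 0,
     column 2 only; vacc02 itself is seeded, accumulated, clamped, and
     stored exactly as in the gemm sketch above. */
  float igemm_row0_col2_sketch(size_t ks, size_t kc,
                               const float** a, const float* w)
  {
    float vacc02 = w[2];          /* seeded from the packed bias */
    const float* weights = w + 4; /* per-column weights follow the bias */
    for (size_t p = 0; p < ks; p++) {
      const float* a0 = a[p];     /* row-0 pointer for this pass */
      for (size_t k = 0; k < kc; k++) {
        const float va0 = a0[k];
        const float vb2 = weights[2];
        vacc02 += va0 * vb2;
        weights += 4;
      }
    }
    return vacc02;
  }
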
