Lines Matching refs:__v2df

33 typedef double __v2df __attribute__ ((__vector_size__ (16)));
62 return (__m128d)((__v2df)__a + (__v2df)__b); in _mm_add_pd()
75 return (__m128d)((__v2df)__a - (__v2df)__b); in _mm_sub_pd()
88 return (__m128d)((__v2df)__a * (__v2df)__b); in _mm_mul_pd()
101 return (__m128d)((__v2df)__a / (__v2df)__b); in _mm_div_pd()
107 __m128d __c = __builtin_ia32_sqrtsd((__v2df)__b); in _mm_sqrt_sd()
114 return __builtin_ia32_sqrtpd((__v2df)__a); in _mm_sqrt_pd()
120 return __builtin_ia32_minsd((__v2df)__a, (__v2df)__b); in _mm_min_sd()
126 return __builtin_ia32_minpd((__v2df)__a, (__v2df)__b); in _mm_min_pd()
132 return __builtin_ia32_maxsd((__v2df)__a, (__v2df)__b); in _mm_max_sd()
138 return __builtin_ia32_maxpd((__v2df)__a, (__v2df)__b); in _mm_max_pd()
168 return (__m128d)__builtin_ia32_cmpeqpd((__v2df)__a, (__v2df)__b); in _mm_cmpeq_pd()
174 return (__m128d)__builtin_ia32_cmpltpd((__v2df)__a, (__v2df)__b); in _mm_cmplt_pd()
180 return (__m128d)__builtin_ia32_cmplepd((__v2df)__a, (__v2df)__b); in _mm_cmple_pd()
186 return (__m128d)__builtin_ia32_cmpltpd((__v2df)__b, (__v2df)__a); in _mm_cmpgt_pd()
192 return (__m128d)__builtin_ia32_cmplepd((__v2df)__b, (__v2df)__a); in _mm_cmpge_pd()
198 return (__m128d)__builtin_ia32_cmpordpd((__v2df)__a, (__v2df)__b); in _mm_cmpord_pd()
204 return (__m128d)__builtin_ia32_cmpunordpd((__v2df)__a, (__v2df)__b); in _mm_cmpunord_pd()
210 return (__m128d)__builtin_ia32_cmpneqpd((__v2df)__a, (__v2df)__b); in _mm_cmpneq_pd()
216 return (__m128d)__builtin_ia32_cmpnltpd((__v2df)__a, (__v2df)__b); in _mm_cmpnlt_pd()
222 return (__m128d)__builtin_ia32_cmpnlepd((__v2df)__a, (__v2df)__b); in _mm_cmpnle_pd()
228 return (__m128d)__builtin_ia32_cmpnltpd((__v2df)__b, (__v2df)__a); in _mm_cmpngt_pd()
234 return (__m128d)__builtin_ia32_cmpnlepd((__v2df)__b, (__v2df)__a); in _mm_cmpnge_pd()
240 return (__m128d)__builtin_ia32_cmpeqsd((__v2df)__a, (__v2df)__b); in _mm_cmpeq_sd()
246 return (__m128d)__builtin_ia32_cmpltsd((__v2df)__a, (__v2df)__b); in _mm_cmplt_sd()
252 return (__m128d)__builtin_ia32_cmplesd((__v2df)__a, (__v2df)__b); in _mm_cmple_sd()
258 __m128d __c = __builtin_ia32_cmpltsd((__v2df)__b, (__v2df)__a); in _mm_cmpgt_sd()
265 __m128d __c = __builtin_ia32_cmplesd((__v2df)__b, (__v2df)__a); in _mm_cmpge_sd()
272 return (__m128d)__builtin_ia32_cmpordsd((__v2df)__a, (__v2df)__b); in _mm_cmpord_sd()
278 return (__m128d)__builtin_ia32_cmpunordsd((__v2df)__a, (__v2df)__b); in _mm_cmpunord_sd()
284 return (__m128d)__builtin_ia32_cmpneqsd((__v2df)__a, (__v2df)__b); in _mm_cmpneq_sd()
290 return (__m128d)__builtin_ia32_cmpnltsd((__v2df)__a, (__v2df)__b); in _mm_cmpnlt_sd()
296 return (__m128d)__builtin_ia32_cmpnlesd((__v2df)__a, (__v2df)__b); in _mm_cmpnle_sd()
302 __m128d __c = __builtin_ia32_cmpnltsd((__v2df)__b, (__v2df)__a); in _mm_cmpngt_sd()
309 __m128d __c = __builtin_ia32_cmpnlesd((__v2df)__b, (__v2df)__a); in _mm_cmpnge_sd()
316 return __builtin_ia32_comisdeq((__v2df)__a, (__v2df)__b); in _mm_comieq_sd()
322 return __builtin_ia32_comisdlt((__v2df)__a, (__v2df)__b); in _mm_comilt_sd()
328 return __builtin_ia32_comisdle((__v2df)__a, (__v2df)__b); in _mm_comile_sd()
334 return __builtin_ia32_comisdgt((__v2df)__a, (__v2df)__b); in _mm_comigt_sd()
340 return __builtin_ia32_comisdge((__v2df)__a, (__v2df)__b); in _mm_comige_sd()
346 return __builtin_ia32_comisdneq((__v2df)__a, (__v2df)__b); in _mm_comineq_sd()
352 return __builtin_ia32_ucomisdeq((__v2df)__a, (__v2df)__b); in _mm_ucomieq_sd()
358 return __builtin_ia32_ucomisdlt((__v2df)__a, (__v2df)__b); in _mm_ucomilt_sd()
364 return __builtin_ia32_ucomisdle((__v2df)__a, (__v2df)__b); in _mm_ucomile_sd()
370 return __builtin_ia32_ucomisdgt((__v2df)__a, (__v2df)__b); in _mm_ucomigt_sd()
376 return __builtin_ia32_ucomisdge((__v2df)__a, (__v2df)__b); in _mm_ucomige_sd()
382 return __builtin_ia32_ucomisdneq((__v2df)__a, (__v2df)__b); in _mm_ucomineq_sd()
388 return __builtin_ia32_cvtpd2ps((__v2df)__a); in _mm_cvtpd_ps()
395 __builtin_shufflevector((__v4sf)__a, (__v4sf)__a, 0, 1), __v2df); in _mm_cvtps_pd()
402 __builtin_shufflevector((__v4si)__a, (__v4si)__a, 0, 1), __v2df); in _mm_cvtepi32_pd()
408 return __builtin_ia32_cvtpd2dq((__v2df)__a); in _mm_cvtpd_epi32()
414 return __builtin_ia32_cvtsd2si((__v2df)__a); in _mm_cvtsd_si32()
441 return (__m128i)__builtin_ia32_cvttpd2dq((__v2df)__a); in _mm_cvttpd_epi32()
453 return (__m64)__builtin_ia32_cvtpd2pi((__v2df)__a); in _mm_cvtpd_pi32()
459 return (__m64)__builtin_ia32_cvttpd2pi((__v2df)__a); in _mm_cvttpd_pi32()
496 return __builtin_shufflevector((__v2df)__u, (__v2df)__u, 1, 0); in _mm_loadr_pd()
608 __a = __builtin_shufflevector((__v2df)__a, (__v2df)__a, 0, 0); in _mm_store1_pd()
630 __a = __builtin_shufflevector((__v2df)__a, (__v2df)__a, 1, 0); in _mm_storer_pd()
1693 return __builtin_ia32_cvtsd2si64((__v2df)__a); in _mm_cvtsd_si64()
2221 __builtin_nontemporal_store((__v2df)__a, (__v2df*)__p); in _mm_stream_pd()
2392 return __builtin_shufflevector((__v2df)__a, (__v2df)__b, 1, 2+1); in _mm_unpackhi_pd()
2398 return __builtin_shufflevector((__v2df)__a, (__v2df)__b, 0, 2+0); in _mm_unpacklo_pd()
2404 return __builtin_ia32_movmskpd((__v2df)__a); in _mm_movemask_pd()
2408 (__m128d)__builtin_shufflevector((__v2df)(__m128d)(a), (__v2df)(__m128d)(b), \
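
All of the matches above appear to come from an SSE2 intrinsics header (emmintrin.h): __v2df is the internal GCC/Clang vector type for two packed doubles, and the _mm_*_pd/_mm_*_sd intrinsics cast the public __m128d type to it so each operation can be lowered to a vector builtin or plain vector arithmetic. As a minimal sketch (not taken from the listing itself), the snippet below exercises a few of the listed intrinsics from application code; the setup and store helpers _mm_set_pd and _mm_storeu_pd are other emmintrin.h intrinsics that are assumed here and do not appear in the match list.

    /* Compile with SSE2 enabled, e.g. cc -msse2 demo.c */
    #include <emmintrin.h>
    #include <stdio.h>

    int main(void)
    {
        __m128d a = _mm_set_pd(3.0, 4.0);   /* lanes: low = 4.0, high = 3.0 */
        __m128d b = _mm_set_pd(1.0, 2.0);   /* lanes: low = 2.0, high = 1.0 */

        __m128d sum  = _mm_add_pd(a, b);    /* per-lane add: {6.0, 4.0} */
        __m128d root = _mm_sqrt_pd(sum);    /* per-lane square root */
        __m128d ge   = _mm_cmpge_pd(a, b);  /* all-ones lane mask where a >= b */
        int     mask = _mm_movemask_pd(ge); /* sign bit of each lane -> 0..3 */

        double out[2];
        _mm_storeu_pd(out, root);           /* out[0] = low lane, out[1] = high lane */
        printf("sqrt lanes: %f %f, ge mask: %d\n", out[0], out[1], mask);
        return 0;
    }

Since both lanes of a compare greater-or-equal to the corresponding lanes of b, _mm_movemask_pd collects both sign bits and the printed mask is 3.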