/external/clang/test/CodeGen/ |
D | sse2-builtins.c |
    35 __m128d test_mm_add_pd(__m128d A, __m128d B) { in test_mm_add_pd()
    41 __m128d test_mm_add_sd(__m128d A, __m128d B) { in test_mm_add_sd()
    74 __m128d test_mm_and_pd(__m128d A, __m128d B) { in test_mm_and_pd()
    86 __m128d test_mm_andnot_pd(__m128d A, __m128d B) { in test_mm_andnot_pd()
    124 __m128 test_mm_castpd_ps(__m128d A) { in test_mm_castpd_ps()
    130 __m128i test_mm_castpd_si128(__m128d A) { in test_mm_castpd_si128()
    136 __m128d test_mm_castps_pd(__m128 A) { in test_mm_castps_pd()
    148 __m128d test_mm_castsi128_pd(__m128i A) { in test_mm_castsi128_pd()
    184 __m128d test_mm_cmpeq_pd(__m128d A, __m128d B) { in test_mm_cmpeq_pd()
    193 __m128d test_mm_cmpeq_sd(__m128d A, __m128d B) { in test_mm_cmpeq_sd()
    [all …]
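
For context, each of these CodeGen tests wraps one intrinsic and FileChecks the IR clang emits for it. A minimal sketch of the pattern, assuming a typical RUN line and CHECK text (illustrative, not copied from the file):

    // RUN: %clang_cc1 -ffreestanding %s -triple=x86_64-unknown-unknown \
    // RUN:   -target-feature +sse2 -emit-llvm -o - -Wall -Werror | FileCheck %s
    #include <emmintrin.h>

    __m128d test_mm_add_pd(__m128d A, __m128d B) {
      // CHECK-LABEL: test_mm_add_pd
      // CHECK: fadd <2 x double>
      return _mm_add_pd(A, B);
    }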
|
D | fma-builtins.c |
    13 __m128d test_mm_fmadd_pd(__m128d a, __m128d b, __m128d c) { in test_mm_fmadd_pd()
    23 __m128d test_mm_fmadd_sd(__m128d a, __m128d b, __m128d c) { in test_mm_fmadd_sd()
    33 __m128d test_mm_fmsub_pd(__m128d a, __m128d b, __m128d c) { in test_mm_fmsub_pd()
    43 __m128d test_mm_fmsub_sd(__m128d a, __m128d b, __m128d c) { in test_mm_fmsub_sd()
    53 __m128d test_mm_fnmadd_pd(__m128d a, __m128d b, __m128d c) { in test_mm_fnmadd_pd()
    63 __m128d test_mm_fnmadd_sd(__m128d a, __m128d b, __m128d c) { in test_mm_fnmadd_sd()
    73 __m128d test_mm_fnmsub_pd(__m128d a, __m128d b, __m128d c) { in test_mm_fnmsub_pd()
    83 __m128d test_mm_fnmsub_sd(__m128d a, __m128d b, __m128d c) { in test_mm_fnmsub_sd()
    93 __m128d test_mm_fmaddsub_pd(__m128d a, __m128d b, __m128d c) { in test_mm_fmaddsub_pd()
    103 __m128d test_mm_fmsubadd_pd(__m128d a, __m128d b, __m128d c) { in test_mm_fmsubadd_pd()
|
D | fma4-builtins.c |
    14 __m128d test_mm_macc_pd(__m128d a, __m128d b, __m128d c) { in test_mm_macc_pd()
    26 __m128d test_mm_macc_sd(__m128d a, __m128d b, __m128d c) { in test_mm_macc_sd()
    38 __m128d test_mm_msub_pd(__m128d a, __m128d b, __m128d c) { in test_mm_msub_pd()
    50 __m128d test_mm_msub_sd(__m128d a, __m128d b, __m128d c) { in test_mm_msub_sd()
    62 __m128d test_mm_nmacc_pd(__m128d a, __m128d b, __m128d c) { in test_mm_nmacc_pd()
    74 __m128d test_mm_nmacc_sd(__m128d a, __m128d b, __m128d c) { in test_mm_nmacc_sd()
    86 __m128d test_mm_nmsub_pd(__m128d a, __m128d b, __m128d c) { in test_mm_nmsub_pd()
    98 __m128d test_mm_nmsub_sd(__m128d a, __m128d b, __m128d c) { in test_mm_nmsub_sd()
    110 __m128d test_mm_maddsub_pd(__m128d a, __m128d b, __m128d c) { in test_mm_maddsub_pd()
    122 __m128d test_mm_msubadd_pd(__m128d a, __m128d b, __m128d c) { in test_mm_msubadd_pd()
|
D | avx-cmp-builtins.c |
    13 __m128d test_cmp_pd(__m128d a, __m128d b) { in test_cmp_pd()
    19 __m128d test_cmp_ps(__m128 a, __m128 b) { in test_cmp_ps()
    37 __m128d test_cmp_sd(__m128d a, __m128d b) { in test_cmp_sd()
    43 __m128d test_cmp_ss(__m128 a, __m128 b) { in test_cmp_ss()
    73 __m128d test_cmpgt_sd(__m128d a, __m128d b) { in test_cmpgt_sd()
    79 __m128d test_cmpge_sd(__m128d a, __m128d b) { in test_cmpge_sd()
    85 __m128d test_cmpngt_sd(__m128d a, __m128d b) { in test_cmpngt_sd()
    91 __m128d test_cmpnge_sd(__m128d a, __m128d b) { in test_cmpnge_sd()
|
D | avx512vldq-builtins.c |
    56 __m128d test_mm_mask_andnot_pd (__m128d __W, __mmask8 __U, __m128d __A, __m128d __B) { in test_mm_mask_andnot_pd()
    59 return (__m128d) _mm_mask_andnot_pd ( __W, __U, __A, __B); in test_mm_mask_andnot_pd()
    62 __m128d test_mm_maskz_andnot_pd (__mmask8 __U, __m128d __A, __m128d __B) { in test_mm_maskz_andnot_pd()
    65 return (__m128d) _mm_maskz_andnot_pd (__U, __A, __B); in test_mm_maskz_andnot_pd()
    104 __m128d test_mm_mask_and_pd (__m128d __W, __mmask8 __U, __m128d __A, __m128d __B) { in test_mm_mask_and_pd()
    107 return (__m128d) _mm_mask_and_pd ( __W, __U, __A, __B); in test_mm_mask_and_pd()
    110 __m128d test_mm_maskz_and_pd (__mmask8 __U, __m128d __A, __m128d __B) { in test_mm_maskz_and_pd()
    113 return (__m128d) _mm_maskz_and_pd (__U, __A, __B); in test_mm_maskz_and_pd()
    152 __m128d test_mm_mask_xor_pd (__m128d __W, __mmask8 __U, __m128d __A, __m128d __B) { in test_mm_mask_xor_pd()
    155 return (__m128d) _mm_mask_xor_pd ( __W, __U, __A, __B); in test_mm_mask_xor_pd()
    [all …]
|
D | sse41-builtins.c |
    17 __m128d test_mm_blend_pd(__m128d V1, __m128d V2) { in test_mm_blend_pd()
    35 __m128d test_mm_blendv_pd(__m128d V1, __m128d V2, __m128d V3) { in test_mm_blendv_pd()
    47 __m128d test_mm_ceil_pd(__m128d x) { in test_mm_ceil_pd()
    59 __m128d test_mm_ceil_sd(__m128d x, __m128d y) { in test_mm_ceil_sd()
    162 __m128d test_mm_dp_pd(__m128d x, __m128d y) { in test_mm_dp_pd()
    199 __m128d test_mm_floor_pd(__m128d x) { in test_mm_floor_pd()
    211 __m128d test_mm_floor_sd(__m128d x, __m128d y) { in test_mm_floor_sd()
    333 __m128d test_mm_round_pd(__m128d x) { in test_mm_round_pd()
    345 __m128d test_mm_round_sd(__m128d x, __m128d y) { in test_mm_round_sd()
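
A hedged usage sketch of the SSE4.1 rounding and blend intrinsics exercised above (assumes <smmintrin.h> and -msse4.1; the helper name is made up):

    #include <smmintrin.h>                       /* SSE4.1 */

    __m128d floor_low_ceil_high(__m128d x, __m128d y) {
      __m128d f = _mm_floor_pd(x);               /* both lanes rounded toward -inf */
      __m128d c = _mm_ceil_pd(y);                /* both lanes rounded toward +inf */
      return _mm_blend_pd(f, c, 0x2);            /* immediate bit i picks the second
                                                    operand for lane i: lane 0 from f,
                                                    lane 1 from c */
    }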
|
D | avx512er-builtins.c |
    116 __m128d test_mm_rsqrt28_round_sd(__m128d a, __m128d b) { in test_mm_rsqrt28_round_sd()
    122 __m128d test_mm_mask_rsqrt28_round_sd(__m128d s, __mmask8 m, __m128d a, __m128d b) { in test_mm_mask_rsqrt28_round_sd()
    128 __m128d test_mm_maskz_rsqrt28_round_sd(__mmask8 m, __m128d a, __m128d b) { in test_mm_maskz_rsqrt28_round_sd()
    242 __m128d test_mm_rcp28_round_sd(__m128d a, __m128d b) { in test_mm_rcp28_round_sd()
    248 __m128d test_mm_mask_rcp28_round_sd(__m128d s, __mmask8 m, __m128d a, __m128d b) { in test_mm_mask_rcp28_round_sd()
    254 __m128d test_mm_maskz_rcp28_round_sd(__mmask8 m, __m128d a, __m128d b) { in test_mm_maskz_rcp28_round_sd()
    260 __m128d test_mm_rcp28_sd(__m128d a, __m128d b) { in test_mm_rcp28_sd()
    266 __m128d test_mm_mask_rcp28_sd(__m128d s, __mmask8 m, __m128d a, __m128d b) { in test_mm_mask_rcp28_sd()
    272 __m128d test_mm_maskz_rcp28_sd(__mmask8 m, __m128d a, __m128d b) { in test_mm_maskz_rcp28_sd()
|
D | sse3-builtins.c |
    10 __m128d test_mm_addsub_pd(__m128d A, __m128d B) { in test_mm_addsub_pd()
    22 __m128d test_mm_hadd_pd(__m128d A, __m128d B) { in test_mm_hadd_pd()
    34 __m128d test_mm_hsub_pd(__m128d A, __m128d B) { in test_mm_hsub_pd()
    52 __m128d test_mm_loaddup_pd(double const* P) { in test_mm_loaddup_pd()
    60 __m128d test_mm_movedup_pd(__m128d A) { in test_mm_movedup_pd()
|
D | avx512f-builtins.c |
    1783 __m128d test_mm_add_round_sd(__m128d __A, __m128d __B) { in test_mm_add_round_sd()
    1788 __m128d test_mm_mask_add_round_sd(__m128d __W, __mmask8 __U, __m128d __A, __m128d __B) { in test_mm_mask_add_round_sd()
    1793 __m128d test_mm_maskz_add_round_sd(__mmask8 __U, __m128d __A, __m128d __B) { in test_mm_maskz_add_round_sd()
    1798 __m128d test_mm_mask_add_sd(__m128d __W, __mmask8 __U, __m128d __A, __m128d __B) { in test_mm_mask_add_sd()
    1803 __m128d test_mm_maskz_add_sd(__mmask8 __U, __m128d __A, __m128d __B) { in test_mm_maskz_add_sd()
    1883 __m128d test_mm_sub_round_sd(__m128d __A, __m128d __B) { in test_mm_sub_round_sd()
    1888 __m128d test_mm_mask_sub_round_sd(__m128d __W, __mmask8 __U, __m128d __A, __m128d __B) { in test_mm_mask_sub_round_sd()
    1893 __m128d test_mm_maskz_sub_round_sd(__mmask8 __U, __m128d __A, __m128d __B) { in test_mm_maskz_sub_round_sd()
    1898 __m128d test_mm_mask_sub_sd(__m128d __W, __mmask8 __U, __m128d __A, __m128d __B) { in test_mm_mask_sub_sd()
    1903 __m128d test_mm_maskz_sub_sd(__mmask8 __U, __m128d __A, __m128d __B) { in test_mm_maskz_sub_sd()
    [all …]
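
The *_round_* scalar forms tested here encode the rounding mode in the instruction instead of reading MXCSR. A hedged sketch (assumes AVX-512F and -mavx512f; these immediates are normally OR'ed with _MM_FROUND_NO_EXC):

    #include <immintrin.h>                       /* AVX-512F */

    __m128d add_low_round_down(__m128d a, __m128d b) {
      /* Low lane: a[0] + b[0] rounded toward -inf; high lane copied from a. */
      return _mm_add_round_sd(a, b, _MM_FROUND_TO_NEG_INF | _MM_FROUND_NO_EXC);
    }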
|
D | avx512dq-builtins.c |
    638 __m128d test_mm512_range_round_sd(__m128d __A, __m128d __B) { in test_mm512_range_round_sd()
    644 __m128d test_mm512_mask_range_round_sd(__m128d __W, __mmask8 __U, __m128d __A, __m128d __B) { in test_mm512_mask_range_round_sd()
    650 __m128d test_mm512_maskz_range_round_sd(__mmask8 __U, __m128d __A, __m128d __B) { in test_mm512_maskz_range_round_sd()
    656 __m128d test_mm512_range_round_ss(__m128d __A, __m128d __B) { in test_mm512_range_round_ss()
    662 __m128d test_mm512_mask_range_round_ss(__m128 __W, __mmask8 __U, __m128 __A, __m128 __B) { in test_mm512_mask_range_round_ss()
    674 __m128d test_mm_range_sd(__m128d __A, __m128d __B) { in test_mm_range_sd()
    680 __m128d test_mm_mask_range_sd(__m128d __W, __mmask8 __U, __m128d __A, __m128d __B) { in test_mm_mask_range_sd()
    686 __m128d test_mm_maskz_range_sd(__mmask8 __U, __m128d __A, __m128d __B) { in test_mm_maskz_range_sd()
    692 __m128d test_mm_range_ss(__m128d __A, __m128d __B) { in test_mm_range_ss()
    698 __m128d test_mm_mask_range_ss(__m128 __W, __mmask8 __U, __m128 __A, __m128 __B) { in test_mm_mask_range_ss()
    [all …]
|
D | avx512vl-builtins.c |
    1060 __mmask8 test_mm_cmp_pd_mask(__m128d __A, __m128d __B) { in test_mm_cmp_pd_mask()
    1066 __mmask8 test_mm_mask_cmp_pd_mask(__mmask8 m, __m128d __A, __m128d __B) { in test_mm_mask_cmp_pd_mask()
    1072 __m128d test_mm_mask_fmadd_pd(__m128d __A, __mmask8 __U, __m128d __B, __m128d __C) { in test_mm_mask_fmadd_pd()
    1078 __m128d test_mm_mask_fmsub_pd(__m128d __A, __mmask8 __U, __m128d __B, __m128d __C) { in test_mm_mask_fmsub_pd()
    1084 __m128d test_mm_mask3_fmadd_pd(__m128d __A, __m128d __B, __m128d __C, __mmask8 __U) { in test_mm_mask3_fmadd_pd()
    1090 __m128d test_mm_mask3_fnmadd_pd(__m128d __A, __m128d __B, __m128d __C, __mmask8 __U) { in test_mm_mask3_fnmadd_pd()
    1096 __m128d test_mm_maskz_fmadd_pd(__mmask8 __U, __m128d __A, __m128d __B, __m128d __C) { in test_mm_maskz_fmadd_pd()
    1102 __m128d test_mm_maskz_fmsub_pd(__mmask8 __U, __m128d __A, __m128d __B, __m128d __C) { in test_mm_maskz_fmsub_pd()
    1108 __m128d test_mm_maskz_fnmadd_pd(__mmask8 __U, __m128d __A, __m128d __B, __m128d __C) { in test_mm_maskz_fnmadd_pd()
    1114 __m128d test_mm_maskz_fnmsub_pd(__mmask8 __U, __m128d __A, __m128d __B, __m128d __C) { in test_mm_maskz_fnmsub_pd()
    [all …]
|
D | avx-shuffle-builtins.c |
    19 __m128d test_mm_permute_pd(__m128d a) { in test_mm_permute_pd()
    110 __m256d test_mm256_insertf128_pd_0(__m256d a, __m128d b) { in test_mm256_insertf128_pd_0()
    128 __m256d test_mm256_insertf128_pd_1(__m256d a, __m128d b) { in test_mm256_insertf128_pd_1()
    148 __m128d test_mm256_extractf128_pd_0(__m256d a) { in test_mm256_extractf128_pd_0()
    166 __m128d test_mm256_extractf128_pd_1(__m256d a) { in test_mm256_extractf128_pd_1()
    184 __m256d test_mm256_set_m128d(__m128d hi, __m128d lo) { in test_mm256_set_m128d()
    202 __m256d test_mm256_setr_m128d(__m128d hi, __m128d lo) { in test_mm256_setr_m128d()
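
A hedged sketch of combining and splitting 128-bit halves with the AVX intrinsics listed above (assumes <immintrin.h> and -mavx; the function is illustrative):

    #include <immintrin.h>                       /* AVX */

    __m128d upper_half_roundtrip(__m128d lo, __m128d hi) {
      __m256d v = _mm256_set_m128d(hi, lo);      /* first argument becomes the upper half */
      v = _mm256_insertf128_pd(v, lo, 1);        /* overwrite the upper half with lo */
      return _mm256_extractf128_pd(v, 1);        /* read the upper half back (== lo) */
    }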
|
D | target-features-error-2.c |
    29 __m128d need_avx(__m128d a, __m128d b) { in need_avx()
    35 __m128d need_avx(__m128d a, __m128d b) { in need_avx()
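
This test checks that AVX-only builtins are rejected unless the calling function has the feature enabled. A hedged illustration of the per-function target attribute involved, with a reasonably recent compiler; the function name and intrinsic are examples, not the test's contents:

    #include <immintrin.h>

    /* Without "avx" among the target features, lowering _mm256_add_pd is an
       error; the attribute enables the feature for this one function only. */
    __attribute__((target("avx")))
    __m256d add_with_avx(__m256d a, __m256d b) {
      return _mm256_add_pd(a, b);
    }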
|
D | avx-builtins.c |
    85 __m256d test_mm256_broadcast_pd(__m128d* A) { in test_mm256_broadcast_pd()
    107 __m128d test_mm_broadcast_ss(float* A) { in test_mm_broadcast_ss()
    143 __m256d test_mm256_castpd128_pd256(__m128d A) { in test_mm256_castpd128_pd256()
    149 __m128d test_mm256_castpd256_pd128(__m256d A) { in test_mm256_castpd256_pd128()
    215 __m128d test_mm_cmp_pd(__m128d A, __m128d B) { in test_mm_cmp_pd()
    239 __m128d test_mm_cmp_sd(__m128d A, __m128d B) { in test_mm_cmp_sd()
    347 __m128d test_mm256_extractf128_pd(__m256d A) { in test_mm256_extractf128_pd()
    429 __m256d test_mm256_insertf128_pd(__m256d A, __m128d B) { in test_mm256_insertf128_pd()
    522 __m128d test_mm_maskload_pd(double* A, __m128i B) { in test_mm_maskload_pd()
    546 void test_mm_maskstore_pd(double* A, __m128i B, __m128d C) { in test_mm_maskstore_pd()
    [all …]
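
A hedged sketch of the AVX masked load/store pair matched above (assumes <immintrin.h> and -mavx): only lanes whose 64-bit mask element has its sign bit set touch memory.

    #include <immintrin.h>                       /* AVX */

    void copy_low_lane(double *dst, const double *src) {
      __m128i mask = _mm_set_epi64x(0, -1);      /* lane 0 enabled, lane 1 disabled */
      __m128d v = _mm_maskload_pd(src, mask);    /* disabled lane reads as 0.0 */
      _mm_maskstore_pd(dst, mask, v);            /* disabled lane of dst is untouched */
    }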
|
/external/clang/lib/Headers/ |
D | emmintrin.h |
    29 typedef double __m128d __attribute__((__vector_size__(16))); typedef
    52 static __inline__ __m128d __DEFAULT_FN_ATTRS
    53 _mm_add_sd(__m128d __a, __m128d __b) in _mm_add_sd()
    59 static __inline__ __m128d __DEFAULT_FN_ATTRS
    60 _mm_add_pd(__m128d __a, __m128d __b) in _mm_add_pd()
    62 return (__m128d)((__v2df)__a + (__v2df)__b); in _mm_add_pd()
    65 static __inline__ __m128d __DEFAULT_FN_ATTRS
    66 _mm_sub_sd(__m128d __a, __m128d __b) in _mm_sub_sd()
    72 static __inline__ __m128d __DEFAULT_FN_ATTRS
    73 _mm_sub_pd(__m128d __a, __m128d __b) in _mm_sub_pd()
    [all …]
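
For context, __m128d is a vector of two doubles, and the _pd/_sd suffixes distinguish packed operations from scalar ones that touch only the low lane. A minimal usage sketch, assuming plain SSE2 (always available on x86-64):

    #include <emmintrin.h>                       /* SSE2 */

    void add_demo(double out[2]) {
      __m128d a = _mm_set_pd(2.0, 1.0);          /* lanes {1.0, 2.0}; low element listed last */
      __m128d b = _mm_set_pd(4.0, 3.0);          /* lanes {3.0, 4.0} */
      __m128d p = _mm_add_pd(a, b);              /* packed: {4.0, 6.0} */
      __m128d s = _mm_add_sd(p, b);              /* scalar: low lane 4.0 + 3.0, high lane kept */
      _mm_storeu_pd(out, s);                     /* out[0] == 7.0, out[1] == 6.0 */
    }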
|
D | fma4intrin.h |
    42 static __inline__ __m128d __DEFAULT_FN_ATTRS
    43 _mm_macc_pd(__m128d __A, __m128d __B, __m128d __C) in _mm_macc_pd()
    45 return (__m128d)__builtin_ia32_vfmaddpd((__v2df)__A, (__v2df)__B, (__v2df)__C); in _mm_macc_pd()
    54 static __inline__ __m128d __DEFAULT_FN_ATTRS
    55 _mm_macc_sd(__m128d __A, __m128d __B, __m128d __C) in _mm_macc_sd()
    57 return (__m128d)__builtin_ia32_vfmaddsd((__v2df)__A, (__v2df)__B, (__v2df)__C); in _mm_macc_sd()
    66 static __inline__ __m128d __DEFAULT_FN_ATTRS
    67 _mm_msub_pd(__m128d __A, __m128d __B, __m128d __C) in _mm_msub_pd()
    69 return (__m128d)__builtin_ia32_vfmsubpd((__v2df)__A, (__v2df)__B, (__v2df)__C); in _mm_msub_pd()
    78 static __inline__ __m128d __DEFAULT_FN_ATTRS
    [all …]
|
D | fmaintrin.h |
    40 static __inline__ __m128d __DEFAULT_FN_ATTRS
    41 _mm_fmadd_pd(__m128d __A, __m128d __B, __m128d __C) in _mm_fmadd_pd()
    43 return (__m128d)__builtin_ia32_vfmaddpd((__v2df)__A, (__v2df)__B, (__v2df)__C); in _mm_fmadd_pd()
    52 static __inline__ __m128d __DEFAULT_FN_ATTRS
    53 _mm_fmadd_sd(__m128d __A, __m128d __B, __m128d __C) in _mm_fmadd_sd()
    55 return (__m128d)__builtin_ia32_vfmaddsd((__v2df)__A, (__v2df)__B, (__v2df)__C); in _mm_fmadd_sd()
    64 static __inline__ __m128d __DEFAULT_FN_ATTRS
    65 _mm_fmsub_pd(__m128d __A, __m128d __B, __m128d __C) in _mm_fmsub_pd()
    67 return (__m128d)__builtin_ia32_vfmsubpd((__v2df)__A, (__v2df)__B, (__v2df)__C); in _mm_fmsub_pd()
    76 static __inline__ __m128d __DEFAULT_FN_ATTRS
    [all …]
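
A hedged sketch of the fused multiply-add forms defined here (assumes -mfma; the "n" and "sub" in the names negate the product and the addend respectively):

    #include <immintrin.h>                       /* FMA */

    __m128d fma_demo(__m128d a, __m128d b, __m128d c) {
      __m128d r = _mm_fmadd_pd(a, b, c);         /*  a*b + c, one rounding per lane */
      return _mm_fnmsub_pd(a, b, r);             /* -(a*b) - r, also fused */
    }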
|
D | avx512vldqintrin.h |
    95 static __inline__ __m128d __DEFAULT_FN_ATTRS
    96 _mm_mask_andnot_pd (__m128d __W, __mmask8 __U, __m128d __A, __m128d __B) { in _mm_mask_andnot_pd()
    97 return (__m128d) __builtin_ia32_andnpd128_mask ((__v2df) __A, in _mm_mask_andnot_pd()
    103 static __inline__ __m128d __DEFAULT_FN_ATTRS
    104 _mm_maskz_andnot_pd (__mmask8 __U, __m128d __A, __m128d __B) { in _mm_maskz_andnot_pd()
    105 return (__m128d) __builtin_ia32_andnpd128_mask ((__v2df) __A, in _mm_maskz_andnot_pd()
    163 static __inline__ __m128d __DEFAULT_FN_ATTRS
    164 _mm_mask_and_pd (__m128d __W, __mmask8 __U, __m128d __A, __m128d __B) { in _mm_mask_and_pd()
    165 return (__m128d) __builtin_ia32_andpd128_mask ((__v2df) __A, in _mm_mask_and_pd()
    171 static __inline__ __m128d __DEFAULT_FN_ATTRS
    [all …]
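
A hedged sketch of the 128-bit masked logical operations declared here (assumes -mavx512vl -mavx512dq): mask bit i gates lane i, with the _mask form keeping __W and the _maskz form zeroing unselected lanes.

    #include <immintrin.h>                       /* AVX-512VL + AVX-512DQ */

    __m128d masked_logic(__m128d w, __m128d a, __m128d b) {
      __m128d kept   = _mm_mask_and_pd(w, 0x1, a, b);    /* lane 0 = a & b, lane 1 = w */
      __m128d zeroed = _mm_maskz_andnot_pd(0x2, a, b);   /* lane 0 = 0, lane 1 = ~a & b */
      return _mm_xor_pd(kept, zeroed);
    }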
|
D | pmmintrin.h |
    173 static __inline__ __m128d __DEFAULT_FN_ATTRS
    174 _mm_addsub_pd(__m128d __a, __m128d __b) in _mm_addsub_pd()
    196 static __inline__ __m128d __DEFAULT_FN_ATTRS
    197 _mm_hadd_pd(__m128d __a, __m128d __b) in _mm_hadd_pd()
    219 static __inline__ __m128d __DEFAULT_FN_ATTRS
    220 _mm_hsub_pd(__m128d __a, __m128d __b) in _mm_hsub_pd()
    255 static __inline__ __m128d __DEFAULT_FN_ATTRS
    256 _mm_movedup_pd(__m128d __a) in _mm_movedup_pd()
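
A hedged sketch using the SSE3 horizontal ops declared here (assumes -msse3; <pmmintrin.h> pulls in SSE2 as well):

    #include <pmmintrin.h>                       /* SSE3 */

    double dot2(const double x[2], const double y[2]) {
      __m128d prod = _mm_mul_pd(_mm_loadu_pd(x), _mm_loadu_pd(y));
      __m128d sum  = _mm_hadd_pd(prod, prod);    /* both lanes = prod[0] + prod[1] */
      return _mm_cvtsd_f64(sum);                 /* read the low lane */
    }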
|
D | avx512erintrin.h |
    156 (__m128d)__builtin_ia32_rsqrt28sd_round_mask((__v2df)(__m128d)(A), \
    157 (__v2df)(__m128d)(B), \
    162 (__m128d)__builtin_ia32_rsqrt28sd_round_mask((__v2df)(__m128d)(A), \
    163 (__v2df)(__m128d)(B), \
    164 (__v2df)(__m128d)(S), \
    168 (__m128d)__builtin_ia32_rsqrt28sd_round_mask((__v2df)(__m128d)(A), \
    169 (__v2df)(__m128d)(B), \
    259 (__m128d)__builtin_ia32_rcp28sd_round_mask((__v2df)(__m128d)(A), \
    260 (__v2df)(__m128d)(B), \
    265 (__m128d)__builtin_ia32_rcp28sd_round_mask((__v2df)(__m128d)(A), \
    [all …]
|
D | avx512fintrin.h |
    388 _mm512_broadcastsd_pd(__m128d __A) in _mm512_broadcastsd_pd()
    410 static __inline __m128d __DEFAULT_FN_ATTRS
    447 _mm512_castpd128_pd512 (__m128d __A) in _mm512_castpd128_pd512()
    969 static __inline__ __m128d __DEFAULT_FN_ATTRS
    970 _mm_mask_max_sd(__m128d __W, __mmask8 __U,__m128d __A, __m128d __B) { in _mm_mask_max_sd()
    971 return (__m128d) __builtin_ia32_maxsd_round_mask ((__v2df) __A, in _mm_mask_max_sd()
    978 static __inline__ __m128d __DEFAULT_FN_ATTRS
    979 _mm_maskz_max_sd(__mmask8 __U,__m128d __A, __m128d __B) { in _mm_maskz_max_sd()
    980 return (__m128d) __builtin_ia32_maxsd_round_mask ((__v2df) __A, in _mm_maskz_max_sd()
    988 (__m128d)__builtin_ia32_maxsd_round_mask((__v2df)(__m128d)(A), \
    [all …]
|
D | avx512vlintrin.h |
    1231 (__mmask8)__builtin_ia32_cmppd128_mask((__v2df)(__m128d)(a), \
    1232 (__v2df)(__m128d)(b), (int)(p), \
    1236 (__mmask8)__builtin_ia32_cmppd128_mask((__v2df)(__m128d)(a), \
    1237 (__v2df)(__m128d)(b), (int)(p), \
    1240 static __inline__ __m128d __DEFAULT_FN_ATTRS
    1241 _mm_mask_fmadd_pd(__m128d __A, __mmask8 __U, __m128d __B, __m128d __C) in _mm_mask_fmadd_pd()
    1243 return (__m128d) __builtin_ia32_vfmaddpd128_mask ((__v2df) __A, in _mm_mask_fmadd_pd()
    1249 static __inline__ __m128d __DEFAULT_FN_ATTRS
    1250 _mm_mask3_fmadd_pd(__m128d __A, __m128d __B, __m128d __C, __mmask8 __U) in _mm_mask3_fmadd_pd()
    1252 return (__m128d) __builtin_ia32_vfmaddpd128_mask3 ((__v2df) __A, in _mm_mask3_fmadd_pd()
    [all …]
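
A hedged sketch of the 128-bit mask-compare plus masked-FMA pattern declared here (assumes -mavx512vl; _CMP_LT_OS comes from the AVX compare predicates):

    #include <immintrin.h>                       /* AVX-512F + AVX-512VL */

    __m128d fma_where_less(__m128d a, __m128d b, __m128d c) {
      __mmask8 k = _mm_cmp_pd_mask(a, b, _CMP_LT_OS);    /* bit i set iff a[i] < b[i] */
      return _mm_mask_fmadd_pd(a, k, b, c);              /* a*b + c where k is set, else keep a[i] */
    }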
|
/external/libvpx/libvpx/vpx_ports/ |
D | emmintrin_compat.h |
    20 _mm_castpd_ps(__m128d __A) { in _mm_castpd_ps()
    26 _mm_castpd_si128(__m128d __A) { in _mm_castpd_si128()
    30 extern __inline __m128d
    33 return (__m128d)__A; in _mm_castps_pd()
    48 extern __inline __m128d
    51 return (__m128d)__A; in _mm_castsi128_pd()
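
These compat headers exist because very old GCC releases shipped <emmintrin.h> without the no-op cast intrinsics. The shim pattern looks roughly like this (the version guard and attributes are illustrative, not copied from the header; the cast emits no instruction):

    #include <emmintrin.h>

    #if defined(__GNUC__) && (__GNUC__ < 4)      /* illustrative guard only */
    extern __inline __m128i
        __attribute__((__gnu_inline__, __always_inline__))
        _mm_castpd_si128(__m128d __A) {
      return (__m128i)__A;                       /* reinterpret the same 128 bits as integers */
    }
    #endif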
|
/external/libaom/libaom/aom_ports/ |
D | emmintrin_compat.h |
    21 _mm_castpd_ps(__m128d __A) { in _mm_castpd_ps()
    27 _mm_castpd_si128(__m128d __A) { in _mm_castpd_si128()
    31 extern __inline __m128d
    34 return (__m128d)__A; in _mm_castps_pd()
    49 extern __inline __m128d
    52 return (__m128d)__A; in _mm_castsi128_pd()
|
/external/clang/test/Sema/ |
D | builtins-x86.c |
    5 typedef double __m128d __attribute__((__vector_size__(16))); typedef
    17 __m128d test__builtin_ia32_cmppd(__m128d __a, __m128d __b) { in test__builtin_ia32_cmppd()
    25 __m128d test__builtin_ia32_cmpsd(__m128d __a, __m128d __b) { in test__builtin_ia32_cmpsd()
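
The Sema test declares the vector types itself and calls the builtins directly, so what is checked is the builtin's prototype (argument count, types, and the constant predicate) rather than the intrinsic wrappers. A hedged sketch of that pattern:

    typedef double __m128d __attribute__((__vector_size__(16)));

    __m128d call_cmppd(__m128d a, __m128d b) {
      /* The third argument must be an integer constant expression;
         predicate 0 is "equal, ordered, non-signaling". */
      return __builtin_ia32_cmppd(a, b, 0);
    }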
|