/external/llvm/test/CodeGen/X86/ |
D | vec_compare.ll |
    48   ; CHECK: pshufd $177
    59   ; CHECK: pshufd $177
    80   ; CHECK: pshufd $160
    82   ; CHECK: pshufd $245
    84   ; CHECK: pshufd $245
    97   ; CHECK: pshufd $160
    99   ; CHECK: pshufd $245
    101  ; CHECK: pshufd $245
    114  ; CHECK: pshufd $160
    116  ; CHECK: pshufd $245
    [all …]
|
D | lower-bitcast.ll |
    13   ; pshufd+paddq+pshufd. This is fixed with the widening legalization.
    17   ; CHECK: pshufd
    19   ; CHECK-NEXT: pshufd
    53   ; CHECK-NOT: pshufd
    55   ; CHECK-NOT: pshufd
    59   ; CHECK-WIDE-NOT: pshufd
    61   ; CHECK-WIDE-NOT: pshufd
    71   ; FIXME: At the moment we still produce the sequence pshufd+paddd+pshufd.
    76   ; CHECK: pshufd
    78   ; CHECK-NEXT: pshufd
    [all …]
|
D | vector-idiv.ll |
    12   ; SSE41-NEXT: pshufd {{.*#+}} xmm2 = xmm1[1,1,3,3]
    13   ; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm0[1,1,3,3]
    16   ; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm1[1,1,3,3]
    29   ; SSE-NEXT: pshufd {{.*#+}} xmm2 = xmm2[1,3,2,3]
    30   ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm1[1,1,3,3]
    31   ; SSE-NEXT: pshufd {{.*#+}} xmm3 = xmm0[1,1,3,3]
    33   ; SSE-NEXT: pshufd {{.*#+}} xmm1 = xmm3[1,3,2,3]
    63   ; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[1,1,3,3]
    64   ; SSE41-NEXT: pshufd {{.*#+}} xmm4 = xmm0[1,1,3,3]
    68   ; SSE41-NEXT: pshufd {{.*#+}} xmm5 = xmm5[1,1,3,3]
    [all …]
|
D | combine-multiplies.ll |
    77   ; pshufd $245, %xmm0, %xmm3 # xmm3 = xmm0[1,1,3,3]
    79   ; pshufd $232, %xmm0, %xmm0 # xmm0 = xmm0[0,2,2,3]
    81   ; pshufd $232, %xmm3, %xmm2 # xmm2 = xmm3[0,2,2,3]
    100  ; CHECK-NEXT: pshufd $245, %xmm0, [[T1:%xmm[0-9]]]
    102  ; CHECK-NEXT: pshufd $232, [[T2]], [[T3:%xmm[0-9]]]
    104  ; CHECK-NEXT: pshufd $232, [[T4]], [[T5:%xmm[0-9]]]
    138  ; CHECK-NEXT: pshufd $245, %xmm0, [[T1:%xmm[0-9]]]
    140  ; CHECK-NEXT: pshufd $232, [[T2]], [[T3:%xmm[0-9]]]
    141  ; CHECK-NEXT: pshufd $245, [[C22]], [[T7:%xmm[0-9]]]
    143  ; CHECK-NEXT: pshufd $232, [[T7]], [[T5:%xmm[0-9]]]
|
D | SwizzleShuff.ll |
    17   ; CHECK: pshufd
    18   ; CHECK-NEXT: pshufd
    20   ; CHECK-NEXT: pshufd
    21   ; CHECK-NEXT: pshufd
    49   ; CHECK-NOT: pshufd
    61   ; CHECK: pshufd
|
D | vec_cmp_sint-128.ll |
    20   ; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,0,3,2]
    116  ; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,0,3,2]
    236  ; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
    238  ; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm1[1,1,3,3]
    240  ; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm2[1,1,3,3]
    253  ; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
    255  ; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm1[1,1,3,3]
    257  ; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm2[1,1,3,3]
    370  ; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
    372  ; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
    [all …]
|
D | vector-trunc.ll |
    12   ; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,2,2,3]
    13   ; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
    15   ; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm3[0,2,2,3]
    16   ; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm2[0,2,2,3]
    22   ; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,2,2,3]
    23   ; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
    25   ; SSSE3-NEXT: pshufd {{.*#+}} xmm3 = xmm3[0,2,2,3]
    26   ; SSSE3-NEXT: pshufd {{.*#+}} xmm1 = xmm2[0,2,2,3]
    32   ; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm1[0,1,0,2]
    33   ; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
    [all …]
|
D | vec_minmax_sint.ll |
    23   ; SSE2-NEXT: pshufd {{.*#+}} xmm5 = xmm4[0,0,2,2]
    25   ; SSE2-NEXT: pshufd {{.*#+}} xmm2 = xmm2[1,1,3,3]
    27   ; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm4[1,1,3,3]
    43   ; SSE41-NEXT: pshufd {{.*#+}} xmm5 = xmm4[0,0,2,2]
    45   ; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm0[1,1,3,3]
    47   ; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm4[1,1,3,3]
    81   ; SSE2-NEXT: pshufd {{.*#+}} xmm8 = xmm7[0,0,2,2]
    83   ; SSE2-NEXT: pshufd {{.*#+}} xmm5 = xmm6[1,1,3,3]
    85   ; SSE2-NEXT: pshufd {{.*#+}} xmm6 = xmm7[1,1,3,3]
    92   ; SSE2-NEXT: pshufd {{.*#+}} xmm8 = xmm7[0,0,2,2]
    [all …]
|
D | vec_minmax_uint.ll |
    23   ; SSE2-NEXT: pshufd {{.*#+}} xmm5 = xmm4[0,0,2,2]
    25   ; SSE2-NEXT: pshufd {{.*#+}} xmm2 = xmm2[1,1,3,3]
    27   ; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm4[1,1,3,3]
    43   ; SSE41-NEXT: pshufd {{.*#+}} xmm5 = xmm4[0,0,2,2]
    45   ; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm0[1,1,3,3]
    47   ; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm4[1,1,3,3]
    88   ; SSE2-NEXT: pshufd {{.*#+}} xmm8 = xmm7[0,0,2,2]
    90   ; SSE2-NEXT: pshufd {{.*#+}} xmm5 = xmm6[1,1,3,3]
    92   ; SSE2-NEXT: pshufd {{.*#+}} xmm6 = xmm7[1,1,3,3]
    99   ; SSE2-NEXT: pshufd {{.*#+}} xmm8 = xmm7[0,0,2,2]
    [all …]
|
D | vec_cmp_uint-128.ll |
    20   ; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,0,3,2]
    116  ; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,0,3,2]
    236  ; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
    238  ; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm1[1,1,3,3]
    240  ; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm2[1,1,3,3]
    253  ; SSE41-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
    255  ; SSE41-NEXT: pshufd {{.*#+}} xmm0 = xmm1[1,1,3,3]
    257  ; SSE41-NEXT: pshufd {{.*#+}} xmm1 = xmm2[1,1,3,3]
    399  ; SSE2-NEXT: pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
    401  ; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
    [all …]
|
D | vector-shuffle-128-v8.ll |
    14   ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,0,1,1]
    27   ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[3,2,1,0]
    66   ; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,1,0,3]
    138  ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,1,3,2]
    152  ; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,1,0,3]
    177  ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[1,0,3,2]
    191  ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,1,3,2]
    205  ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[1,0,2,3]
    220  ; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,3,0,1]
    246  ; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,0]
    [all …]
|
D | vselect-minmax.ll |
    4775  ; SSE2-NEXT: pshufd {{.*#+}} xmm12 = xmm11[0,0,2,2]
    4777  ; SSE2-NEXT: pshufd {{.*#+}} xmm10 = xmm10[1,1,3,3]
    4779  ; SSE2-NEXT: pshufd {{.*#+}} xmm8 = xmm11[1,1,3,3]
    4787  ; SSE2-NEXT: pshufd {{.*#+}} xmm13 = xmm12[0,0,2,2]
    4789  ; SSE2-NEXT: pshufd {{.*#+}} xmm11 = xmm11[1,1,3,3]
    4791  ; SSE2-NEXT: pshufd {{.*#+}} xmm10 = xmm12[1,1,3,3]
    4799  ; SSE2-NEXT: pshufd {{.*#+}} xmm14 = xmm13[0,0,2,2]
    4801  ; SSE2-NEXT: pshufd {{.*#+}} xmm11 = xmm12[1,1,3,3]
    4803  ; SSE2-NEXT: pshufd {{.*#+}} xmm12 = xmm13[1,1,3,3]
    4810  ; SSE2-NEXT: pshufd {{.*#+}} xmm14 = xmm13[0,0,2,2]
    [all …]
|
D | vector-shuffle-combining.ll |
    96   ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,0,0,0]
    168  ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,1,3]
    186  ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,1,3]
    204  ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,1,3]
    222  ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,1,3]
    240  ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,1,3]
    258  ; SSE-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,1,3]
    280  ; SSE2-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
    281  ; SSE2-NEXT: pshufd {{.*#+}} xmm1 = xmm2[1,3,2,3]
    288  ; SSSE3-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
    [all …]
|
D | vector-shuffle-mmx.ll |
    12   ; X32-NEXT: pshufd {{.*#+}} xmm0 = xmm0[1,1,2,3]
    19   ; X64-NEXT: pshufd {{.*#+}} xmm0 = xmm0[1,1,2,3]
    47   ; X32-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
    63   ; X64-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,2,3]
    96   ; X64-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,0,1,1]
|
D | swizzle-2.ll |
    15   ; CHECK-NEXT: pshufd {{.*#+}} xmm0 = xmm0[1,0,3,2]
    25   ; CHECK-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,1,3,0]
    35   ; CHECK-NEXT: pshufd {{.*#+}} xmm0 = xmm0[1,0,3,2]
    45   ; CHECK-NEXT: pshufd {{.*#+}} xmm0 = xmm0[3,1,0,2]
    55   ; CHECK-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,3,0,1]
    65   ; CHECK-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,0,1,3]
    75   ; CHECK-NEXT: pshufd {{.*#+}} xmm0 = xmm0[0,2,3,1]
    85   ; CHECK-NEXT: pshufd {{.*#+}} xmm0 = xmm0[1,3,2,0]
    95   ; CHECK-NEXT: pshufd {{.*#+}} xmm0 = xmm0[2,3,0,1]
    105  ; CHECK-NEXT: pshufd {{.*#+}} xmm0 = xmm0[1,2,0,3]
    [all …]
|
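The decimal immediates in the CHECK lines above are just the pshufd control byte: result dword i is taken from source dword (imm >> (2*i)) & 3, so $177 selects lanes [1,0,3,2], $245 selects [1,1,3,3], $232 selects [0,2,2,3] and $160 selects [0,0,2,2], which matches the lane notation FileCheck prints in these tests. Below is a minimal, self-contained sketch of that mapping using the SSE2 intrinsic _mm_shuffle_epi32 (the C compiler emits pshufd for it); the example is mine, not taken from the tests.

    #include <emmintrin.h>  /* SSE2: _mm_shuffle_epi32 maps to pshufd */
    #include <stdio.h>

    int main(void) {
        __m128i v = _mm_setr_epi32(0, 1, 2, 3);  /* dword lanes 0..3 */
        int out[4];

        /* $177 = 0b10110001 -> lanes [1,0,3,2]: swap adjacent dword pairs. */
        _mm_storeu_si128((__m128i *)out, _mm_shuffle_epi32(v, 177));
        printf("%d %d %d %d\n", out[0], out[1], out[2], out[3]);  /* 1 0 3 2 */

        /* $245 = 0b11110101 -> lanes [1,1,3,3]: duplicate the odd dwords. */
        _mm_storeu_si128((__m128i *)out, _mm_shuffle_epi32(v, 245));
        printf("%d %d %d %d\n", out[0], out[1], out[2], out[3]);  /* 1 1 3 3 */

        /* $160 = 0b10100000 -> lanes [0,0,2,2]: duplicate the even dwords. */
        _mm_storeu_si128((__m128i *)out, _mm_shuffle_epi32(v, 160));
        printf("%d %d %d %d\n", out[0], out[1], out[2], out[3]);  /* 0 0 2 2 */

        return 0;
    }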
/external/libvpx/libvpx/vp8/common/x86/ |
D | idctllm_sse2.asm |
    151  pshufd xmm0, xmm0, 11011000b
    152  pshufd xmm1, xmm4, 11011000b
    158  pshufd xmm2, xmm2, 11011000b
    159  pshufd xmm3, xmm4, 11011000b
    225  pshufd xmm0, xmm2, 11011000b
    226  pshufd xmm2, xmm1, 11011000b
    228  pshufd xmm1, xmm5, 11011000b
    229  pshufd xmm3, xmm7, 11011000b
    303  pshufd xmm0, xmm2, 11011000b
    304  pshufd xmm2, xmm1, 11011000b
    [all …]
|
/external/boringssl/linux-x86_64/crypto/aes/ |
D | bsaes-x86_64.S |
    330  pshufd $147,%xmm15,%xmm7
    331  pshufd $147,%xmm0,%xmm8
    333  pshufd $147,%xmm3,%xmm9
    335  pshufd $147,%xmm5,%xmm10
    337  pshufd $147,%xmm2,%xmm11
    339  pshufd $147,%xmm6,%xmm12
    341  pshufd $147,%xmm1,%xmm13
    343  pshufd $147,%xmm4,%xmm14
    350  pshufd $78,%xmm15,%xmm15
    352  pshufd $78,%xmm0,%xmm0
    [all …]
|
/external/boringssl/mac-x86_64/crypto/aes/ |
D | bsaes-x86_64.S |
    328  pshufd $147,%xmm15,%xmm7
    329  pshufd $147,%xmm0,%xmm8
    331  pshufd $147,%xmm3,%xmm9
    333  pshufd $147,%xmm5,%xmm10
    335  pshufd $147,%xmm2,%xmm11
    337  pshufd $147,%xmm6,%xmm12
    339  pshufd $147,%xmm1,%xmm13
    341  pshufd $147,%xmm4,%xmm14
    348  pshufd $78,%xmm15,%xmm15
    350  pshufd $78,%xmm0,%xmm0
    [all …]
|
/external/libvpx/libvpx/third_party/libyuv/source/ |
D | compare_win.cc |
    51   pshufd xmm1, xmm0, 0xee                    in SumSquareError_SSE2()
    53   pshufd xmm1, xmm0, 0x01                    in SumSquareError_SSE2()
    174  pshufd xmm2, xmm1, 0x0e  // upper 2 dwords in HashDjb2_SSE41()
    176  pshufd xmm2, xmm1, 0x01                    in HashDjb2_SSE41()
    211  pshufd xmm2, xmm1, 0x0e  // upper 2 dwords in HashDjb2_AVX2()
    213  pshufd xmm2, xmm1, 0x01                    in HashDjb2_AVX2()
|
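The 0xee / 0x01 pair in SumSquareError_SSE2 above is the usual shuffle-and-add way of folding the four 32-bit lanes of an SSE register into a single sum before movd: 0xEE selects lanes [2,3,2,3] and 0x01 selects lanes [1,0,0,0]. A hedged intrinsic sketch of that idiom (my code, not libyuv's; the function name is made up):

    #include <emmintrin.h>  /* SSE2 */

    /* Sum the four 32-bit lanes of v, mirroring the pshufd 0xEE / 0x01 reduction. */
    int horizontal_add_epi32(__m128i v) {
        __m128i hi  = _mm_shuffle_epi32(v, 0xEE);  /* lanes [2,3,2,3]             */
        v = _mm_add_epi32(v, hi);                  /* lane0 += lane2, lane1 += lane3 */
        __m128i odd = _mm_shuffle_epi32(v, 0x01);  /* lanes [1,0,0,0]             */
        v = _mm_add_epi32(v, odd);                 /* lane0 += lane1              */
        return _mm_cvtsi128_si32(v);               /* movd: extract lane 0        */
    }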
/external/boringssl/win-x86_64/crypto/aes/ |
D | bsaes-x86_64.asm |
    332  pshufd xmm7,xmm15,0x93
    333  pshufd xmm8,xmm0,0x93
    335  pshufd xmm9,xmm3,0x93
    337  pshufd xmm10,xmm5,0x93
    339  pshufd xmm11,xmm2,0x93
    341  pshufd xmm12,xmm6,0x93
    343  pshufd xmm13,xmm1,0x93
    345  pshufd xmm14,xmm4,0x93
    352  pshufd xmm15,xmm15,0x4E
    354  pshufd xmm0,xmm0,0x4E
    [all …]
|
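The three bsaes-x86_64 listings appear to be the same generated BoringSSL file in AT&T ($147, $78) and Intel (0x93, 0x4E) syntax. Both immediates are plain dword permutations: 0x93 selects lanes [3,0,1,2], so each source dword moves up one lane (a 32-bit rotation of the register), and 0x4E selects lanes [2,3,0,1], swapping the two 64-bit halves. A small illustrative sketch of the two mappings (my code, not BoringSSL's; the helper names are made up):

    #include <emmintrin.h>

    /* pshufd $0x93 (== $147): lanes [3,0,1,2] -- rotate the four dwords by one lane. */
    __m128i rotate_dwords(__m128i v) { return _mm_shuffle_epi32(v, 0x93); }

    /* pshufd $0x4E (== $78): lanes [2,3,0,1] -- swap the two 64-bit halves. */
    __m128i swap_halves(__m128i v)   { return _mm_shuffle_epi32(v, 0x4E); }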
/external/boringssl/src/crypto/sha/asm/ |
D | sha256-586.pl |
    534  &pshufd ($Wi,$ABEF,0x1b);   # ABCD
    535  &pshufd ($ABEF,$ABEF,0xb1); # CDAB
    536  &pshufd ($CDGH,$CDGH,0x1b); # EFGH
    553  &pshufd ($Wi,$Wi,0x0e);
    562  &pshufd ($Wi,$Wi,0x0e);
    571  &pshufd ($Wi,$Wi,0x0e);
    583  &pshufd ($Wi,$Wi,0x0e);
    596  &pshufd ($Wi,$Wi,0x0e);
    610  &pshufd ($Wi,$Wi,0x0e);
    619  &pshufd ($Wi,$Wi,0x0e);
    [all …]
|
/external/mesa3d/src/mesa/x86-64/ |
D | xform4.S |
    85   pshufd $0x00, %xmm8, %xmm0 /* ox | ox | ox | ox */
    87   pshufd $0x55, %xmm8, %xmm1 /* oy | oy | oy | oy */
    89   pshufd $0xAA, %xmm8, %xmm2 /* oz | oz | oz | ox */
    91   pshufd $0xFF, %xmm8, %xmm3 /* ow | ow | ow | ow */
    171  pshufd $0x00, %xmm8, %xmm0 /* ox | ox | ox | ox */
    173  pshufd $0x55, %xmm8, %xmm1 /* oy | oy | oy | oy */
    175  pshufd $0xAA, %xmm8, %xmm2 /* oz | oz | oz | ox */
    177  pshufd $0xFF, %xmm8, %xmm3 /* ow | ow | ow | ow */
|
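The 0x00 / 0x55 / 0xAA / 0xFF immediates in xform4.S broadcast one component of the source vertex into all four lanes (the ox|ox|ox|ox comments above), the usual first step of an SSE 4x4 matrix-times-vector product. A hedged sketch of that pattern with intrinsics; the SPLAT macro, the xform4 name, and the column-major matrix layout are my own assumptions, not a transcription of Mesa's routine:

    #include <emmintrin.h>  /* SSE2 casts + pshufd; pulls in SSE float ops */

    /* Broadcast one float lane via pshufd, as in the listing above. */
    #define SPLAT(v, imm) \
        _mm_castsi128_ps(_mm_shuffle_epi32(_mm_castps_si128(v), (imm)))

    /* r = M * v for a column-major 4x4 matrix; col[j] holds column j. */
    __m128 xform4(const __m128 col[4], __m128 v) {
        __m128 r = _mm_mul_ps(SPLAT(v, 0x00), col[0]);          /* x,x,x,x * col 0 */
        r = _mm_add_ps(r, _mm_mul_ps(SPLAT(v, 0x55), col[1]));  /* + y,y,y,y * col 1 */
        r = _mm_add_ps(r, _mm_mul_ps(SPLAT(v, 0xAA), col[2]));  /* + z,z,z,z * col 2 */
        r = _mm_add_ps(r, _mm_mul_ps(SPLAT(v, 0xFF), col[3]));  /* + w,w,w,w * col 3 */
        return r;
    }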
/external/libvpx/libvpx/vpx_dsp/x86/ |
D | inv_wht_sse2.asm |
    54   pshufd m1, m0, 0x0e
    55   pshufd m3, m2, 0x0e
    66   pshufd m1, m0, 0x0e
    67   pshufd m3, m2, 0x0e
|
/external/boringssl/mac-x86_64/crypto/modes/ |
D | ghash-x86_64.S |
    668  pshufd $78,%xmm2,%xmm2
    671  pshufd $255,%xmm2,%xmm4
    685  pshufd $78,%xmm2,%xmm6
    689  pshufd $78,%xmm0,%xmm3
    725  pshufd $78,%xmm2,%xmm3
    726  pshufd $78,%xmm0,%xmm4
    734  pshufd $78,%xmm0,%xmm3
    772  pshufd $78,%xmm0,%xmm3
    808  pshufd $78,%xmm5,%xmm3
    809  pshufd $78,%xmm0,%xmm4
    [all …]
|
/external/boringssl/linux-x86_64/crypto/modes/ |
D | ghash-x86_64.S |
    669  pshufd $78,%xmm2,%xmm2
    672  pshufd $255,%xmm2,%xmm4
    686  pshufd $78,%xmm2,%xmm6
    690  pshufd $78,%xmm0,%xmm3
    726  pshufd $78,%xmm2,%xmm3
    727  pshufd $78,%xmm0,%xmm4
    735  pshufd $78,%xmm0,%xmm3
    773  pshufd $78,%xmm0,%xmm3
    809  pshufd $78,%xmm5,%xmm3
    810  pshufd $78,%xmm0,%xmm4
    [all …]
|