/external/swiftshader/third_party/llvm-7.0/llvm/test/CodeGen/X86/ |
D | shuffle-strided-with-offset-512.ll |
    12 ; AVX512F-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[u,u,u,u,u,u,u,u,1,3,5,7,9,11,13,15,u,u,u,u,u,u,u,u,…
    13 ; AVX512F-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[1,3,5,7,9,11,13,15,u,u,u,u,u,u,u,u,17,19,21,23,25,2…
    24 ; AVX512VL-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[u,u,u,u,u,u,u,u,1,3,5,7,9,11,13,15,u,u,u,u,u,u,u,u…
    25 ; AVX512VL-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[1,3,5,7,9,11,13,15,u,u,u,u,u,u,u,u,17,19,21,23,25,…
    36 ; AVX512BW-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[u,u,u,u,u,u,u,u,1,3,5,7,9,11,13,15,u,u,u,u,u,u,u,u…
    37 ; AVX512BW-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[1,3,5,7,9,11,13,15,u,u,u,u,u,u,u,u,17,19,21,23,25,…
    48 ; AVX512BWVL-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[u,u,u,u,u,u,u,u,1,3,5,7,9,11,13,15,u,u,u,u,u,u,u…
    49 ; AVX512BWVL-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[1,3,5,7,9,11,13,15,u,u,u,u,u,u,u,u,17,19,21,23,2…
    66 ; AVX512F-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[6,7,2,3,4,5,6,7,2,3,6,7,10,11,14,15,22,23,18,19,20,…
    67 ; AVX512F-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[2,3,6,7,10,11,14,15,14,15,10,11,12,13,14,15,18,19,2…
    [all …]
|
D | shuffle-strided-with-offset-256.ll |
    16 ; AVX1-NEXT: vpshufb %xmm2, %xmm1, %xmm1
    17 ; AVX1-NEXT: vpshufb %xmm2, %xmm0, %xmm0
    28 ; AVX2-NEXT: vpshufb %xmm2, %xmm1, %xmm1
    29 ; AVX2-NEXT: vpshufb %xmm2, %xmm0, %xmm0
    40 ; AVX512-NEXT: vpshufb %xmm2, %xmm1, %xmm1
    41 ; AVX512-NEXT: vpshufb %xmm2, %xmm0, %xmm0
    58 ; AVX1-NEXT: vpshufb %xmm2, %xmm1, %xmm1
    59 ; AVX1-NEXT: vpshufb %xmm2, %xmm0, %xmm0
    70 ; AVX2-NEXT: vpshufb %xmm2, %xmm1, %xmm1
    71 ; AVX2-NEXT: vpshufb %xmm2, %xmm0, %xmm0
    [all …]
|
D | vector-popcnt-512.ll |
    15 ; AVX512F-NEXT: vpshufb %ymm3, %ymm4, %ymm3
    18 ; AVX512F-NEXT: vpshufb %ymm1, %ymm4, %ymm1
    23 ; AVX512F-NEXT: vpshufb %ymm5, %ymm4, %ymm5
    26 ; AVX512F-NEXT: vpshufb %ymm0, %ymm4, %ymm0
    37 ; AVX512BW-NEXT: vpshufb %zmm2, %zmm3, %zmm2
    40 ; AVX512BW-NEXT: vpshufb %zmm0, %zmm3, %zmm0
    56 ; BITALG-NEXT: vpshufb %zmm2, %zmm3, %zmm2
    59 ; BITALG-NEXT: vpshufb %zmm0, %zmm3, %zmm0
    75 ; AVX512F-NEXT: vpshufb %ymm3, %ymm4, %ymm3
    78 ; AVX512F-NEXT: vpshufb %ymm1, %ymm4, %ymm1
    [all …]
|
D | shuffle-vs-trunc-512.ll |
    18 ; AVX512F-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[u,u,u,u,u,u,u,u,0,2,4,6,8,10,12,14,u,u,u,u,u,u,u,u,…
    19 ; AVX512F-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[0,2,4,6,8,10,12,14,u,u,u,u,u,u,u,u,16,18,20,22,24,2…
    30 ; AVX512VL-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[u,u,u,u,u,u,u,u,0,2,4,6,8,10,12,14,u,u,u,u,u,u,u,u…
    31 ; AVX512VL-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[0,2,4,6,8,10,12,14,u,u,u,u,u,u,u,u,16,18,20,22,24,…
    42 ; AVX512BW-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[u,u,u,u,u,u,u,u,0,2,4,6,8,10,12,14,u,u,u,u,u,u,u,u…
    43 ; AVX512BW-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[0,2,4,6,8,10,12,14,u,u,u,u,u,u,u,u,16,18,20,22,24,…
    54 ; AVX512BWVL-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[u,u,u,u,u,u,u,u,0,2,4,6,8,10,12,14,u,u,u,u,u,u,u…
    55 ; AVX512BWVL-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[0,2,4,6,8,10,12,14,u,u,u,u,u,u,u,u,16,18,20,22,2…
    66 ; AVX512VBMI-NEXT: vpshufb {{.*#+}} ymm1 = ymm1[u,u,u,u,u,u,u,u,0,2,4,6,8,10,12,14,u,u,u,u,u,u,u…
    67 ; AVX512VBMI-NEXT: vpshufb {{.*#+}} ymm0 = ymm0[0,2,4,6,8,10,12,14,u,u,u,u,u,u,u,u,16,18,20,22,2…
    [all …]
|
D | x86-interleaved-access.ll |
    439 ; AVX1-NEXT: vpshufb %xmm1, %xmm2, %xmm3
    440 ; AVX1-NEXT: vpshufb %xmm1, %xmm0, %xmm1
    443 ; AVX1-NEXT: vpshufb %xmm5, %xmm3, %xmm3
    444 ; AVX1-NEXT: vpshufb %xmm5, %xmm1, %xmm1
    448 ; AVX1-NEXT: vpshufb %xmm3, %xmm2, %xmm2
    450 ; AVX1-NEXT: vpshufb %xmm3, %xmm0, %xmm0
    454 ; AVX1-NEXT: vpshufb %xmm4, %xmm2, %xmm2
    455 ; AVX1-NEXT: vpshufb %xmm4, %xmm0, %xmm0
    467 ; AVX-NEXT: vpshufb %xmm1, %xmm2, %xmm3
    468 ; AVX-NEXT: vpshufb %xmm1, %xmm0, %xmm1
    [all …]
|
D | shuffle-vs-trunc-256.ll |
    21 ; AVX1-NEXT: vpshufb %xmm2, %xmm1, %xmm1
    22 ; AVX1-NEXT: vpshufb %xmm2, %xmm0, %xmm0
    33 ; AVX2-NEXT: vpshufb %xmm2, %xmm1, %xmm1
    34 ; AVX2-NEXT: vpshufb %xmm2, %xmm0, %xmm0
    45 ; AVX512-NEXT: vpshufb %xmm2, %xmm1, %xmm1
    46 ; AVX512-NEXT: vpshufb %xmm2, %xmm0, %xmm0
    63 ; AVX1-NEXT: vpshufb %xmm2, %xmm1, %xmm1
    64 ; AVX1-NEXT: vpshufb %xmm2, %xmm0, %xmm0
    75 ; AVX2-NEXT: vpshufb %xmm2, %xmm1, %xmm1
    76 ; AVX2-NEXT: vpshufb %xmm2, %xmm0, %xmm0
    [all …]
|
D | vector-shuffle-256-v32.ll |
    14 ; AVX1-NEXT: vpshufb %xmm1, %xmm0, %xmm0
    30 ; AVX1-NEXT: vpshufb %xmm1, %xmm0, %xmm1
    31 ; AVX1-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0]
    37 ; AVX2OR512VL-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0]
    48 ; AVX1-NEXT: vpshufb %xmm1, %xmm0, %xmm1
    49 ; AVX1-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0]
    55 ; AVX2OR512VL-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0]
    66 ; AVX1-NEXT: vpshufb %xmm1, %xmm0, %xmm1
    67 ; AVX1-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0]
    73 ; AVX2OR512VL-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0]
    [all …]
|
D | vector-popcnt-256.ll |
    16 ; AVX1-NEXT: vpshufb %xmm3, %xmm4, %xmm3
    19 ; AVX1-NEXT: vpshufb %xmm1, %xmm4, %xmm1
    24 ; AVX1-NEXT: vpshufb %xmm5, %xmm4, %xmm5
    27 ; AVX1-NEXT: vpshufb %xmm0, %xmm4, %xmm0
    38 ; AVX2-NEXT: vpshufb %ymm2, %ymm3, %ymm2
    41 ; AVX2-NEXT: vpshufb %ymm0, %ymm3, %ymm0
    64 ; BITALG_NOVLX-NEXT: vpshufb %ymm2, %ymm3, %ymm2
    67 ; BITALG_NOVLX-NEXT: vpshufb %ymm0, %ymm3, %ymm0
    78 ; BITALG-NEXT: vpshufb %ymm2, %ymm3, %ymm2
    81 ; BITALG-NEXT: vpshufb %ymm0, %ymm3, %ymm0
    [all …]
|
D | vector-tzcnt-512.ll |
    20 ; AVX512CD-NEXT: vpshufb %ymm3, %ymm4, %ymm3
    23 ; AVX512CD-NEXT: vpshufb %ymm1, %ymm4, %ymm1
    28 ; AVX512CD-NEXT: vpshufb %ymm5, %ymm4, %ymm5
    31 ; AVX512CD-NEXT: vpshufb %ymm0, %ymm4, %ymm0
    47 ; AVX512CDBW-NEXT: vpshufb %zmm3, %zmm4, %zmm3
    50 ; AVX512CDBW-NEXT: vpshufb %zmm0, %zmm4, %zmm0
    65 ; AVX512BW-NEXT: vpshufb %zmm3, %zmm4, %zmm3
    68 ; AVX512BW-NEXT: vpshufb %zmm0, %zmm4, %zmm0
    93 ; BITALG-NEXT: vpshufb %zmm3, %zmm4, %zmm3
    96 ; BITALG-NEXT: vpshufb %zmm0, %zmm4, %zmm0
    [all …]
|
D | vector-tzcnt-256.ll |
    26 ; AVX1-NEXT: vpshufb %xmm5, %xmm6, %xmm5
    29 ; AVX1-NEXT: vpshufb %xmm1, %xmm6, %xmm1
    36 ; AVX1-NEXT: vpshufb %xmm3, %xmm6, %xmm3
    39 ; AVX1-NEXT: vpshufb %xmm0, %xmm6, %xmm0
    55 ; AVX2-NEXT: vpshufb %ymm3, %ymm4, %ymm3
    58 ; AVX2-NEXT: vpshufb %ymm0, %ymm4, %ymm0
    73 ; AVX512CDVL-NEXT: vpshufb %ymm3, %ymm4, %ymm3
    76 ; AVX512CDVL-NEXT: vpshufb %ymm0, %ymm4, %ymm0
    91 ; AVX512CD-NEXT: vpshufb %ymm3, %ymm4, %ymm3
    94 ; AVX512CD-NEXT: vpshufb %ymm0, %ymm4, %ymm0
    [all …]
|
D | vector-lzcnt-256.ll |
    19 ; AVX1-NEXT: vpshufb %xmm1, %xmm4, %xmm5
    25 ; AVX1-NEXT: vpshufb %xmm6, %xmm4, %xmm6
    43 ; AVX1-NEXT: vpshufb %xmm5, %xmm4, %xmm5
    48 ; AVX1-NEXT: vpshufb %xmm3, %xmm4, %xmm3
    73 ; AVX2-NEXT: vpshufb %ymm2, %ymm3, %ymm2
    79 ; AVX2-NEXT: vpshufb %ymm1, %ymm3, %ymm1
    103 ; AVX512VL-NEXT: vpshufb %ymm2, %ymm3, %ymm2
    109 ; AVX512VL-NEXT: vpshufb %ymm1, %ymm3, %ymm1
    133 ; AVX512VLBWDQ-NEXT: vpshufb %ymm2, %ymm3, %ymm2
    139 ; AVX512VLBWDQ-NEXT: vpshufb %ymm1, %ymm3, %ymm1
    [all …]
|
D | vector-bitreverse.ll |
    284 ; AVX-NEXT: vpshufb %xmm2, %xmm3, %xmm2
    288 ; AVX-NEXT: vpshufb %xmm0, %xmm1, %xmm0
    357 ; AVX-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[1,0,3,2,5,4,7,6,9,8,11,10,13,12,15,14]
    361 ; AVX-NEXT: vpshufb %xmm2, %xmm3, %xmm2
    365 ; AVX-NEXT: vpshufb %xmm0, %xmm1, %xmm0
    434 ; AVX-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[3,2,1,0,7,6,5,4,11,10,9,8,15,14,13,12]
    438 ; AVX-NEXT: vpshufb %xmm2, %xmm3, %xmm2
    442 ; AVX-NEXT: vpshufb %xmm0, %xmm1, %xmm0
    513 ; AVX-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[7,6,5,4,3,2,1,0,15,14,13,12,11,10,9,8]
    517 ; AVX-NEXT: vpshufb %xmm2, %xmm3, %xmm2
    [all …]
|
D | vector-shuffle-256-v16.ll |
    43 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,1,0,1,0,1,0,1,0,1,0,1,2,3,0,1]
    56 ; AVX512VL-FAST-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,1,0,1,0,1,0,1,0,1,0,1,2,3,0,1]
    82 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,1,0,1,0,1,0,1,0,1,4,5,0,1,0,1]
    95 ; AVX512VL-FAST-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,1,0,1,0,1,0,1,0,1,4,5,0,1,0,1]
    121 ; AVX2-FAST-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,1,0,1,0,1,0,1,6,7,0,1,0,1,0,1]
    134 ; AVX512VL-FAST-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,1,0,1,0,1,0,1,6,7,0,1,0,1,0,1]
    146 ; AVX1-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,1,0,1,0,1,8,9,0,1,0,1,0,1,0,1]
    152 ; AVX2OR512VL-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,1,0,1,0,1,0,1,0,1,0,1,0,1,8,9]
    164 ; AVX1-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,1,0,1,10,11,0,1,0,1,0,1,0,1,0,1]
    170 ; AVX2OR512VL-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,1,0,1,0,1,0,1,0,1,0,1,10,11,0,1]
    [all …]
|
D | shuffle-strided-with-offset-128.ll |
    43 ; AVX-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[1,3,5,7,9,11,13,15,u,u,u,u,u,u,u,u]
    50 ; AVX512F-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[1,3,5,7,9,11,13,15,u,u,u,u,u,u,u,u]
    57 ; AVX512VL-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[1,3,5,7,9,11,13,15,u,u,u,u,u,u,u,u]
    64 ; AVX512BW-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[1,3,5,7,9,11,13,15,u,u,u,u,u,u,u,u]
    99 ; AVX-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[2,3,6,7,10,11,14,15,14,15,10,11,12,13,14,15]
    106 ; AVX512F-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[2,3,6,7,10,11,14,15,14,15,10,11,12,13,14,15]
    119 ; AVX512BW-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[2,3,6,7,10,11,14,15,14,15,10,11,12,13,14,15]
    203 ; AVX-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[1,5,9,13,u,u,u,u,u,u,u,u,u,u,u,u]
    210 ; AVX512F-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[1,5,9,13,u,u,u,u,u,u,u,u,u,u,u,u]
    224 ; AVX512BW-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[1,5,9,13,u,u,u,u,u,u,u,u,u,u,u,u]
    [all …]
|
D | vector-lzcnt-512.ll |
    35 ; AVX512BW-NEXT: vpshufb %zmm2, %zmm3, %zmm2
    39 ; AVX512BW-NEXT: vpshufb %zmm0, %zmm3, %zmm0
    64 ; AVX512DQ-NEXT: vpshufb %ymm3, %ymm4, %ymm3
    67 ; AVX512DQ-NEXT: vpshufb %ymm1, %ymm4, %ymm1
    72 ; AVX512DQ-NEXT: vpshufb %ymm5, %ymm4, %ymm5
    75 ; AVX512DQ-NEXT: vpshufb %ymm0, %ymm4, %ymm0
    112 ; AVX512BW-NEXT: vpshufb %zmm2, %zmm3, %zmm2
    116 ; AVX512BW-NEXT: vpshufb %zmm0, %zmm3, %zmm0
    141 ; AVX512DQ-NEXT: vpshufb %ymm3, %ymm4, %ymm3
    144 ; AVX512DQ-NEXT: vpshufb %ymm1, %ymm4, %ymm1
    [all …]
|
/external/llvm/test/CodeGen/X86/ |
D | vector-popcnt-512.ll |
    12 ; AVX512F-NEXT: vpshufb %ymm3, %ymm4, %ymm3
    15 ; AVX512F-NEXT: vpshufb %ymm1, %ymm4, %ymm1
    20 ; AVX512F-NEXT: vpshufb %ymm5, %ymm4, %ymm5
    23 ; AVX512F-NEXT: vpshufb %ymm0, %ymm4, %ymm0
    34 ; AVX512BW-NEXT: vpshufb %zmm2, %zmm3, %zmm2
    37 ; AVX512BW-NEXT: vpshufb %zmm0, %zmm3, %zmm0
    53 ; AVX512F-NEXT: vpshufb %ymm3, %ymm4, %ymm3
    56 ; AVX512F-NEXT: vpshufb %ymm1, %ymm4, %ymm1
    65 ; AVX512F-NEXT: vpshufb %ymm5, %ymm4, %ymm5
    68 ; AVX512F-NEXT: vpshufb %ymm0, %ymm4, %ymm0
    [all …]
|
D | vector-shuffle-256-v32.ll |
    11 ; AVX1-NEXT: vpshufb %xmm1, %xmm0, %xmm0
    27 ; AVX1-NEXT: vpshufb %xmm1, %xmm0, %xmm1
    28 ; AVX1-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0]
    35 ; AVX2-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0]
    46 ; AVX1-NEXT: vpshufb %xmm1, %xmm0, %xmm1
    47 ; AVX1-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0]
    54 ; AVX2-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0]
    65 ; AVX1-NEXT: vpshufb %xmm1, %xmm0, %xmm1
    66 ; AVX1-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0]
    73 ; AVX2-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0]
    [all …]
|
D | vector-tzcnt-512.ll |
    18 ; AVX512CD-NEXT: vpshufb %ymm3, %ymm4, %ymm3
    21 ; AVX512CD-NEXT: vpshufb %ymm1, %ymm4, %ymm1
    26 ; AVX512CD-NEXT: vpshufb %ymm5, %ymm4, %ymm5
    29 ; AVX512CD-NEXT: vpshufb %ymm0, %ymm4, %ymm0
    44 ; AVX512CDBW-NEXT: vpshufb %zmm3, %zmm4, %zmm3
    47 ; AVX512CDBW-NEXT: vpshufb %zmm0, %zmm4, %zmm0
    61 ; AVX512BW-NEXT: vpshufb %zmm3, %zmm4, %zmm3
    64 ; AVX512BW-NEXT: vpshufb %zmm0, %zmm4, %zmm0
    102 ; AVX512BW-NEXT: vpshufb %zmm3, %zmm4, %zmm3
    105 ; AVX512BW-NEXT: vpshufb %zmm0, %zmm4, %zmm0
    [all …]
|
D | vector-popcnt-256.ll |
    12 ; AVX1-NEXT: vpshufb %xmm3, %xmm4, %xmm3
    15 ; AVX1-NEXT: vpshufb %xmm1, %xmm4, %xmm1
    20 ; AVX1-NEXT: vpshufb %xmm5, %xmm4, %xmm5
    23 ; AVX1-NEXT: vpshufb %xmm0, %xmm4, %xmm0
    34 ; AVX2-NEXT: vpshufb %ymm2, %ymm3, %ymm2
    37 ; AVX2-NEXT: vpshufb %ymm0, %ymm3, %ymm0
    53 ; AVX1-NEXT: vpshufb %xmm3, %xmm4, %xmm3
    56 ; AVX1-NEXT: vpshufb %xmm1, %xmm4, %xmm1
    65 ; AVX1-NEXT: vpshufb %xmm5, %xmm4, %xmm5
    68 ; AVX1-NEXT: vpshufb %xmm0, %xmm4, %xmm0
    [all …]
|
D | avx-cvt-2.ll |
    16 ; CHECK-NEXT: vpshufb %xmm2, %xmm1, %xmm1
    17 ; CHECK-NEXT: vpshufb %xmm2, %xmm0, %xmm0
    33 ; CHECK-NEXT: vpshufb %xmm2, %xmm1, %xmm1
    34 ; CHECK-NEXT: vpshufb %xmm2, %xmm0, %xmm0
    50 ; CHECK-NEXT: vpshufb %xmm2, %xmm1, %xmm1
    51 ; CHECK-NEXT: vpshufb %xmm2, %xmm0, %xmm0
    53 ; CHECK-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,2,4,6,8,10,12,14,u,u,u,u,u,u,u,u]
    68 ; CHECK-NEXT: vpshufb %xmm2, %xmm1, %xmm1
    69 ; CHECK-NEXT: vpshufb %xmm2, %xmm0, %xmm0
    71 ; CHECK-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,2,4,6,8,10,12,14,u,u,u,u,u,u,u,u]
|
D | vector-tzcnt-256.ll |
    22 ; AVX1-NEXT: vpshufb %xmm5, %xmm6, %xmm5
    25 ; AVX1-NEXT: vpshufb %xmm1, %xmm6, %xmm1
    30 ; AVX1-NEXT: vpshufb %xmm3, %xmm6, %xmm3
    33 ; AVX1-NEXT: vpshufb %xmm0, %xmm6, %xmm0
    49 ; AVX2-NEXT: vpshufb %ymm3, %ymm4, %ymm3
    52 ; AVX2-NEXT: vpshufb %ymm0, %ymm4, %ymm0
    66 ; AVX512CDVL-NEXT: vpshufb %ymm3, %ymm4, %ymm3
    69 ; AVX512CDVL-NEXT: vpshufb %ymm0, %ymm4, %ymm0
    84 ; AVX512CD-NEXT: vpshufb %ymm3, %ymm4, %ymm3
    87 ; AVX512CD-NEXT: vpshufb %ymm0, %ymm4, %ymm0
    [all …]
|
D | vector-shuffle-256-v16.ll |
    28 ; AVX1-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,1,0,1,0,1,0,1,0,1,0,1,2,3,0,1]
    35 ; AVX2-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,1,0,1,0,1,0,1,0,1,0,1,2,3,0,1]
    47 ; AVX1-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,1,0,1,0,1,0,1,0,1,4,5,0,1,0,1]
    54 ; AVX2-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,1,0,1,0,1,0,1,0,1,4,5,0,1,0,1]
    66 ; AVX1-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,1,0,1,0,1,0,1,6,7,0,1,0,1,0,1]
    73 ; AVX2-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,1,0,1,0,1,0,1,6,7,0,1,0,1,0,1]
    85 ; AVX1-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,1,0,1,0,1,8,9,0,1,0,1,0,1,0,1]
    92 ; AVX2-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,1,0,1,0,1,8,9,0,1,0,1,0,1,0,1]
    104 ; AVX1-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,1,0,1,10,11,0,1,0,1,0,1,0,1,0,1]
    111 ; AVX2-NEXT: vpshufb {{.*#+}} xmm0 = xmm0[0,1,0,1,10,11,0,1,0,1,0,1,0,1,0,1]
    [all …]
|
/external/boringssl/mac-x86_64/crypto/fipsmodule/ |
D | aesni-gcm-x86_64.S |
    264 vpshufb %xmm0,%xmm1,%xmm6
    270 vpshufb %xmm0,%xmm10,%xmm10
    272 vpshufb %xmm0,%xmm11,%xmm11
    275 vpshufb %xmm0,%xmm12,%xmm12
    278 vpshufb %xmm0,%xmm13,%xmm13
    279 vpshufb %xmm0,%xmm14,%xmm14
    280 vpshufb %xmm0,%xmm1,%xmm1
    387 vpshufb %xmm0,%xmm8,%xmm8
    414 vpshufb %xmm0,%xmm7,%xmm7
    416 vpshufb %xmm0,%xmm4,%xmm4
    [all …]
|
/external/boringssl/linux-x86_64/crypto/fipsmodule/ |
D | aesni-gcm-x86_64.S |
    264 vpshufb %xmm0,%xmm1,%xmm6
    270 vpshufb %xmm0,%xmm10,%xmm10
    272 vpshufb %xmm0,%xmm11,%xmm11
    275 vpshufb %xmm0,%xmm12,%xmm12
    278 vpshufb %xmm0,%xmm13,%xmm13
    279 vpshufb %xmm0,%xmm14,%xmm14
    280 vpshufb %xmm0,%xmm1,%xmm1
    387 vpshufb %xmm0,%xmm8,%xmm8
    414 vpshufb %xmm0,%xmm7,%xmm7
    416 vpshufb %xmm0,%xmm4,%xmm4
    [all …]
|
/external/boringssl/win-x86_64/crypto/fipsmodule/ |
D | aesni-gcm-x86_64.asm |
    263 vpshufb xmm6,xmm1,xmm0
    269 vpshufb xmm10,xmm10,xmm0
    271 vpshufb xmm11,xmm11,xmm0
    274 vpshufb xmm12,xmm12,xmm0
    277 vpshufb xmm13,xmm13,xmm0
    278 vpshufb xmm14,xmm14,xmm0
    279 vpshufb xmm1,xmm1,xmm0
    409 vpshufb xmm8,xmm8,xmm0
    436 vpshufb xmm7,xmm7,xmm0
    438 vpshufb xmm4,xmm4,xmm0
    [all …]
|
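All of the hits above exercise the same primitive: vpshufb (the VEX/EVEX form of SSSE3 pshufb) selects bytes of a source vector using the low four bits of each control byte, zeroes any result byte whose control byte has its high bit set, and operates independently within each 128-bit lane. The C sketch below is illustrative only and is not taken from any of the files listed; it assumes an SSSE3-capable toolchain (e.g. compile with -mssse3) and uses the standard _mm_shuffle_epi8 intrinsic, the 128-bit form of the same instruction, to reverse the bytes of a 16-byte vector.

    /* Minimal sketch of pshufb/vpshufb byte-shuffle semantics (hypothetical example). */
    #include <stdio.h>
    #include <stdint.h>
    #include <tmmintrin.h>  /* SSSE3: _mm_shuffle_epi8 */

    int main(void) {
        /* Source vector holding bytes 0..15. */
        __m128i src = _mm_setr_epi8(0, 1, 2, 3, 4, 5, 6, 7,
                                    8, 9, 10, 11, 12, 13, 14, 15);
        /* Control bytes 15..0 reverse the byte order; a control byte with its
           high bit set (e.g. 0x80) would zero that result byte instead. */
        __m128i ctl = _mm_setr_epi8(15, 14, 13, 12, 11, 10, 9, 8,
                                    7, 6, 5, 4, 3, 2, 1, 0);
        __m128i dst = _mm_shuffle_epi8(src, ctl);

        uint8_t out[16];
        _mm_storeu_si128((__m128i *)out, dst);
        for (int i = 0; i < 16; i++)
            printf("%d ", out[i]);  /* prints 15 14 ... 1 0 */
        printf("\n");
        return 0;
    }

The 256-bit (ymm) and 512-bit (zmm) forms seen in the LLVM tests apply the same per-byte rule to each 128-bit lane separately, which is why the test masks above repeat their index patterns lane by lane.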