Searched refs:vmovsd (Results 1 – 25 of 59) sorted by relevance
/external/llvm/test/CodeGen/X86/ |
D | lower-vec-shift.ll |
    38 ; AVX-NEXT: vmovsd
    41 ; AVX2-NEXT: vmovsd
    70 ; AVX-NEXT: vmovsd
    90 ; AVX-NEXT: vmovsd
    93 ; AVX2-NEXT: vmovsd
    122 ; AVX-NEXT: vmovsd
D | pr23103.ll |
    11 ; CHECK: vmovsd (%rdi), %xmm0
    12 ; CHECK-NEXT: vmovsd %xmm0, {{.*}}(%rsp) {{.*#+}} 8-byte Spill
D | chain_order.ll |
    4 ; CHECK: vmovsd (%rdi), %xmm{{.*}}
    5 ; CHECK-NEXT: vmovsd 16(%rdi), %xmm{{.*}}
D | fma-scalar-memfold.ll |
    217 ; CHECK: vmovsd (%rcx), %[[XMM:xmm[0-9]+]]
    238 ; CHECK: vmovsd (%rcx), %[[XMM:xmm[0-9]+]]
    259 ; CHECK: vmovsd (%rcx), %[[XMM:xmm[0-9]+]]
    280 ; CHECK: vmovsd (%rcx), %[[XMM:xmm[0-9]+]]
    301 ; CHECK: vmovsd (%rcx), %[[XMM:xmm[0-9]+]]
    322 ; CHECK: vmovsd (%rcx), %[[XMM:xmm[0-9]+]]
    343 ; CHECK: vmovsd (%rcx), %[[XMM:xmm[0-9]+]]
    364 ; CHECK: vmovsd (%rcx), %[[XMM:xmm[0-9]+]]
D | avx-bitcast.ll |
    6 ; CHECK: vmovsd {{.*#+}} xmm0 = mem[0],zero
D | avx-shuffle-x86_32.ll |
    19 ; CHECK-NEXT: vmovsd {{.*#+}} xmm0 = mem[0],zero
D | machine-combiner.ll |
    629 ; AVX-NEXT: vmovsd %xmm0, 16(%rsp)
    631 ; AVX-NEXT: vmovsd %xmm0, 8(%rsp)
    633 ; AVX-NEXT: vmovsd %xmm0, (%rsp)
    635 ; AVX-NEXT: vmovsd 8(%rsp), %xmm1
    653 ; AVX-NEXT: vmovsd %xmm0, 16(%rsp)
    655 ; AVX-NEXT: vmovsd %xmm0, 8(%rsp)
    657 ; AVX-NEXT: vmovsd %xmm0, (%rsp)
    659 ; AVX-NEXT: vmovsd 8(%rsp), %xmm1
D | avx-load-store.ll |
    28 ;; + ins_subvec+ zext into only a single vmovss or vmovsd or vinsertps from memory
    42 ; CHECK: vmovsd (%
D | fast-isel-fptrunc-fpext.ll |
    58 ; AVX: vmovsd (%rdi), %xmm0
D | merge-consecutive-loads-256.ll |
    78 ; AVX-NEXT: vmovsd {{.*#+}} xmm0 = mem[0],zero
    84 ; X32-AVX-NEXT: vmovsd {{.*#+}} xmm0 = mem[0],zero
    318 ; AVX-NEXT: vmovsd {{.*#+}} xmm0 = mem[0],zero
    324 ; X32-AVX-NEXT: vmovsd {{.*#+}} xmm0 = mem[0],zero
    652 ; AVX1-NEXT: vmovsd {{.*#+}} xmm0 = mem[0],zero
    660 ; AVX2-NEXT: vmovsd {{.*#+}} xmm0 = mem[0],zero
    668 ; AVX512F-NEXT: vmovsd {{.*#+}} xmm0 = mem[0],zero
    677 ; X32-AVX-NEXT: vmovsd {{.*#+}} xmm0 = mem[0],zero
D | logical-load-fold.ll |
    23 ; AVX-NEXT: vmovsd {{.*#+}} xmm1 = mem[0],zero
D | fp-trunc.ll |
    20 ; AVX-NEXT: vmovsd {{[0-9]+}}(%esp), %xmm0
D | fp-load-trunc.ll |
    20 ; AVX-NEXT: vmovsd (%eax), %xmm0
D | avx512-unsafe-fp-math.ll |
    99 ; CHECK-NEXT: vmovsd {{.*#+}} xmm1 = mem[0],zero
D | avx-cvt.ll |
    118 ; CHECK-NEXT: vmovsd %xmm0, -{{[0-9]+}}(%rsp)
D | vector-shuffle-variable-256.ll |
    17 ; ALL-NEXT: vmovsd {{.*#+}} xmm0 = mem[0],zero
    19 ; ALL-NEXT: vmovsd {{.*#+}} xmm1 = mem[0],zero
    45 ; ALL-NEXT: vmovsd {{.*#+}} xmm1 = mem[0],zero
    65 ; ALL-NEXT: vmovsd {{.*#+}} xmm0 = mem[0],zero
    67 ; ALL-NEXT: vmovsd {{.*#+}} xmm1 = mem[0],zero
D | fold-load-unops.ll |
    73 ; AVX-NEXT: vmovsd (%rdi), %xmm0
D | merge-consecutive-loads-512.ll |
    261 ; ALL-NEXT: vmovsd {{.*#+}} xmm0 = mem[0],zero
    267 ; X32-AVX512F-NEXT: vmovsd {{.*#+}} xmm0 = mem[0],zero
    653 ; ALL-NEXT: vmovsd {{.*#+}} xmm0 = mem[0],zero
    662 ; X32-AVX512F-NEXT: vmovsd {{.*#+}} xmm0 = mem[0],zero
D | merge-consecutive-loads-128.ll |
    123 ; AVX-NEXT: vmovsd {{.*#+}} xmm0 = mem[0],zero
    189 ; AVX-NEXT: vmovsd {{.*#+}} xmm0 = mem[0],zero
    224 ; AVX-NEXT: vmovsd {{.*#+}} xmm0 = mem[0],zero
    265 ; AVX-NEXT: vmovsd {{.*#+}} xmm0 = mem[0],zero
D | avx512-select.ll |
    54 ; CHECK-NEXT: vmovsd %xmm2, %xmm0, %xmm1 {%k1}
D | vec_loadsingles.ll |
    8 ; ALL-NEXT: vmovsd {{.*#+}} xmm0 = mem[0],zero
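
Note: the CodeGen matches above all follow the same FileCheck idiom: llc compiles a small IR function and the CHECK lines match the emitted vmovsd, which moves one double and, in the load form, zeroes the destination's upper lanes. A minimal sketch of such a test, assuming the typed-pointer IR of this LLVM tree (the function name and RUN line are illustrative, not taken from any file listed):

    ; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx | FileCheck %s

    ; A plain scalar double load should lower to a single vmovsd from
    ; memory, filling xmm0 with mem[0] and zeroing the upper element.
    define double @load_f64(double* %p) {
    ; CHECK-LABEL: load_f64:
    ; CHECK: vmovsd {{.*#+}} xmm0 = mem[0],zero
      %v = load double, double* %p
      ret double %v
    }
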
/external/swiftshader/third_party/LLVM/test/CodeGen/X86/ |
D | avx-bitcast.ll |
    3 ; CHECK: vmovsd (%
D | avx-load-store.ll |
    28 ;; + ins_subvec+ zext into only a single vmovss or vmovsd
    37 ; CHECK: vmovsd (%
/external/llvm/test/MC/X86/ |
D | x86_64-avx-encoding.s |
    269 vmovsd -4(%rbx,%rcx,8), %xmm10
    273 vmovsd %xmm14, %xmm10, %xmm15
    4239 vmovsd %xmm0, %xmm0, %xmm8
    4243 vmovsd %xmm0, %xmm8, %xmm0
    4247 vmovsd %xmm8, %xmm0, %xmm0
D | avx512-encodings.s |
    18602 vmovsd.s %xmm15, %xmm22, %xmm21
    18606 vmovsd.s %xmm15, %xmm22, %xmm21 {%k7}
    18610 vmovsd.s %xmm15, %xmm22, %xmm21 {%k7} {z}
    18614 vmovsd.s %xmm8, %xmm13, %xmm23
    18618 vmovsd.s %xmm8, %xmm13, %xmm3 {%k5}
    18622 vmovsd.s %xmm8, %xmm13, %xmm3 {%k5} {z}
    18626 vmovsd.s %xmm4, %xmm15, %xmm24
    18630 vmovsd.s %xmm4, %xmm15, %xmm4 {%k6}
    18634 vmovsd.s %xmm4, %xmm15, %xmm4 {%k6} {z}
    18638 vmovsd.s %xmm14, %xmm2, %xmm20
    [all …]
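
Note: the MC-layer matches above exercise the assembler rather than instruction selection: llvm-mc assembles each instruction and FileCheck compares the printed byte encoding. A minimal sketch in the style of x86_64-avx-encoding.s, assuming the plain VEX-encoded load form; the instruction choice is illustrative, and the bytes follow from VEX.LIG.F2.0F 10 /r applied to xmm0 and (%rdi):

    // RUN: llvm-mc -triple x86_64-unknown-unknown --show-encoding %s | FileCheck %s

    // Memory-source vmovsd takes the two-byte VEX prefix 0xc5 0xfb
    // (pp=F2, map=0F), opcode 0x10, then ModRM 0x07 for xmm0, (%rdi).
    // CHECK: vmovsd (%rdi), %xmm0
    // CHECK: encoding: [0xc5,0xfb,0x10,0x07]
    vmovsd (%rdi), %xmm0
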