; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+bmi,+bmi2 | FileCheck %s --check-prefixes=CHECK

; Codegen tests for 64-bit BMI/BMI2 intrinsics: each intrinsic should select
; the corresponding instruction, folding memory operands where legal.

; bzhi with both operands in registers.
define i64 @bzhi64(i64 %x, i64 %y) {
; CHECK-LABEL: bzhi64:
; CHECK:       # %bb.0:
; CHECK-NEXT:    bzhiq %rsi, %rdi, %rax
; CHECK-NEXT:    retq
  %tmp = tail call i64 @llvm.x86.bmi.bzhi.64(i64 %x, i64 %y)
  ret i64 %tmp
}

; bzhi with the source operand folded from memory.
define i64 @bzhi64_load(i64* %x, i64 %y) {
; CHECK-LABEL: bzhi64_load:
; CHECK:       # %bb.0:
; CHECK-NEXT:    bzhiq %rsi, (%rdi), %rax
; CHECK-NEXT:    retq
  %x1 = load i64, i64* %x
  %tmp = tail call i64 @llvm.x86.bmi.bzhi.64(i64 %x1, i64 %y)
  ret i64 %tmp
}

declare i64 @llvm.x86.bmi.bzhi.64(i64, i64)

; pdep with both operands in registers.
define i64 @pdep64(i64 %x, i64 %y) {
; CHECK-LABEL: pdep64:
; CHECK:       # %bb.0:
; CHECK-NEXT:    pdepq %rsi, %rdi, %rax
; CHECK-NEXT:    retq
  %tmp = tail call i64 @llvm.x86.bmi.pdep.64(i64 %x, i64 %y)
  ret i64 %tmp
}

; pdep with the mask operand folded from memory.
define i64 @pdep64_load(i64 %x, i64* %y) {
; CHECK-LABEL: pdep64_load:
; CHECK:       # %bb.0:
; CHECK-NEXT:    pdepq (%rsi), %rdi, %rax
; CHECK-NEXT:    retq
  %y1 = load i64, i64* %y
  %tmp = tail call i64 @llvm.x86.bmi.pdep.64(i64 %x, i64 %y1)
  ret i64 %tmp
}

; The sext of %x should be elided: only the low bits selected by the mask
; matter, so the input extension degrades to an any-extend (the "kill" line).
define i64 @pdep64_anyext(i32 %x) {
; CHECK-LABEL: pdep64_anyext:
; CHECK:       # %bb.0:
; CHECK-NEXT:    # kill: def $edi killed $edi def $rdi
; CHECK-NEXT:    movabsq $6148914691236517205, %rax # imm = 0x5555555555555555
; CHECK-NEXT:    pdepq %rax, %rdi, %rax
; CHECK-NEXT:    retq
  %x1 = sext i32 %x to i64
  %tmp = tail call i64 @llvm.x86.bmi.pdep.64(i64 %x1, i64 6148914691236517205)
  ret i64 %tmp
}

declare i64 @llvm.x86.bmi.pdep.64(i64, i64)

; pext with both operands in registers.
define i64 @pext64(i64 %x, i64 %y) {
; CHECK-LABEL: pext64:
; CHECK:       # %bb.0:
; CHECK-NEXT:    pextq %rsi, %rdi, %rax
; CHECK-NEXT:    retq
  %tmp = tail call i64 @llvm.x86.bmi.pext.64(i64 %x, i64 %y)
  ret i64 %tmp
}

; pext with the mask operand folded from memory.
define i64 @pext64_load(i64 %x, i64* %y) {
; CHECK-LABEL: pext64_load:
; CHECK:       # %bb.0:
; CHECK-NEXT:    pextq (%rsi), %rdi, %rax
; CHECK-NEXT:    retq
  %y1 = load i64, i64* %y
  %tmp = tail call i64 @llvm.x86.bmi.pext.64(i64 %x, i64 %y1)
  ret i64 %tmp
}

; The trailing 'and' should be removed: a 32-bit-popcount mask means pext
; produces at most 32 set bits, so the masking is known redundant.
define i64 @pext64_knownbits(i64 %x, i64 %y) {
; CHECK-LABEL: pext64_knownbits:
; CHECK:       # %bb.0:
; CHECK-NEXT:    movabsq $6148914691236517205, %rax # imm = 0x5555555555555555
; CHECK-NEXT:    pextq %rax, %rdi, %rax
; CHECK-NEXT:    retq
  %tmp = tail call i64 @llvm.x86.bmi.pext.64(i64 %x, i64 6148914691236517205)
  %tmp2 = and i64 %tmp, 4294967295
  ret i64 %tmp2
}

declare i64 @llvm.x86.bmi.pext.64(i64, i64)

; 64x64->128 widening multiply should select mulx, returning the low half
; and storing the high half.
define i64 @mulx64(i64 %x, i64 %y, i64* %p) {
; CHECK-LABEL: mulx64:
; CHECK:       # %bb.0:
; CHECK-NEXT:    movq %rdx, %rcx
; CHECK-NEXT:    movq %rdi, %rdx
; CHECK-NEXT:    mulxq %rsi, %rax, %rdx
; CHECK-NEXT:    movq %rdx, (%rcx)
; CHECK-NEXT:    retq
  %x1 = zext i64 %x to i128
  %y1 = zext i64 %y to i128
  %r1 = mul i128 %x1, %y1
  %h1 = lshr i128 %r1, 64
  %h = trunc i128 %h1 to i64
  %l = trunc i128 %r1 to i64
  store i64 %h, i64* %p
  ret i64 %l
}

; Same as mulx64, with one multiplicand folded from memory.
define i64 @mulx64_load(i64 %x, i64* %y, i64* %p) {
; CHECK-LABEL: mulx64_load:
; CHECK:       # %bb.0:
; CHECK-NEXT:    movq %rdx, %rcx
; CHECK-NEXT:    movq %rdi, %rdx
; CHECK-NEXT:    mulxq (%rsi), %rax, %rdx
; CHECK-NEXT:    movq %rdx, (%rcx)
; CHECK-NEXT:    retq
  %y1 = load i64, i64* %y
  %x2 = zext i64 %x to i128
  %y2 = zext i64 %y1 to i128
  %r1 = mul i128 %x2, %y2
  %h1 = lshr i128 %r1, 64
  %h = trunc i128 %h1 to i64
  %l = trunc i128 %r1 to i64
  store i64 %h, i64* %p
  ret i64 %l
}