; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -fast-isel -mtriple=x86_64-unknown-unknown -mattr=+tbm | FileCheck %s --check-prefix=X64

; NOTE: This should use IR equivalent to what is generated by clang/test/CodeGen/tbm-builtins.c

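; Apart from the bextri intrinsic, which maps straight to BEXTR, the checks
; below expect the generic lowering (lea/add/sub plus a logic op) rather than
; the TBM instructions: fast-isel selects each IR instruction individually
; and does not match the multi-instruction TBM idioms.

; __bextri: extract a bit field from %a0 under an immediate control word
; (BEXTRI: control bits [7:0] give the start position, bits [15:8] the
; length of the field).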
define i64 @test__bextri_u64(i64 %a0) {
; X64-LABEL: test__bextri_u64:
; X64:       # BB#0:
; X64-NEXT:    bextr $1, %rdi, %rax
; X64-NEXT:    retq
  %1 = call i64 @llvm.x86.tbm.bextri.u64(i64 %a0, i64 1)
  ret i64 %1
}

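; __blcfill: x & (x + 1) clears all trailing one bits of x (the BLCFILL
; "fill from lowest clear bit" operation).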
define i64 @test__blcfill_u64(i64 %a0) {
; X64-LABEL: test__blcfill_u64:
; X64:       # BB#0:
; X64-NEXT:    leaq 1(%rdi), %rax
; X64-NEXT:    andq %rdi, %rax
; X64-NEXT:    retq
  %1 = add i64 %a0, 1
  %2 = and i64 %a0, %1
  ret i64 %2
}

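; __blci: x | ~(x + 1) yields all ones except the lowest clear bit of x
; (BLCI, "isolate lowest clear bit").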
define i64 @test__blci_u64(i64 %a0) {
; X64-LABEL: test__blci_u64:
; X64:       # BB#0:
; X64-NEXT:    leaq 1(%rdi), %rax
; X64-NEXT:    xorq $-1, %rax
; X64-NEXT:    orq %rdi, %rax
; X64-NEXT:    retq
  %1 = add i64 %a0, 1
  %2 = xor i64 %1, -1
  %3 = or i64 %a0, %2
  ret i64 %3
}

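; __blcic: ~x & (x + 1) leaves only the lowest clear bit of x set
; (BLCIC, "isolate lowest clear bit and complement").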
define i64 @test__blcic_u64(i64 %a0) {
; X64-LABEL: test__blcic_u64:
; X64:       # BB#0:
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    xorq $-1, %rax
; X64-NEXT:    addq $1, %rdi
; X64-NEXT:    andq %rax, %rdi
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    retq
  %1 = xor i64 %a0, -1
  %2 = add i64 %a0, 1
  %3 = and i64 %1, %2
  ret i64 %3
}

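; __blcmsk: x ^ (x + 1) builds a mask covering the trailing ones and the
; lowest clear bit of x (BLCMSK, "mask from lowest clear bit").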
define i64 @test__blcmsk_u64(i64 %a0) {
; X64-LABEL: test__blcmsk_u64:
; X64:       # BB#0:
; X64-NEXT:    leaq 1(%rdi), %rax
; X64-NEXT:    xorq %rdi, %rax
; X64-NEXT:    retq
  %1 = add i64 %a0, 1
  %2 = xor i64 %a0, %1
  ret i64 %2
}

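; __blcs: x | (x + 1) sets the lowest clear bit of x (BLCS,
; "set lowest clear bit").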
define i64 @test__blcs_u64(i64 %a0) {
; X64-LABEL: test__blcs_u64:
; X64:       # BB#0:
; X64-NEXT:    leaq 1(%rdi), %rax
; X64-NEXT:    orq %rdi, %rax
; X64-NEXT:    retq
  %1 = add i64 %a0, 1
  %2 = or i64 %a0, %1
  ret i64 %2
}

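; __blsfill: x | (x - 1) sets all trailing zero bits below the lowest set
; bit of x (BLSFILL, "fill from lowest set bit").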
define i64 @test__blsfill_u64(i64 %a0) {
; X64-LABEL: test__blsfill_u64:
; X64:       # BB#0:
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    subq $1, %rax
; X64-NEXT:    orq %rdi, %rax
; X64-NEXT:    retq
  %1 = sub i64 %a0, 1
  %2 = or i64 %a0, %1
  ret i64 %2
}

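; __blsic: ~x | (x - 1) yields all ones except the lowest set bit of x
; (BLSIC, "isolate lowest set bit and complement").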
define i64 @test__blsic_u64(i64 %a0) {
; X64-LABEL: test__blsic_u64:
; X64:       # BB#0:
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    xorq $-1, %rax
; X64-NEXT:    subq $1, %rdi
; X64-NEXT:    orq %rax, %rdi
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    retq
  %1 = xor i64 %a0, -1
  %2 = sub i64 %a0, 1
  %3 = or i64 %1, %2
  ret i64 %3
}

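; __t1mskc: ~x | (x + 1) yields the complement of the trailing-ones mask
; of x (T1MSKC, "inverse mask from trailing ones").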
define i64 @test__t1mskc_u64(i64 %a0) {
; X64-LABEL: test__t1mskc_u64:
; X64:       # BB#0:
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    xorq $-1, %rax
; X64-NEXT:    addq $1, %rdi
; X64-NEXT:    orq %rax, %rdi
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    retq
  %1 = xor i64 %a0, -1
  %2 = add i64 %a0, 1
  %3 = or i64 %1, %2
  ret i64 %3
}

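; __tzmsk: ~x & (x - 1) builds a mask of the trailing zero bits of x
; (TZMSK, "mask from trailing zeros").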
define i64 @test__tzmsk_u64(i64 %a0) {
; X64-LABEL: test__tzmsk_u64:
; X64:       # BB#0:
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    xorq $-1, %rax
; X64-NEXT:    subq $1, %rdi
; X64-NEXT:    andq %rax, %rdi
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    retq
  %1 = xor i64 %a0, -1
  %2 = sub i64 %a0, 1
  %3 = and i64 %1, %2
  ret i64 %3
}

declare i64 @llvm.x86.tbm.bextri.u64(i64, i64)