// RUN: %clang_cc1 %s -O3 -triple=x86_64-unknown-unknown -target-feature +tbm -emit-llvm -o - | FileCheck %s

// Don't include mm_malloc.h, it's system specific.
#define __MM_MALLOC_H

#include <x86intrin.h>

// __bextri_u32 must lower to the dedicated TBM intrinsic call.
unsigned int test__bextri_u32(unsigned int a) {
  // CHECK: call i32 @llvm.x86.tbm.bextri.u32
  return __bextri_u32(a, 1);
}

// __bextri_u64 must lower to the dedicated TBM intrinsic call.
unsigned long long test__bextri_u64(unsigned long long a) {
  // CHECK: call i64 @llvm.x86.tbm.bextri.u64
  return __bextri_u64(a, 2);
}

// A control constant wider than 32 bits must still select the u64 intrinsic
// (the immediate is truncated by the builtin, not rejected).
unsigned long long test__bextri_u64_bigint(unsigned long long a) {
  // CHECK: call i64 @llvm.x86.tbm.bextri.u64
  return __bextri_u64(a, 0x7fffffffffLL);
}

// BLCFILL has no LLVM intrinsic: it must lower to plain IR (x & (x + 1)).
unsigned int test__blcfill_u32(unsigned int a) {
  // CHECK: [[TMP:%.*]] = add i32 [[SRC:%.*]], 1
  // CHECK-NEXT: %{{.*}} = and i32 [[TMP]], [[SRC]]
  return __blcfill_u32(a);
}

// BLCFILL has no LLVM intrinsic: it must lower to plain IR (x & (x + 1)).
// NOTE: the variable was previously defined as [[TMPT]] but referenced as
// [[TMP]], which FileCheck rejects as an undefined variable; fixed to match.
unsigned long long test__blcfill_u64(unsigned long long a) {
  // CHECK: [[TMP:%.*]] = add i64 [[SRC:%.*]], 1
  // CHECK-NEXT: %{{.*}} = and i64 [[TMP]], [[SRC]]
  return __blcfill_u64(a);
}

// BLCI lowers to plain IR: x | ~(x + 1), emitted as (-2 - x) | x.
unsigned int test__blci_u32(unsigned int a) {
  // CHECK: [[TMP:%.*]] = sub i32 -2, [[SRC:%.*]]
  // CHECK-NEXT: %{{.*}} = or i32 [[TMP]], [[SRC]]
  return __blci_u32(a);
}

// BLCI lowers to plain IR: x | ~(x + 1), emitted as (-2 - x) | x.
unsigned long long test__blci_u64(unsigned long long a) {
  // CHECK: [[TMP:%.*]] = sub i64 -2, [[SRC:%.*]]
  // CHECK-NEXT: %{{.*}} = or i64 [[TMP]], [[SRC]]
  return __blci_u64(a);
}

// BLCIC lowers to plain IR: ~x & (x + 1).
unsigned int test__blcic_u32(unsigned int a) {
  // CHECK: [[TMP1:%.*]] = xor i32 [[SRC:%.*]], -1
  // CHECK-NEXT: [[TMP2:%.*]] = add i32 [[SRC]], 1
  // CHECK-NEXT: {{.*}} = and i32 [[TMP2]], [[TMP1]]
  return __blcic_u32(a);
}

// BLCIC lowers to plain IR: ~x & (x + 1).
unsigned long long test__blcic_u64(unsigned long long a) {
  // CHECK: [[TMP1:%.*]] = xor i64 [[SRC:%.*]], -1
  // CHECK-NEXT: [[TMP2:%.*]] = add i64 [[SRC]], 1
  // CHECK-NEXT: {{.*}} = and i64 [[TMP2]], [[TMP1]]
  return __blcic_u64(a);
}

// BLCMSK lowers to plain IR: x ^ (x + 1).
unsigned int test__blcmsk_u32(unsigned int a) {
  // CHECK: [[TMP:%.*]] = add i32 [[SRC:%.*]], 1
  // CHECK-NEXT: {{.*}} = xor i32 [[TMP]], [[SRC]]
  return __blcmsk_u32(a);
}

// BLCMSK lowers to plain IR: x ^ (x + 1).
unsigned long long test__blcmsk_u64(unsigned long long a) {
  // CHECK: [[TMP:%.*]] = add i64 [[SRC:%.*]], 1
  // CHECK-NEXT: {{.*}} = xor i64 [[TMP]], [[SRC]]
  return __blcmsk_u64(a);
}

// BLCS lowers to plain IR: x | (x + 1).
unsigned int test__blcs_u32(unsigned int a) {
  // CHECK: [[TMP:%.*]] = add i32 [[SRC:%.*]], 1
  // CHECK-NEXT: {{.*}} = or i32 [[TMP]], [[SRC]]
  return __blcs_u32(a);
}

// BLCS lowers to plain IR: x | (x + 1).
unsigned long long test__blcs_u64(unsigned long long a) {
  // CHECK: [[TMP:%.*]] = add i64 [[SRC:%.*]], 1
  // CHECK-NEXT: {{.*}} = or i64 [[TMP]], [[SRC]]
  return __blcs_u64(a);
}

// BLSFILL lowers to plain IR: x | (x - 1).
unsigned int test__blsfill_u32(unsigned int a) {
  // CHECK: [[TMP:%.*]] = add i32 [[SRC:%.*]], -1
  // CHECK-NEXT: {{.*}} = or i32 [[TMP]], [[SRC]]
  return __blsfill_u32(a);
}

// BLSFILL lowers to plain IR: x | (x - 1).
unsigned long long test__blsfill_u64(unsigned long long a) {
  // CHECK: [[TMP:%.*]] = add i64 [[SRC:%.*]], -1
  // CHECK-NEXT: {{.*}} = or i64 [[TMP]], [[SRC]]
  return __blsfill_u64(a);
}

// BLSIC lowers to plain IR: ~x | (x - 1).
// ([[SRC]] is intentionally re-bound on the second line; FileCheck allows
// redefining a pattern variable.)
unsigned int test__blsic_u32(unsigned int a) {
  // CHECK: [[TMP1:%.*]] = xor i32 [[SRC:%.*]], -1
  // CHECK-NEXT: [[TMP2:%.*]] = add i32 [[SRC:%.*]], -1
  // CHECK-NEXT: {{.*}} = or i32 [[TMP2]], [[TMP1]]
  return __blsic_u32(a);
}

// BLSIC lowers to plain IR: ~x | (x - 1).
unsigned long long test__blsic_u64(unsigned long long a) {
  // CHECK: [[TMP1:%.*]] = xor i64 [[SRC:%.*]], -1
  // CHECK-NEXT: [[TMP2:%.*]] = add i64 [[SRC:%.*]], -1
  // CHECK-NEXT: {{.*}} = or i64 [[TMP2]], [[TMP1]]
  return __blsic_u64(a);
}

// T1MSKC lowers to plain IR: ~x | (x + 1).
unsigned int test__t1mskc_u32(unsigned int a) {
  // CHECK: [[TMP1:%.*]] = xor i32 [[SRC:%.*]], -1
  // CHECK-NEXT: [[TMP2:%.*]] = add i32 [[SRC:%.*]], 1
  // CHECK-NEXT: {{.*}} = or i32 [[TMP2]], [[TMP1]]
  return __t1mskc_u32(a);
}

// T1MSKC lowers to plain IR: ~x | (x + 1).
unsigned long long test__t1mskc_u64(unsigned long long a) {
  // CHECK: [[TMP1:%.*]] = xor i64 [[SRC:%.*]], -1
  // CHECK-NEXT: [[TMP2:%.*]] = add i64 [[SRC:%.*]], 1
  // CHECK-NEXT: {{.*}} = or i64 [[TMP2]], [[TMP1]]
  return __t1mskc_u64(a);
}

// TZMSK lowers to plain IR: ~x & (x - 1).
unsigned int test__tzmsk_u32(unsigned int a) {
  // CHECK: [[TMP1:%.*]] = xor i32 [[SRC:%.*]], -1
  // CHECK-NEXT: [[TMP2:%.*]] = add i32 [[SRC:%.*]], -1
  // CHECK-NEXT: {{.*}} = and i32 [[TMP2]], [[TMP1]]
  return __tzmsk_u32(a);
}

// TZMSK lowers to plain IR: ~x & (x - 1).
unsigned long long test__tzmsk_u64(unsigned long long a) {
  // CHECK: [[TMP1:%.*]] = xor i64 [[SRC:%.*]], -1
  // CHECK-NEXT: [[TMP2:%.*]] = add i64 [[SRC:%.*]], -1
  // CHECK-NEXT: {{.*}} = and i64 [[TMP2]], [[TMP1]]
  return __tzmsk_u64(a);
}
