1; RUN: llc -march=amdgcn -verify-machineinstrs < %s | FileCheck -check-prefix=SI %s
2; RUN: llc -march=amdgcn -mcpu=tonga -mattr=-flat-for-global -verify-machineinstrs < %s | FileCheck -check-prefix=SI %s
3
4; SI-LABEL: {{^}}test_i64_eq:
5; SI: v_cmp_eq_u64
6define amdgpu_kernel void @test_i64_eq(i32 addrspace(1)* %out, i64 %a, i64 %b) nounwind {
7  %cmp = icmp eq i64 %a, %b
8  %result = sext i1 %cmp to i32
9  store i32 %result, i32 addrspace(1)* %out, align 4
10  ret void
11}
12
13; SI-LABEL: {{^}}test_i64_ne:
14; SI: v_cmp_ne_u64
15define amdgpu_kernel void @test_i64_ne(i32 addrspace(1)* %out, i64 %a, i64 %b) nounwind {
16  %cmp = icmp ne i64 %a, %b
17  %result = sext i1 %cmp to i32
18  store i32 %result, i32 addrspace(1)* %out, align 4
19  ret void
20}
21
22; SI-LABEL: {{^}}test_i64_slt:
23; SI: v_cmp_lt_i64
24define amdgpu_kernel void @test_i64_slt(i32 addrspace(1)* %out, i64 %a, i64 %b) nounwind {
25  %cmp = icmp slt i64 %a, %b
26  %result = sext i1 %cmp to i32
27  store i32 %result, i32 addrspace(1)* %out, align 4
28  ret void
29}
30
31; SI-LABEL: {{^}}test_i64_ult:
32; SI: v_cmp_lt_u64
33define amdgpu_kernel void @test_i64_ult(i32 addrspace(1)* %out, i64 %a, i64 %b) nounwind {
34  %cmp = icmp ult i64 %a, %b
35  %result = sext i1 %cmp to i32
36  store i32 %result, i32 addrspace(1)* %out, align 4
37  ret void
38}
39
40; SI-LABEL: {{^}}test_i64_sle:
41; SI: v_cmp_le_i64
42define amdgpu_kernel void @test_i64_sle(i32 addrspace(1)* %out, i64 %a, i64 %b) nounwind {
43  %cmp = icmp sle i64 %a, %b
44  %result = sext i1 %cmp to i32
45  store i32 %result, i32 addrspace(1)* %out, align 4
46  ret void
47}
48
49; SI-LABEL: {{^}}test_i64_ule:
50; SI: v_cmp_le_u64
51define amdgpu_kernel void @test_i64_ule(i32 addrspace(1)* %out, i64 %a, i64 %b) nounwind {
52  %cmp = icmp ule i64 %a, %b
53  %result = sext i1 %cmp to i32
54  store i32 %result, i32 addrspace(1)* %out, align 4
55  ret void
56}
57
58; SI-LABEL: {{^}}test_i64_sgt:
59; SI: v_cmp_gt_i64
60define amdgpu_kernel void @test_i64_sgt(i32 addrspace(1)* %out, i64 %a, i64 %b) nounwind {
61  %cmp = icmp sgt i64 %a, %b
62  %result = sext i1 %cmp to i32
63  store i32 %result, i32 addrspace(1)* %out, align 4
64  ret void
65}
66
67; SI-LABEL: {{^}}test_i64_ugt:
68; SI: v_cmp_gt_u64
69define amdgpu_kernel void @test_i64_ugt(i32 addrspace(1)* %out, i64 %a, i64 %b) nounwind {
70  %cmp = icmp ugt i64 %a, %b
71  %result = sext i1 %cmp to i32
72  store i32 %result, i32 addrspace(1)* %out, align 4
73  ret void
74}
75
76; SI-LABEL: {{^}}test_i64_sge:
77; SI: v_cmp_ge_i64
78define amdgpu_kernel void @test_i64_sge(i32 addrspace(1)* %out, i64 %a, i64 %b) nounwind {
79  %cmp = icmp sge i64 %a, %b
80  %result = sext i1 %cmp to i32
81  store i32 %result, i32 addrspace(1)* %out, align 4
82  ret void
83}
84
85; SI-LABEL: {{^}}test_i64_uge:
86; SI: v_cmp_ge_u64
87define amdgpu_kernel void @test_i64_uge(i32 addrspace(1)* %out, i64 %a, i64 %b) nounwind {
88  %cmp = icmp uge i64 %a, %b
89  %result = sext i1 %cmp to i32
90  store i32 %result, i32 addrspace(1)* %out, align 4
91  ret void
92}
93
94