# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -mtriple=amdgcn--amdpal -mcpu=gfx900 -run-pass simple-register-coalescing -verify-machineinstrs -o - %s | FileCheck -check-prefix GCN %s
#
# Runs the simple-register-coalescing pass on a small _amdgpu_ps_main with a
# subregister-carried loop value (%52/%71 sub0/sub3); CHECK lines below were
# produced by update_mir_test_checks.py — regenerate rather than hand-edit.
---
name:            _amdgpu_ps_main
alignment:       1
tracksRegLiveness: true
registers:
  - { id: 0, class: sgpr_128 }
  - { id: 1, class: sreg_32_xm0, preferred-register: '%2' }
  - { id: 2, class: sreg_32_xm0, preferred-register: '%1' }
machineFunctionInfo:
  argumentInfo:
    privateSegmentBuffer: { reg: '$sgpr0_sgpr1_sgpr2_sgpr3' }
    privateSegmentWaveByteOffset: { reg: '$sgpr33' }
body:             |
  ; GCN-LABEL: name: _amdgpu_ps_main
  ; GCN: bb.0:
  ; GCN:   successors: %bb.1(0x80000000)
  ; GCN:   %3:vgpr_32 = nofpexcept V_TRUNC_F32_e32 undef %4:vgpr_32, implicit $mode, implicit $exec
  ; GCN:   %5:vgpr_32 = nofpexcept V_CVT_U32_F32_e32 %3, implicit $mode, implicit $exec
  ; GCN:   [[V_LSHRREV_B32_e32_:%[0-9]+]]:vgpr_32 = V_LSHRREV_B32_e32 4, %5, implicit $exec
  ; GCN:   undef %11.sub0:vreg_128 = V_MUL_LO_I32 [[V_LSHRREV_B32_e32_]], 3, implicit $exec
  ; GCN:   %11.sub3:vreg_128 = COPY %11.sub0
  ; GCN:   [[S_MOV_B32_:%[0-9]+]]:sreg_32_xm0 = S_MOV_B32 0
  ; GCN: bb.1:
  ; GCN:   successors: %bb.1(0x7c000000), %bb.2(0x04000000)
  ; GCN:   [[COPY:%[0-9]+]]:vreg_128 = COPY %11
  ; GCN:   %11.sub3:vreg_128 = V_ADD_U32_e32 target-flags(amdgpu-rel32-lo) 1, [[COPY]].sub3, implicit $exec
  ; GCN:   [[S_ADD_I32_:%[0-9]+]]:sreg_32_xm0 = S_ADD_I32 [[S_ADD_I32_]], 1, implicit-def dead $scc
  ; GCN:   S_CMP_LT_U32 [[S_ADD_I32_]], 3, implicit-def $scc
  ; GCN:   S_CBRANCH_SCC1 %bb.1, implicit killed $scc
  ; GCN:   S_BRANCH %bb.2
  ; GCN: bb.2:
  ; GCN:   successors: %bb.5(0x40000000), %bb.3(0x40000000)
  ; GCN:   S_CBRANCH_SCC1 %bb.5, implicit undef $scc
  ; GCN:   S_BRANCH %bb.3
  ; GCN: bb.3:
  ; GCN:   successors: %bb.4(0x80000000)
  ; GCN:   dead %16:vreg_128 = BUFFER_LOAD_FORMAT_XYZW_IDXEN [[COPY]].sub3, undef %17:sgpr_128, 0, 0, 0, 0, 0, 0, 0, implicit $exec :: (dereferenceable load 16 from constant-pool, align 1, addrspace 4)
  ; GCN:   dead %18:vgpr_32 = V_MOV_B32_e32 -1, implicit $exec
  ; GCN:   [[S_AND_B64_:%[0-9]+]]:sreg_64 = S_AND_B64 $exec, -1, implicit-def dead $scc
  ; GCN:   dead %20:vgpr_32 = V_MOV_B32_e32 0, implicit $exec
  ; GCN: bb.4:
  ; GCN:   successors: %bb.4(0x7c000000), %bb.6(0x04000000)
  ; GCN:   $vcc = COPY [[S_AND_B64_]]
  ; GCN:   S_CBRANCH_VCCNZ %bb.4, implicit killed $vcc
  ; GCN:   S_BRANCH %bb.6
  ; GCN: bb.5:
  ; GCN:   %21:vgpr_32 = nofpexcept V_MUL_F32_e32 target-flags(amdgpu-gotprel) 0, %11.sub0, implicit $mode, implicit $exec
  ; GCN:   %22:vgpr_32 = nofpexcept V_MIN_F32_e32 1106771968, %21, implicit $mode, implicit $exec
  ; GCN:   %23:vgpr_32 = nnan arcp contract reassoc nofpexcept V_MAD_F32 0, %22, 0, 0, 0, 0, 0, 0, implicit $mode, implicit $exec
  ; GCN:   %24:vgpr_32 = nnan arcp contract reassoc nofpexcept V_MAD_F32 0, %23, 0, 0, 0, 0, 0, 0, implicit $mode, implicit $exec
  ; GCN:   %25:vgpr_32 = nofpexcept V_MAD_F32 0, %24, 0, 0, 0, 0, 0, 0, implicit $mode, implicit $exec
  ; GCN:   %26:vgpr_32 = nofpexcept V_CVT_PKRTZ_F16_F32_e64 0, %25, 0, undef %27:vgpr_32, 0, 0, implicit $mode, implicit $exec
  ; GCN:   EXP_DONE 0, %26, undef %28:vgpr_32, undef %29:vgpr_32, undef %30:vgpr_32, -1, -1, 15, implicit $exec
  ; GCN:   S_ENDPGM 0
  ; GCN: bb.6:
  ; GCN:   S_ENDPGM 0
  bb.0:
    %10:vgpr_32 = nofpexcept V_TRUNC_F32_e32 undef %11:vgpr_32, implicit $mode, implicit $exec
    %12:vgpr_32 = nofpexcept V_CVT_U32_F32_e32 killed %10, implicit $mode, implicit $exec
    %50:vgpr_32 = V_LSHRREV_B32_e32 4, killed %12, implicit $exec
    %51:vgpr_32 = V_MUL_LO_I32 killed %50, 3, implicit $exec
    undef %52.sub0:vreg_128 = COPY %51
    %52.sub3:vreg_128 = COPY %51
    %9:sreg_32_xm0 = S_MOV_B32 0
    %70:sreg_32_xm0 = COPY killed %9
    %71:vreg_128 = COPY killed %52

  bb.1:
    successors: %bb.1(0x7c000000), %bb.2(0x04000000)

    %53:vreg_128 = COPY killed %71
    %1:sreg_32_xm0 = COPY killed %70
    %57:vgpr_32 = V_ADD_U32_e32 target-flags(amdgpu-rel32-lo) 1, %53.sub3, implicit $exec
    %55:vreg_128 = COPY %53
    %55.sub3:vreg_128 = COPY killed %57
    %2:sreg_32_xm0 = S_ADD_I32 killed %1, 1, implicit-def dead $scc
    S_CMP_LT_U32 %2, 3, implicit-def $scc
    %54:vreg_128 = COPY %55
    %70:sreg_32_xm0 = COPY killed %2
    %71:vreg_128 = COPY killed %54
    S_CBRANCH_SCC1 %bb.1, implicit killed $scc
    S_BRANCH %bb.2

  bb.2:
    S_CBRANCH_SCC1 %bb.5, implicit undef $scc
    S_BRANCH %bb.3

  bb.3:
    dead %22:vreg_128 = BUFFER_LOAD_FORMAT_XYZW_IDXEN killed %53.sub3, undef %24:sgpr_128, 0, 0, 0, 0, 0, 0, 0, implicit $exec :: (dereferenceable load 16 from constant-pool, align 1, addrspace 4)
    dead %60:vgpr_32 = V_MOV_B32_e32 -1, implicit $exec
    %36:sreg_64 = S_AND_B64 $exec, -1, implicit-def dead $scc
    dead %67:vgpr_32 = V_MOV_B32_e32 0, implicit $exec

  bb.4:
    successors: %bb.4(0x7c000000), %bb.6(0x04000000)

    $vcc = COPY %36
    S_CBRANCH_VCCNZ %bb.4, implicit killed $vcc
    S_BRANCH %bb.6

  bb.5:
    %39:vgpr_32 = nofpexcept V_MUL_F32_e32 target-flags(amdgpu-gotprel) 0, killed %55.sub0, implicit $mode, implicit $exec
    %41:vgpr_32 = nofpexcept V_MIN_F32_e32 1106771968, killed %39, implicit $mode, implicit $exec
    %42:vgpr_32 = nnan arcp contract reassoc nofpexcept V_MAD_F32 0, killed %41, 0, 0, 0, 0, 0, 0, implicit $mode, implicit $exec
    %43:vgpr_32 = nnan arcp contract reassoc nofpexcept V_MAD_F32 0, killed %42, 0, 0, 0, 0, 0, 0, implicit $mode, implicit $exec
    %44:vgpr_32 = nofpexcept V_MAD_F32 0, killed %43, 0, 0, 0, 0, 0, 0, implicit $mode, implicit $exec
    %45:vgpr_32 = nofpexcept V_CVT_PKRTZ_F16_F32_e64 0, killed %44, 0, undef %46:vgpr_32, 0, 0, implicit $mode, implicit $exec
    EXP_DONE 0, killed %45, undef %47:vgpr_32, undef %48:vgpr_32, undef %49:vgpr_32, -1, -1, 15, implicit $exec
    S_ENDPGM 0

  bb.6:
    S_ENDPGM 0

...