1# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
2# RUN: llc -mtriple=amdgcn-mesa-mesa3d -run-pass=si-optimize-exec-masking-pre-ra -verify-machineinstrs %s -o - | FileCheck -check-prefix=GCN %s
3
# Check for regression from assuming an instruction was a copy after
# dropping the opcode check.
#
# The autogenerated GCN lines below mirror the input MIR
# instruction-for-instruction, i.e. the pass is expected to leave this
# function unchanged rather than folding the exec-mask sequence in bb.2
# (where "$exec = S_AND_B64 $exec, %0" reads %0, a plain COPY of $exec
# from bb.0 — NOTE(review): per the test name, the relevant property is
# that the S_OR_SAVEEXEC_B64 source %5 is not a copy; confirm against
# the pass).
---
name: exec_src1_is_not_copy
tracksRegLiveness: true
machineFunctionInfo:
  isEntryFunction: true
  scratchRSrcReg:  '$sgpr96_sgpr97_sgpr98_sgpr99'
  frameOffsetReg:  '$sgpr101'
body:             |
  ; GCN-LABEL: name: exec_src1_is_not_copy
  ; GCN: bb.0:
  ; GCN:   successors: %bb.1(0x40000000), %bb.2(0x40000000)
  ; GCN:   liveins: $vgpr0
  ; GCN:   [[COPY:%[0-9]+]]:sreg_64 = COPY $exec
  ; GCN:   [[DEF:%[0-9]+]]:vgpr_32 = IMPLICIT_DEF
  ; GCN:   [[V_CMP_NE_U32_e64_:%[0-9]+]]:sreg_64 = V_CMP_NE_U32_e64 0, [[DEF]], implicit $exec
  ; GCN:   [[COPY1:%[0-9]+]]:sreg_64 = COPY $exec, implicit-def $exec
  ; GCN:   [[S_AND_B64_:%[0-9]+]]:sreg_64 = S_AND_B64 [[COPY1]], [[V_CMP_NE_U32_e64_]], implicit-def dead $scc
  ; GCN:   [[S_XOR_B64_:%[0-9]+]]:sreg_64 = S_XOR_B64 [[S_AND_B64_]], [[COPY1]], implicit-def dead $scc
  ; GCN:   $exec = S_MOV_B64_term [[S_AND_B64_]]
  ; GCN:   SI_MASK_BRANCH %bb.2, implicit $exec
  ; GCN:   S_BRANCH %bb.1
  ; GCN: bb.1:
  ; GCN:   successors: %bb.2(0x80000000)
  ; GCN: bb.2:
  ; GCN:   successors: %bb.3(0x40000000), %bb.6(0x40000000)
  ; GCN:   [[S_OR_SAVEEXEC_B64_:%[0-9]+]]:sreg_64 = S_OR_SAVEEXEC_B64 [[S_XOR_B64_]], implicit-def $exec, implicit-def $scc, implicit $exec
  ; GCN:   $exec = S_AND_B64 $exec, [[COPY]], implicit-def dead $scc
  ; GCN:   [[S_AND_B64_1:%[0-9]+]]:sreg_64 = S_AND_B64 $exec, [[S_OR_SAVEEXEC_B64_]], implicit-def $scc
  ; GCN:   $exec = S_XOR_B64_term $exec, [[S_AND_B64_1]], implicit-def $scc
  ; GCN:   SI_MASK_BRANCH %bb.6, implicit $exec
  ; GCN:   S_BRANCH %bb.3
  ; GCN: bb.3:
  ; GCN:   successors: %bb.4(0x40000000), %bb.5(0x40000000)
  ; GCN:   [[V_CMP_NE_U32_e64_1:%[0-9]+]]:sreg_64 = V_CMP_NE_U32_e64 0, [[DEF]], implicit $exec
  ; GCN:   [[COPY2:%[0-9]+]]:sreg_64 = COPY $exec, implicit-def $exec
  ; GCN:   [[S_AND_B64_2:%[0-9]+]]:sreg_64 = S_AND_B64 [[COPY2]], [[V_CMP_NE_U32_e64_1]], implicit-def dead $scc
  ; GCN:   $exec = S_MOV_B64_term [[S_AND_B64_2]]
  ; GCN:   SI_MASK_BRANCH %bb.5, implicit $exec
  ; GCN:   S_BRANCH %bb.4
  ; GCN: bb.4:
  ; GCN:   successors: %bb.5(0x80000000)
  ; GCN: bb.5:
  ; GCN:   successors: %bb.6(0x80000000)
  ; GCN:   $exec = S_OR_B64 $exec, [[COPY2]], implicit-def $scc
  ; GCN: bb.6:
  ; GCN:   $exec = S_OR_B64 $exec, [[S_AND_B64_1]], implicit-def $scc
  bb.0:
    successors: %bb.1, %bb.2
    liveins: $vgpr0

    %0:sreg_64 = COPY $exec
    %1:vgpr_32 = IMPLICIT_DEF
    %2:sreg_64 = V_CMP_NE_U32_e64 0, %1, implicit $exec
    %3:sreg_64 = COPY $exec, implicit-def $exec
    %4:sreg_64 = S_AND_B64 %3, %2, implicit-def dead $scc
    %5:sreg_64 = S_XOR_B64 %4, %3, implicit-def dead $scc
    $exec = S_MOV_B64_term %4
    SI_MASK_BRANCH %bb.2, implicit $exec
    S_BRANCH %bb.1

  bb.1:

  bb.2:
    successors: %bb.3, %bb.6

    %6:sreg_64 = S_OR_SAVEEXEC_B64 %5, implicit-def $exec, implicit-def $scc, implicit $exec
    $exec = S_AND_B64 $exec, %0, implicit-def dead $scc
    %7:sreg_64 = S_AND_B64 $exec, %6, implicit-def $scc
    $exec = S_XOR_B64_term $exec, %7, implicit-def $scc
    SI_MASK_BRANCH %bb.6, implicit $exec
    S_BRANCH %bb.3

  bb.3:
    successors: %bb.4, %bb.5

    %8:sreg_64 = V_CMP_NE_U32_e64 0, %1, implicit $exec
    %9:sreg_64 = COPY $exec, implicit-def $exec
    %10:sreg_64 = S_AND_B64 %9, %8, implicit-def dead $scc
    $exec = S_MOV_B64_term %10
    SI_MASK_BRANCH %bb.5, implicit $exec
    S_BRANCH %bb.4

  bb.4:

  bb.5:
    $exec = S_OR_B64 $exec, %9, implicit-def $scc

  bb.6:
    $exec = S_OR_B64 $exec, %7, implicit-def $scc

...
97
# When folding a v_cndmask and a v_cmp in a pattern leading to
# s_cbranch_vccz, ensure that an undef operand is handled correctly.
#
# The V_CNDMASK_B32_e64 / V_CMP_NE_U32_e32 / S_AND_B64 sequence below is
# expected to collapse into a single "$vcc = S_ANDN2_B64 $exec, undef %1"
# per the GCN checks, with the undef flag on the original condition
# operand carried over to the folded instruction.
---
name: cndmask_cmp_cbranch_fold_undef
tracksRegLiveness: true
body:             |
  ; GCN-LABEL: name: cndmask_cmp_cbranch_fold_undef
  ; GCN: bb.0:
  ; GCN:   successors: %bb.1(0x80000000)
  ; GCN:   $vcc = S_ANDN2_B64 $exec, undef %1:sreg_64_xexec, implicit-def dead $scc
  ; GCN:   S_CBRANCH_VCCZ %bb.1, implicit $vcc
  ; GCN: bb.1:
  bb.0:

    %1:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, 1, undef %0:sreg_64_xexec, implicit $exec
    V_CMP_NE_U32_e32 1, %1, implicit-def $vcc, implicit $exec
    $vcc = S_AND_B64 $exec, $vcc, implicit-def dead $scc
    S_CBRANCH_VCCZ %bb.1, implicit $vcc

  bb.1:

...
120
# Don't crash on exec copy to SGPR subregister.
#
# %0.sub0 copies $exec into a subregister of a wider SGPR tuple
# (sgpr_256). The GCN checks only require that both instructions survive
# (now marked dead) — the test guards against a crash in the pass, not a
# particular transformation.
---
name: exec_copy_to_subreg
tracksRegLiveness: true
body:             |
  ; GCN-LABEL: name: exec_copy_to_subreg
  ; GCN: bb.0:
  ; GCN:   successors: %bb.1(0x80000000)
  ; GCN:   dead undef %0.sub0:sgpr_256 = COPY $exec
  ; GCN:   dead %1:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, 1, undef %2:sreg_64_xexec, implicit $exec
  ; GCN:   S_BRANCH %bb.1
  ; GCN: bb.1:
  bb.0:

    undef %0.sub0:sgpr_256 = COPY $exec
    %2:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, 1, undef %1:sreg_64_xexec, implicit $exec
    S_BRANCH %bb.1

  bb.1:

...
142