# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -mtriple aarch64-unknown-unknown -run-pass=instruction-select -verify-machineinstrs %s -o - | FileCheck %s
#
# Check folding a G_AND into a G_BRCOND which has been matched as a TB(N)Z.
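#
# TBNZW %reg, b, %bb branches to %bb when bit b of %reg is set (TBZW branches
# when it is clear), so a G_AND feeding the tested value can be dropped
# whenever its mask has the tested bit set: the AND leaves that bit unchanged.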
...
---
name:            fold_and_rhs
alignment:       4
legalized:       true
regBankSelected: true
body:             |
  ; CHECK-LABEL: name: fold_and_rhs
  ; CHECK: bb.0:
  ; CHECK:   successors: %bb.0(0x40000000), %bb.1(0x40000000)
  ; CHECK:   %copy:gpr64all = COPY $x0
  ; CHECK:   [[COPY:%[0-9]+]]:gpr32all = COPY %copy.sub_32
  ; CHECK:   [[COPY1:%[0-9]+]]:gpr32 = COPY [[COPY]]
  ; CHECK:   TBNZW [[COPY1]], 3, %bb.1
  ; CHECK:   B %bb.0
  ; CHECK: bb.1:
  ; CHECK:   RET_ReallyLR
  bb.0:
    successors: %bb.0, %bb.1
    liveins: $x0
    %copy:gpr(s64) = COPY $x0
    %bit:gpr(s64) = G_CONSTANT i64 8
    %zero:gpr(s64) = G_CONSTANT i64 0
    %fold_cst:gpr(s64) = G_CONSTANT i64 8

    ; tbnz (and x, 8), 3 == tbnz x, 3, because bit 3 of x & 8 is 1 exactly
    ; when bit 3 of x is 1 (8 == 1 << 3), so the mask preserves the tested bit.
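    ; For example, x = 0b1000 gives x & 8 = 0b1000 (bit 3 set, branch taken),
    ; while x = 0b0111 gives x & 8 = 0 (bit 3 clear, branch not taken).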
    %fold_me:gpr(s64) = G_AND %copy, %fold_cst

    %and:gpr(s64) = G_AND %fold_me, %bit
    %cmp:gpr(s32) = G_ICMP intpred(ne), %and(s64), %zero
    %cmp_trunc:gpr(s1) = G_TRUNC %cmp(s32)
    G_BRCOND %cmp_trunc(s1), %bb.1
    G_BR %bb.0
  bb.1:
    RET_ReallyLR
...
---
name:            fold_and_lhs
alignment:       4
legalized:       true
regBankSelected: true
body:             |
  ; CHECK-LABEL: name: fold_and_lhs
  ; CHECK: bb.0:
  ; CHECK:   successors: %bb.0(0x40000000), %bb.1(0x40000000)
  ; CHECK:   %copy:gpr64all = COPY $x0
  ; CHECK:   [[COPY:%[0-9]+]]:gpr32all = COPY %copy.sub_32
  ; CHECK:   [[COPY1:%[0-9]+]]:gpr32 = COPY [[COPY]]
  ; CHECK:   TBNZW [[COPY1]], 3, %bb.1
  ; CHECK:   B %bb.0
  ; CHECK: bb.1:
  ; CHECK:   RET_ReallyLR
  bb.0:
    successors: %bb.0, %bb.1
    liveins: $x0
    %copy:gpr(s64) = COPY $x0
    %bit:gpr(s64) = G_CONSTANT i64 8
    %zero:gpr(s64) = G_CONSTANT i64 0
    %fold_cst:gpr(s64) = G_CONSTANT i64 8

    ; Same as above, but with the constant on the left-hand side of the G_AND.
    %fold_me:gpr(s64) = G_AND %fold_cst, %copy

    %and:gpr(s64) = G_AND %fold_me, %bit
    %cmp:gpr(s32) = G_ICMP intpred(ne), %and(s64), %zero
    %cmp_trunc:gpr(s1) = G_TRUNC %cmp(s32)
    G_BRCOND %cmp_trunc(s1), %bb.1
    G_BR %bb.0
  bb.1:
    RET_ReallyLR
...
---
name:            dont_fold_and
alignment:       4
legalized:       true
regBankSelected: true
body:             |
  ; CHECK-LABEL: name: dont_fold_and
  ; CHECK: bb.0:
  ; CHECK:   successors: %bb.0(0x40000000), %bb.1(0x40000000)
  ; CHECK:   %copy:gpr64 = COPY $x0
  ; CHECK:   %fold_me:gpr64sp = ANDXri %copy, 4098
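  ; (4098 is the AArch64 logical-immediate encoding of 7, so the AND with 7
  ; survives selection here instead of being folded into the bit test.)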
  ; CHECK:   [[COPY:%[0-9]+]]:gpr32all = COPY %fold_me.sub_32
  ; CHECK:   [[COPY1:%[0-9]+]]:gpr32 = COPY [[COPY]]
  ; CHECK:   TBNZW [[COPY1]], 3, %bb.1
  ; CHECK:   B %bb.0
  ; CHECK: bb.1:
  ; CHECK:   RET_ReallyLR
  bb.0:
    successors: %bb.0, %bb.1
    liveins: $x0
    %copy:gpr(s64) = COPY $x0
    %bit:gpr(s64) = G_CONSTANT i64 8
    %zero:gpr(s64) = G_CONSTANT i64 0

    ; tbnz (and x, 7), 3 != tbnz x, 3, because bit 3 of x & 7 is always zero
    ; (7 == 0b111 has bit 3 clear), so the G_AND cannot be folded away.
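    ; For example, x = 0b1000: bit 3 of x is set, but x & 7 = 0, so the two
    ; tests would branch differently.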
    %fold_cst:gpr(s64) = G_CONSTANT i64 7

    %fold_me:gpr(s64) = G_AND %copy, %fold_cst
    %and:gpr(s64) = G_AND %fold_me, %bit
    %cmp:gpr(s32) = G_ICMP intpred(ne), %and(s64), %zero
    %cmp_trunc:gpr(s1) = G_TRUNC %cmp(s32)
    G_BRCOND %cmp_trunc(s1), %bb.1
    G_BR %bb.0
  bb.1:
    RET_ReallyLR
...