# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -mtriple=thumbv8.1m.main-none-eabi -mattr=+lob -run-pass=arm-mve-vpt-opts --verify-machineinstrs %s -o - | FileCheck %s

--- |
  @d = local_unnamed_addr global i32 0, align 4
  @c = local_unnamed_addr global [1 x i32] zeroinitializer, align 4

  define i32 @e() optsize {
  entry:
    %.pr = load i32, i32* @d, align 4
    %cmp13 = icmp sgt i32 %.pr, -1
    br i1 %cmp13, label %for.cond1.preheader.preheader, label %for.end9

  for.cond1.preheader.preheader:                    ; preds = %entry
    %0 = add i32 %.pr, 1
    %1 = call i32 @llvm.start.loop.iterations.i32(i32 %0)
    br label %for.cond1.preheader

  for.cond1.preheader:                              ; preds = %for.cond1.preheader.preheader, %for.cond1.preheader
    %2 = phi i32 [ %1, %for.cond1.preheader.preheader ], [ %3, %for.cond1.preheader ]
    call void @llvm.memset.p0i8.i32(i8* nonnull align 4 dereferenceable(24) bitcast ([1 x i32]* @c to i8*), i8 0, i32 24, i1 false)
    %3 = call i32 @llvm.loop.decrement.reg.i32(i32 %2, i32 1)
    %4 = icmp ne i32 %3, 0
    br i1 %4, label %for.cond1.preheader, label %for.cond.for.end9_crit_edge

  for.cond.for.end9_crit_edge:                      ; preds = %for.cond1.preheader
    store i32 -1, i32* @d, align 4
    br label %for.end9

  for.end9:                                         ; preds = %for.cond.for.end9_crit_edge, %entry
    ret i32 undef
  }

  declare void @llvm.memset.p0i8.i32(i8* nocapture writeonly, i8, i32, i1 immarg)
  declare i32 @llvm.start.loop.iterations.i32(i32)
  declare i32 @llvm.loop.decrement.reg.i32(i32, i32)

...
---
name:            e
alignment:       2
exposesReturnsTwice: false
legalized:       false
regBankSelected: false
selected:        false
failedISel:      false
tracksRegLiveness: true
hasWinCFI:       false
registers:
  - { id: 0, class: gprnopc, preferred-register: '' }
  - { id: 1, class: gpr, preferred-register: '' }
  - { id: 2, class: gprlr, preferred-register: '' }
  - { id: 3, class: gpr, preferred-register: '' }
  - { id: 4, class: rgpr, preferred-register: '' }
  - { id: 5, class: rgpr, preferred-register: '' }
  - { id: 6, class: gprlr, preferred-register: '' }
  - { id: 7, class: rgpr, preferred-register: '' }
  - { id: 8, class: rgpr, preferred-register: '' }
  - { id: 9, class: gprlr, preferred-register: '' }
  - { id: 10, class: gprlr, preferred-register: '' }
  - { id: 11, class: rgpr, preferred-register: '' }
  - { id: 12, class: rgpr, preferred-register: '' }
  - { id: 13, class: gpr, preferred-register: '' }
liveins:         []
body:             |
  ; CHECK-LABEL: name: e
  ; CHECK: bb.0.entry:
  ; CHECK:   successors: %bb.1(0x50000000), %bb.4(0x30000000)
  ; CHECK:   [[t2MOVi32imm:%[0-9]+]]:rgpr = t2MOVi32imm @d
  ; CHECK:   [[t2LDRi12_:%[0-9]+]]:gprnopc = t2LDRi12 [[t2MOVi32imm]], 0, 14 /* CC::al */, $noreg :: (dereferenceable load 4 from @d)
  ; CHECK:   t2CMPri [[t2LDRi12_]], 0, 14 /* CC::al */, $noreg, implicit-def $cpsr
  ; CHECK:   t2Bcc %bb.4, 4 /* CC::mi */, $cpsr
  ; CHECK:   t2B %bb.1, 14 /* CC::al */, $noreg
  ; CHECK: bb.1.for.cond1.preheader.preheader:
  ; CHECK:   successors: %bb.2(0x80000000)
  ; CHECK:   [[t2ADDri:%[0-9]+]]:rgpr = t2ADDri [[t2LDRi12_]], 1, 14 /* CC::al */, $noreg, $noreg
  ; CHECK:   [[tMOVr:%[0-9]+]]:gprlr = tMOVr killed [[t2ADDri]], 14 /* CC::al */, $noreg
  ; CHECK:   [[COPY:%[0-9]+]]:gpr = COPY [[tMOVr]]
  ; CHECK:   [[t2MOVi32imm1:%[0-9]+]]:rgpr = t2MOVi32imm @c
  ; CHECK:   [[t2MOVi:%[0-9]+]]:rgpr = t2MOVi 24, 14 /* CC::al */, $noreg, $noreg
  ; CHECK: bb.2.for.cond1.preheader:
  ; CHECK:   successors: %bb.2(0x7c000000), %bb.3(0x04000000)
  ; CHECK:   [[PHI:%[0-9]+]]:gprlr = PHI [[COPY]], %bb.1, %3, %bb.2
  ; CHECK:   ADJCALLSTACKDOWN 0, 0, 14 /* CC::al */, $noreg, implicit-def dead $sp, implicit $sp
  ; CHECK:   $r0 = COPY [[t2MOVi32imm1]]
  ; CHECK:   $r1 = COPY [[t2MOVi]]
  ; CHECK:   tBL 14 /* CC::al */, $noreg, &__aeabi_memclr4, csr_aapcs, implicit-def dead $lr, implicit $sp, implicit $r0, implicit $r1, implicit-def $sp
  ; CHECK:   ADJCALLSTACKUP 0, 0, 14 /* CC::al */, $noreg, implicit-def dead $sp, implicit $sp
  ; CHECK:   [[t2SUBri:%[0-9]+]]:gprlr = t2SUBri [[PHI]], 1, 14 /* CC::al */, $noreg, $noreg
  ; CHECK:   [[COPY1:%[0-9]+]]:gpr = COPY [[t2SUBri]]
  ; CHECK:   t2CMPri [[t2SUBri]], 0, 14 /* CC::al */, $noreg, implicit-def $cpsr
  ; CHECK:   t2Bcc %bb.2, 1 /* CC::ne */, $cpsr
  ; CHECK:   t2B %bb.3, 14 /* CC::al */, $noreg
  ; CHECK: bb.3.for.cond.for.end9_crit_edge:
  ; CHECK:   successors: %bb.4(0x80000000)
  ; CHECK:   [[t2MOVi1:%[0-9]+]]:rgpr = t2MOVi -1, 14 /* CC::al */, $noreg, $noreg
  ; CHECK:   t2STRi12 killed [[t2MOVi1]], [[t2MOVi32imm]], 0, 14 /* CC::al */, $noreg :: (store 4 into @d)
  ; CHECK: bb.4.for.end9:
  ; CHECK:   [[DEF:%[0-9]+]]:gpr = IMPLICIT_DEF
  ; CHECK:   $r0 = COPY [[DEF]]
  ; CHECK:   tBX_RET 14 /* CC::al */, $noreg, implicit $r0
  bb.0.entry:
    successors: %bb.1(0x50000000), %bb.4(0x30000000)

    %4:rgpr = t2MOVi32imm @d
    %0:gprnopc = t2LDRi12 %4, 0, 14 /* CC::al */, $noreg :: (dereferenceable load 4 from @d)
    t2CMPri %0, 0, 14 /* CC::al */, $noreg, implicit-def $cpsr
    t2Bcc %bb.4, 4 /* CC::mi */, $cpsr
    t2B %bb.1, 14 /* CC::al */, $noreg

  bb.1.for.cond1.preheader.preheader:
    successors: %bb.2(0x80000000)

    %5:rgpr = t2ADDri %0, 1, 14 /* CC::al */, $noreg, $noreg
    %6:gprlr = t2DoLoopStart killed %5
    %1:gpr = COPY %6
    %7:rgpr = t2MOVi32imm @c
    %8:rgpr = t2MOVi 24, 14 /* CC::al */, $noreg, $noreg

  bb.2.for.cond1.preheader:
    successors: %bb.2(0x7c000000), %bb.3(0x04000000)

    %2:gprlr = PHI %1, %bb.1, %3, %bb.2
    ADJCALLSTACKDOWN 0, 0, 14 /* CC::al */, $noreg, implicit-def dead $sp, implicit $sp
    $r0 = COPY %7
    $r1 = COPY %8
    tBL 14 /* CC::al */, $noreg, &__aeabi_memclr4, csr_aapcs, implicit-def dead $lr, implicit $sp, implicit $r0, implicit $r1, implicit-def $sp
    ADJCALLSTACKUP 0, 0, 14 /* CC::al */, $noreg, implicit-def dead $sp, implicit $sp
    %9:gprlr = t2LoopDec %2, 1
    %3:gpr = COPY %9
    t2LoopEnd %9, %bb.2, implicit-def dead $cpsr
    t2B %bb.3, 14 /* CC::al */, $noreg

  bb.3.for.cond.for.end9_crit_edge:
    successors: %bb.4(0x80000000)

    %12:rgpr = t2MOVi -1, 14 /* CC::al */, $noreg, $noreg
    t2STRi12 killed %12, %4, 0, 14 /* CC::al */, $noreg :: (store 4 into @d)

  bb.4.for.end9:
    %13:gpr = IMPLICIT_DEF
    $r0 = COPY %13
    tBX_RET 14 /* CC::al */, $noreg, implicit $r0

...