/external/llvm/test/CodeGen/ARM/
D | copy-cpsr.ll | 5 ; In the ARM backend, most compares are glued to their uses so CPSR can't
    8 ; copying CPSR.
    14 ; CPSR is used twice).
    15 ; + We want both chains to write CPSR post-split (so that the copy can't be
|
D | avoid-cpsr-rmw.ll | 3 ; Avoid some 's' 16-bit instruction which partially update CPSR (and add false
    4 ; dependency) when it isn't dependent on last CPSR defining instruction.
    21 ; Avoid partial CPSR dependency via loop backedge.
    54 ; Allow partial CPSR dependency when code size is the priority.
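Note: a 16-bit flag-setting Thumb instruction writes only the NZCV bits of CPSR, so an out-of-order core must merge the remaining bits from the previous CPSR value, i.e. a false (partial-update) dependency. A minimal sketch of the trade-off these three test cases probe, with hypothetical names, not the in-tree Thumb2SizeReduction logic:

    // Decide whether picking the narrow 16-bit 's' encoding would introduce
    // a false dependency worth avoiding.
    bool narrowFormAddsFalseDep(bool PartialCPSRUpdate,
                                bool DependsOnLastCPSRDef, bool OptForSize) {
      if (OptForSize)               // "code size is the priority" (line 54):
        return false;               // the narrow encoding wins regardless
      if (!PartialCPSRUpdate)
        return false;               // full CPSR writes rename cleanly
      return !DependsOnLastCPSRDef; // independent partial update: false dep
    }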
|
D | cse-call.ll | 5 ; Don't CSE a cmp across a call that clobbers CPSR.
|
D | crash-O0.ll | 7 ; This function would crash RegAllocFast because it tried to spill %CPSR.
|
D | interrupt-attr.ll | 13 ; Also need special function return setting pc and CPSR simultaneously.
|
D | sub-cmp-peephole.ll | 50 ; If CPSR is live-out, we can't remove cmp if there exists
|
/external/llvm/lib/Target/ARM/ |
D | Thumb2SizeReduction.cpp | 222 if (*Regs == ARM::CPSR) in HasImplicitCPSRDef()
    270 if (Reg == 0 || Reg == ARM::CPSR) in canAddPseudoFlagDep()
    350 if (Reg == 0 || Reg == ARM::CPSR) in VerifyLowRegs()
    569 MI->getOperand(MCID.getNumOperands()-1).getReg() == ARM::CPSR) in ReduceSpecial()
    720 HasCC = (MI->getOperand(NumOps-1).getReg() == ARM::CPSR); in ReduceTo2Addr()
    787 if (!Reg || Reg == ARM::CPSR) in ReduceToNarrow()
    815 HasCC = (MI->getOperand(NumOps-1).getReg() == ARM::CPSR); in ReduceToNarrow()
    856 if (MO.isReg() && MO.isImplicit() && MO.getReg() == ARM::CPSR) in ReduceToNarrow()
    880 if (MO.getReg() != ARM::CPSR) in UpdateCPSRDef()
    895 if (MO.getReg() != ARM::CPSR) in UpdateCPSRUse()
    [all …]
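The hits share one idiom: scan a MachineInstr's operands for a CPSR definition and check whether it is live. A sketch of that check against LLVM's MachineInstr API (the MCTargetDesc include path is an assumption):

    #include "MCTargetDesc/ARMMCTargetDesc.h" // ARM::CPSR register enum
    #include "llvm/CodeGen/MachineInstr.h"

    using namespace llvm;

    // True if MI defines CPSR and the def is not dead, i.e. a later
    // instruction consumes the flags, so a flag-setting encoding must stay.
    static bool definesLiveCPSR(const MachineInstr &MI) {
      for (const MachineOperand &MO : MI.operands())
        if (MO.isReg() && MO.isDef() && MO.getReg() == ARM::CPSR &&
            !MO.isDead())
          return true;
      return false;
    }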
|
D | Thumb2ITBlockPass.cpp | 88 if (Reg == ARM::CPSR) in TrackDefUses()
    157 MI->getOperand(MCID.getNumOperands() - 1).getReg() == ARM::CPSR) in MoveCopyOutOfITBlock()
|
D | ARMBaseInstrInfo.cpp | 509 if ((MO.isRegMask() && MO.clobbersPhysReg(ARM::CPSR)) || in DefinesPredicate()
    510 (MO.isReg() && MO.isDef() && MO.getReg() == ARM::CPSR)) { in DefinesPredicate()
    521 if (MO.isReg() && MO.getReg() == ARM::CPSR && MO.isDef() && !MO.isDead()) in isCPSRDefined()
    585 if (MO.getReg() != ARM::CPSR) in IsCPSRDead()
    677 MIB.addReg(ARM::CPSR, RegState::Implicit | getKillRegState(KillSrc)); in copyFromCPSR()
    699 MIB.addReg(ARM::CPSR, RegState::Implicit | RegState::Define); in copyToCPSR()
    789 } else if (SrcReg == ARM::CPSR) { in copyPhysReg()
    792 } else if (DestReg == ARM::CPSR) { in copyPhysReg()
    1811 if (CC == ARMCC::AL || PredReg != ARM::CPSR) in commuteInstructionImpl()
    2449 if (Instr.modifiesRegister(ARM::CPSR, TRI) || in optimizeCompareInstr()
    [all …]
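copyFromCPSR/copyToCPSR exist because CPSR cannot be moved with a plain register MOV; the flags go through MRS (read) and MSR (write), with the implicit CPSR operand recorded for liveness. A simplified sketch of the MRS side (real BuildMI API; the predicate operands and Thumb/M-class opcode selection of the in-tree code are omitted):

    #include "MCTargetDesc/ARMMCTargetDesc.h" // ARM::CPSR (assumed path)
    #include "llvm/CodeGen/MachineInstrBuilder.h"

    // Read the current flags into DestReg via an MRS-style instruction and
    // note the implicit CPSR use so liveness stays correct.
    void emitCopyFromCPSR(llvm::MachineBasicBlock &MBB,
                          llvm::MachineBasicBlock::iterator I,
                          const llvm::DebugLoc &DL,
                          const llvm::MCInstrDesc &MRS, unsigned DestReg,
                          bool KillSrc) {
      llvm::BuildMI(MBB, I, DL, MRS, DestReg)
          .addReg(llvm::ARM::CPSR,
                  llvm::RegState::Implicit | llvm::getKillRegState(KillSrc));
    }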
|
D | ARMInstrInfo.td | 85 // SDTBinaryArithWithFlagsInOut - RES1, CPSR = op LHS, RHS, CPSR
    1369 /// AdjustInstrPostInstrSelection after giving them an optional CPSR operand.
    1370 let hasPostISelHook = 1, Defs = [CPSR] in {
    1376 [(set GPR:$Rd, CPSR, (opnode GPR:$Rn, mod_imm:$imm))]>,
    1381 [(set GPR:$Rd, CPSR, (opnode GPR:$Rn, GPR:$Rm))]>,
    1388 [(set GPR:$Rd, CPSR, (opnode GPR:$Rn,
    1395 [(set GPR:$Rd, CPSR, (opnode GPR:$Rn,
    1403 let hasPostISelHook = 1, Defs = [CPSR] in {
    1409 [(set GPR:$Rd, CPSR, (opnode mod_imm:$imm, GPR:$Rn))]>,
    1415 [(set GPR:$Rd, CPSR, (opnode so_reg_imm:$shift,
    [all …]
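These patterns match nodes that produce a value plus flags, so the selected instruction always defines CPSR; hasPostISelHook then lets AdjustInstrPostInstrSelection relax that when nothing reads the flags. A hedged sketch of the demotion step only (hypothetical helper; the real hook also rewrites the optional cc_out operand):

    #include "MCTargetDesc/ARMMCTargetDesc.h" // ARM::CPSR (assumed path)
    #include "llvm/CodeGen/MachineInstr.h"

    // Mark the CPSR def dead when the produced flags have no reader.
    static void demoteUnreadCPSRDef(llvm::MachineInstr &MI, bool FlagsRead) {
      for (llvm::MachineOperand &MO : MI.operands())
        if (MO.isReg() && MO.isDef() && MO.getReg() == llvm::ARM::CPSR)
          MO.setIsDead(!FlagsRead);
    }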
|
D | ARMInstrThumb.td | 365 // tADDrSPi, but we may need to insert a sequence that clobbers CPSR.
    369 let Defs = [CPSR];
    872 let isCommutable = 1, Uses = [CPSR] in
    951 let isCompare = 1, Defs = [CPSR] in {
    966 } // isCompare = 1, Defs = [CPSR]
    969 let isCompare = 1, Defs = [CPSR] in {
    998 } // isCompare = 1, Defs = [CPSR]
    1060 (tMOVi8 tGPR:$Rdn, CPSR, imm0_255:$imm, 14, 0)>;
    1076 let Defs = [CPSR] in
    1157 let Uses = [CPSR] in
    [all …]
|
D | ARMFastISel.cpp | 214 bool DefinesOptionalPredicate(MachineInstr *MI, bool *CPSR);
    228 bool ARMFastISel::DefinesOptionalPredicate(MachineInstr *MI, bool *CPSR) { in DefinesOptionalPredicate() argument
    236 if (MO.getReg() == ARM::CPSR) in DefinesOptionalPredicate()
    237 *CPSR = true; in DefinesOptionalPredicate()
    274 bool CPSR = false; in AddOptionalDefs() local
    275 if (DefinesOptionalPredicate(MI, &CPSR)) { in AddOptionalDefs()
    276 if (CPSR) in AddOptionalDefs()
    1277 .addMBB(TBB).addImm(ARMPred).addReg(ARM::CPSR); in SelectBranch()
    1300 .addMBB(TBB).addImm(CCMode).addReg(ARM::CPSR); in SelectBranch()
    1338 .addMBB(TBB).addImm(CCMode).addReg(ARM::CPSR); in SelectBranch()
    [all …]
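DefinesOptionalPredicate/AddOptionalDefs deal with the two optional trailing operand groups most ARM machine instructions carry: 'pred' (condition code plus predicate register) and 'cc_out' (CPSR if the instruction sets flags, the zero register if not). The convention, sketched with the standard MachineInstrBuilder API (helper name hypothetical; cf. the AddDefault*-style helper at ARMBaseInstrInfo.h:407 below):

    #include "MCTargetDesc/ARMMCTargetDesc.h" // ARMCC::AL (assumed path)
    #include "llvm/CodeGen/MachineInstrBuilder.h"

    // Append "execute always, do not set flags". Passing ARM::CPSR as the
    // final operand instead would mark the instruction flag-setting, which
    // is the *CPSR = true path in the hits above.
    static const llvm::MachineInstrBuilder &
    addUnpredicatedNoFlags(const llvm::MachineInstrBuilder &MIB) {
      return MIB.addImm(llvm::ARMCC::AL) // condition: always
                .addReg(0)               // no predicate register
                .addReg(0);              // cc_out: does not write CPSR
    }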
|
D | ARMInstrThumb2.td | 576 /// changed to modify CPSR.
    702 /// instruction modifies the CPSR register.
    705 /// AdjustInstrPostInstrSelection after giving then an optional CPSR operand.
    706 let hasPostISelHook = 1, Defs = [CPSR] in {
    714 [(set rGPR:$Rd, CPSR, (opnode GPRnopc:$Rn,
    720 [(set rGPR:$Rd, CPSR, (opnode GPRnopc:$Rn,
    729 [(set rGPR:$Rd, CPSR, (opnode GPRnopc:$Rn,
    737 let hasPostISelHook = 1, Defs = [CPSR] in {
    743 [(set rGPR:$Rd, CPSR, (opnode t2_so_imm:$imm,
    750 [(set rGPR:$Rd, CPSR, (opnode t2_so_reg:$ShiftedRm,
    [all …]
|
D | ARMMCInstLower.cpp | 74 if (MO.isImplicit() && MO.getReg() != ARM::CPSR) in lowerOperand()
|
D | ARM.td | 119 /// Some instructions update CPSR partially, which can add false dependency for
    121 /// mapped to a separate physical register. Avoid partial CPSR update for these
    125 "Avoid CPSR partial update for OOO execution">;
|
D | ARMBaseInstrInfo.h | 407 return MIB.addReg(ARM::CPSR, getDefRegState(true) | getDeadRegState(isDead));
|
D | ARMISelLowering.cpp | 3435 SDValue CCR = DAG.getRegister(ARM::CPSR, MVT::i32); in LowerXALUO()
    3466 SDValue CCR = DAG.getRegister(ARM::CPSR, MVT::i32); in LowerSELECT()
    3637 SDValue CCR = DAG.getRegister(ARM::CPSR, MVT::i32); in LowerSELECT_CC()
    3663 SDValue CCR = DAG.getRegister(ARM::CPSR, MVT::i32); in LowerSELECT_CC()
    3775 SDValue CCR = DAG.getRegister(ARM::CPSR, MVT::i32); in OptimizeVFPBrcond()
    3819 SDValue CCR = DAG.getRegister(ARM::CPSR, MVT::i32); in LowerBR_CC()
    3839 SDValue CCR = DAG.getRegister(ARM::CPSR, MVT::i32); in LowerBR_CC()
    4277 SDValue CCR = DAG.getRegister(ARM::CPSR, MVT::i32); in LowerShiftRightParts()
    4311 SDValue CCR = DAG.getRegister(ARM::CPSR, MVT::i32); in LowerShiftLeftParts()
    7022 .addReg(ARM::CPSR, RegState::Define) in SetupEntryBlockForSjLj()
    [all …]
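The recurring DAG.getRegister(ARM::CPSR, MVT::i32) materializes the flags as an explicit CCR operand that conditional nodes consume together with the compare's glue. A sketch of the shape the SELECT/BR_CC lowerings share, as it would sit inside ARMISelLowering.cpp (ARMISD::CMOV operand order per this tree; the compare itself comes from a getARMCmp-style helper in the surrounding file):

    // Build CMOV(FalseVal, TrueVal, ARMcc, CCR, CmpGlue): yields TrueVal
    // when the flags in CPSR satisfy the ARMcc condition.
    SDValue buildCMOV(SelectionDAG &DAG, SDLoc dl, SDValue FalseVal,
                      SDValue TrueVal, SDValue ARMcc, SDValue CmpGlue) {
      SDValue CCR = DAG.getRegister(ARM::CPSR, MVT::i32);
      return DAG.getNode(ARMISD::CMOV, dl, TrueVal.getValueType(), FalseVal,
                         TrueVal, ARMcc, CCR, CmpGlue);
    }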
|
D | ARMRegisterInfo.td | 163 def CPSR : ARMReg<0, "cpsr">;
    263 def CCR : RegisterClass<"ARM", [i32], 32, (add CPSR)> {
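These two defs are the whole model: CPSR is a physical register, and CCR is the singleton i32 register class wrapping it so condition values can name it in the DAG; the class is never actually allocated or spilled (the AArch64 tests below make the same point for its NZCV equivalent). A quick illustration against the TableGen-generated names (include path assumed):

    #include "ARMBaseRegisterInfo.h" // generated register enums/classes (assumed)

    // CCR contains exactly one register, so membership means "is CPSR".
    bool isConditionRegister(unsigned Reg) {
      return llvm::ARM::CCRRegClass.contains(Reg); // true only for ARM::CPSR
    }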
|
D | README-Thumb.txt | 226 to toggle the 's' bit since they do not set CPSR when they are inside IT blocks.
|
/external/v8/src/arm/ |
D | constants-arm.h | 234 CPSR = 0 << 22, enumerator
    257 CPSR_c = CPSR | 1 << 16,
    258 CPSR_x = CPSR | 1 << 17,
    259 CPSR_s = CPSR | 1 << 18,
    260 CPSR_f = CPSR | 1 << 19,
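In this encoding, bit 22 distinguishes CPSR from SPSR in the MRS/MSR instruction forms, and bits 16-19 are the MSR field masks selecting which CPSR fields (control, extension, status, flags) a write touches. A self-contained check of the bit layout, with V8's two enums collapsed into one for the demo (values copied from the hits above):

    #include <cstdint>
    #include <cstdio>

    enum SRegisterFieldMask : uint32_t {
      CPSR   = 0u << 22,         // bit 22 clear: CPSR, not SPSR
      CPSR_c = CPSR | 1u << 16,  // control field (mode, IRQ/FIQ masks)
      CPSR_x = CPSR | 1u << 17,  // extension field
      CPSR_s = CPSR | 1u << 18,  // status field
      CPSR_f = CPSR | 1u << 19,  // flags field (N, Z, C, V)
    };

    int main() {
      // "msr cpsr_fc, rX" ORs the flags and control masks together:
      uint32_t fc = CPSR_f | CPSR_c;
      std::printf("CPSR_fc mask = 0x%08x\n", fc); // prints 0x00090000
    }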
|
/external/llvm/test/CodeGen/AArch64/ |
D | arm64-2011-04-21-CPSRBug.ll | 3 ; CPSR is not allocatable so fast allocatable wouldn't mark them killed.
|
D | arm64-2011-03-09-CPSRSpill.ll | 3 ; Can't copy or spill / restore CPSR.
|
D | arm64-csel.ll | 77 ; If CPSR is used multiple times and V flag is used, we don't remove cmp.
|
/external/llvm/test/CodeGen/Thumb2/ |
D | v8_IT_6.ll | 3 ; Narrow tORR cannot be predicated and set CPSR at the same time!
|
/external/llvm/lib/Target/ARM/AsmParser/ |
D | ARMAsmParser.cpp | 1686 unsigned RegNum = getCondCode() == ARMCC::AL ? 0: ARM::CPSR; in addCondCodeOperands()
    5768 Operands.push_back(ARMOperand::CreateCCOut(CarrySetting ? ARM::CPSR : 0, in ParseInstruction()
    7841 Inst.getOperand(5).getReg() == (inITBlock() ? 0 : ARM::CPSR) && in processInstruction()
    7892 Inst.getOpcode() == ARM::t2MOVSsr ? ARM::CPSR : 0)); in processInstruction()
    7899 Inst.getOpcode() == ARM::t2MOVSsr ? ARM::CPSR : 0)); in processInstruction()
    7929 Inst.getOpcode() == ARM::t2MOVSsi ? ARM::CPSR : 0)); in processInstruction()
    7937 Inst.getOpcode() == ARM::t2MOVSsi ? ARM::CPSR : 0)); in processInstruction()
    8124 ((!inITBlock() && Inst.getOperand(5).getReg() != ARM::CPSR) || in processInstruction()
    8277 Inst.getOperand(4).getReg() == ARM::CPSR) || in processInstruction()
    8300 Inst.getOperand(4).getReg() == ARM::CPSR && in processInstruction()
    [all …]
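Nearly all of these parser hits make the same decision: the trailing cc_out operand becomes ARM::CPSR when the mnemonic carried an 's' suffix and the zero register otherwise, and the narrow Thumb forms set flags only outside an IT block, which is what the inITBlock() ? 0 : ARM::CPSR checks encode. Sketched as a hypothetical helper (CarrySetting named after the hit at 5768; include path assumed):

    #include "MCTargetDesc/ARMMCTargetDesc.h" // ARM::CPSR (assumed path)

    // Pick the register for the cc_out operand of a parsed instruction.
    unsigned ccOutReg(bool CarrySetting, bool InITBlock) {
      if (InITBlock)
        return 0;                              // IT-predicated: no CPSR write
      return CarrySetting ? llvm::ARM::CPSR : 0;
    }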
|