/external/swiftshader/third_party/LLVM/lib/Target/Sparc/ |
D | DelaySlotFiller.cpp
      223  if (MO.isUse()) {  in delayHasHazard()
      245  assert(Reg.isUse() && "JMPL first operand is not a use.");  in insertCallUses()
      252  assert(RegOrImm.isUse() && "JMPLrr second operand is not a use.");  in insertCallUses()
      273  if (MO.isUse())  in insertDefsUses()
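These Sparc delay-slot hits share one shape: delayHasHazard() walks a candidate instruction's operands and rejects the candidate when a register use or def collides with what the branch/call reads or writes. Below is a minimal, self-contained sketch of that operand scan, assuming SmallSet-based def/use sets; the helper name is illustrative and this is not the in-tree pass code.

    // Minimal sketch of the delay-slot hazard test (helper name is illustrative).
    #include "llvm/ADT/SmallSet.h"
    #include "llvm/CodeGen/MachineInstr.h"
    #include "llvm/CodeGen/MachineOperand.h"

    using namespace llvm;

    static bool candidateHasHazard(const MachineInstr &MI,
                                   const SmallSet<unsigned, 32> &RegDefs,
                                   const SmallSet<unsigned, 32> &RegUses) {
      for (unsigned i = 0, e = MI.getNumOperands(); i != e; ++i) {
        const MachineOperand &MO = MI.getOperand(i);
        if (!MO.isReg())
          continue;
        unsigned Reg = MO.getReg();
        if (MO.isDef() && RegUses.count(Reg))
          return true; // candidate clobbers a register the slot's owner still reads
        if (MO.isUse() && RegDefs.count(Reg))
          return true; // candidate reads a register the slot's owner defines
      }
      return false;
    }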
|
/external/llvm/lib/CodeGen/ |
D | ProcessImplicitDefs.cpp
      72   if (MO.isReg() && MO.isUse() && MO.readsReg())  in canTurnIntoImplicitDef()
      112  if (MO.isUse())  in processImplicitDef()
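The hit at line 72 pairs isUse() with readsReg(): an operand flagged undef still occupies a use slot but does not actually read the register. A small sketch of that distinction follows; the helper name is hypothetical, and the range-based operands() accessor it uses exists in this newer tree (the older swiftshader copies use an index loop).

    // Does MI contain an operand that genuinely reads Reg (a non-undef use)?
    // Hypothetical helper, shown only to illustrate the isUse()/readsReg() pairing.
    #include "llvm/CodeGen/MachineInstr.h"
    #include "llvm/CodeGen/MachineOperand.h"

    using namespace llvm;

    static bool hasRealReadOf(const MachineInstr &MI, unsigned Reg) {
      for (const MachineOperand &MO : MI.operands())
        if (MO.isReg() && MO.isUse() && MO.readsReg() && MO.getReg() == Reg)
          return true;
      return false;
    }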
|
D | ExpandPostRAPseudos.cpp
      74   if (!MO.isReg() || !MO.isImplicit() || MO.isUse())  in TransferImplicitDefs()
      84   (MI->getOperand(2).isReg() && MI->getOperand(2).isUse()) &&  in LowerSubregToReg()
|
D | MachineInstr.cpp
      350   if (isUndef() && isUse()) {  in print()
      848   if (NewMO->isUse()) {  in addOperand()
      1194  if (getOperand(OpIdx).isUse() && isRegTiedToDefOperand(OpIdx, &DefIdx))  in getRegClassConstraint()
      1278  if (MO.isReg() && MO.isUse() && MO.isImplicit() && MO.getReg() == Reg)  in hasRegisterImplicitUseOperand()
      1291  if (!MO.isReg() || !MO.isUse())  in findRegisterUseOperandIdx()
      1323  if (MO.isUse())  in readsWritesVirtualRegister()
      1403  assert(UseMO.isUse() && "UseIdx must be a use operand");  in tieOperands()
      1435  if (MO.isUse())  in findTiedOperandIdx()
      1440  if (UseMO.isReg() && UseMO.isUse() && UseMO.TiedTo == OpIdx + 1)  in findTiedOperandIdx()
      1483  if (MO.isReg() && MO.isUse())  in clearKillInfo()
      [all …]
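Several of these MachineInstr.cpp hits (findRegisterUseOperandIdx, findTiedOperandIdx, readsWritesVirtualRegister) are variations of one scan: walk the operand list and keep only register uses, optionally with extra conditions. A condensed sketch of the findRegisterUseOperandIdx() shape is below; it is a free function with an illustrative name so it is not mistaken for the real member.

    // Return the index of the first operand that is a (optionally killing)
    // register use of Reg, or -1 if there is none. Illustrative, not the member.
    #include "llvm/CodeGen/MachineInstr.h"
    #include "llvm/CodeGen/MachineOperand.h"

    using namespace llvm;

    static int firstUseOperandIdx(const MachineInstr &MI, unsigned Reg,
                                  bool RequireKill = false) {
      for (unsigned i = 0, e = MI.getNumOperands(); i != e; ++i) {
        const MachineOperand &MO = MI.getOperand(i);
        if (!MO.isReg() || !MO.isUse())   // the same gate as the hits above
          continue;
        if (MO.getReg() != Reg)
          continue;
        if (RequireKill && !MO.isKill())
          continue;
        return static_cast<int>(i);
      }
      return -1;
    }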
|
D | TwoAddressInstructionPass.cpp
      208   if (MO.isUse() && MOReg != SavedReg)  in sink3AddrInstruction()
      367   if (MO.isUse() && DI->second < LastUse)  in noUseAfterLastDef()
      478   if (!MO.isReg() || !MO.isUse() || MO.getReg() != Reg)  in isTwoAddrUse()
      1053  if (MO.isUse()) {  in rescheduleKillAboveMI()
      1092  if (MO.isUse()) {  in rescheduleKillAboveMI()
      1342  if (MO.isUse()) {  in tryInstructionTransform()
      1421  assert(SrcReg && SrcMO.isUse() && "two address instruction invalid");  in collectTiedOperands()
      1534  assert(MO.isReg() && MO.getReg() == RegB && MO.isUse() &&  in processTiedPairs()
      1560  MO.isUse()) {  in processTiedPairs()
      1596  if (MO.isReg() && MO.getReg() == RegB && MO.isUse()) {  in processTiedPairs()
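The TwoAddressInstructionPass hits revolve around tied operands: a use that is tied to a def is what forces the A = A op B rewriting. A rough sketch of the isTwoAddrUse() idea follows, under the assumption that checking isRegTiedToDefOperand() on the use index is enough for illustration; the in-tree helper additionally reports the tied destination register.

    // Does MI read Reg through an operand that is tied to one of its defs?
    // Sketch only; name and shape are illustrative.
    #include "llvm/CodeGen/MachineInstr.h"
    #include "llvm/CodeGen/MachineOperand.h"

    using namespace llvm;

    static bool readsRegAsTiedUse(const MachineInstr &MI, unsigned Reg) {
      for (unsigned i = 0, e = MI.getNumOperands(); i != e; ++i) {
        const MachineOperand &MO = MI.getOperand(i);
        if (!MO.isReg() || !MO.isUse() || MO.getReg() != Reg)
          continue;
        unsigned DefIdx;
        if (MI.isRegTiedToDefOperand(i, &DefIdx))
          return true; // Reg is the "A" in an A = A op B two-address constraint
      }
      return false;
    }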
|
D | RegAllocFast.cpp
      240  if (MO.isUse() && !LR.LastUse->isRegTiedToDefOperand(LR.LastOpNum)) {  in addKillFlag()
      620  if (LRI->LastUse != &MI || LRI->LastUse->getOperand(LRI->LastOpNum).isUse())  in defineVirtReg()
      653  if (MO.isUse())  in reloadVirtReg()
      753  if (MO.isUse()) {  in handleThroughOperands()
      942  if (MO.isUse()) {  in AllocateBasicBlock()
      954  if (MO.isUse()) {  in AllocateBasicBlock()
      990  if (MO.isUse()) {  in AllocateBasicBlock()
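The addKillFlag() hit shows the fast allocator's rule: the last use of a live range may carry a kill flag unless that use is tied to a def, because a tied use stays live into the def. The sketch below captures just that condition; LiveRegState is an illustrative stand-in for the allocator's per-register bookkeeping, not its actual struct.

    #include "llvm/CodeGen/MachineInstr.h"
    #include "llvm/CodeGen/MachineOperand.h"

    using namespace llvm;

    struct LiveRegState {
      MachineInstr *LastUse = nullptr; // instruction containing the last use
      unsigned LastOpNum = 0;          // operand index of that use within LastUse
    };

    static void addKillFlagIfPossible(const LiveRegState &LR) {
      if (!LR.LastUse)
        return;
      MachineOperand &MO = LR.LastUse->getOperand(LR.LastOpNum);
      // Only a plain (non-tied) use may be marked as the killing use here.
      if (MO.isUse() && !LR.LastUse->isRegTiedToDefOperand(LR.LastOpNum))
        MO.setIsKill();
    }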
|
D | MachineSink.cpp
      377  if (!MO.isReg() || !MO.isUse())  in isWorthBreakingCriticalEdge()
      603  if (MO.isUse()) {  in FindSuccToSinkTo()
      615  if (MO.isUse()) continue;  in FindSuccToSinkTo()
      852  if (MO.isReg() && MO.isUse())  in SinkInstruction()
|
D | RegisterScavenging.cpp
      128  if (MO.isUse()) {  in determineKillsAndDefs()
      198  if (MO.isUse()) {  in forward()
      359  if (MO.isReg() && MO.getReg() != 0 && !(MO.isUse() && MO.isUndef()) &&  in scavengeRegister()
|
D | CriticalAntiDepBreaker.cpp
      226  if (MO.isUse() && Special) {  in PrescanInstruction()
      292  if (!MO.isUse()) continue;  in ScanInstruction()
      603  if (MO.isUse() && TRI->regsOverlap(AntiDepReg, Reg)) {  in BreakAntiDependencies()
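The BreakAntiDependencies() hit guards renaming: the anti-dependent register cannot be renamed across an instruction whose register uses overlap it, where overlap includes sub- and super-registers rather than plain equality. A hedged sketch of that guard, with an illustrative function name:

    #include "llvm/CodeGen/MachineInstr.h"
    #include "llvm/CodeGen/MachineOperand.h"
    #include "llvm/CodeGen/TargetRegisterInfo.h" // llvm/Target/... in the older trees

    using namespace llvm;

    static bool usesOverlapAntiDepReg(const MachineInstr &MI, unsigned AntiDepReg,
                                      const TargetRegisterInfo &TRI) {
      for (unsigned i = 0, e = MI.getNumOperands(); i != e; ++i) {
        const MachineOperand &MO = MI.getOperand(i);
        if (!MO.isReg() || !MO.isUse())
          continue;
        // regsOverlap() also catches aliasing sub/super-registers.
        if (TRI.regsOverlap(AntiDepReg, MO.getReg()))
          return true;
      }
      return false;
    }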
|
/external/swiftshader/third_party/LLVM/lib/CodeGen/ |
D | TwoAddressInstructionPass.cpp
      199   if (MO.isUse() && MOReg != SavedReg)  in Sink3AddrInstruction()
      355   if (MO.isUse() && DI->second < LastUse)  in NoUseAfterLastDef()
      381   if (MO.isUse() && DI->second > LastUseDist) {  in FindLastUseInMBB()
      459   if (!MO.isReg() || !MO.isUse() || MO.getReg() != Reg)  in isTwoAddrUse()
      785   if (MO.isUse() && MO.isKill())  in isSafeToDelete()
      993   if (MO.isUse()) {  in TryInstructionTransform()
      1106  mi->getOperand(SrcIdx).isUse() &&  in runOnMachineFunction()
      1201  assert(MO.isReg() && MO.getReg() == regB && MO.isUse() &&  in runOnMachineFunction()
      1215  if (MO.isReg() && MO.getReg() == regB && MO.isUse()) {  in runOnMachineFunction()
      1236  if (MO.isReg() && MO.getReg() == regB && MO.isUse()) {  in runOnMachineFunction()
|
D | MachineInstr.cpp
      870   if (getOperand(OpIdx).isUse() && isRegTiedToDefOperand(OpIdx, &DefIdx))  in getRegClassConstraint()
      897   if (!MO.isReg() || !MO.isUse())  in findRegisterUseOperandIdx()
      929   if (MO.isUse())  in readsWritesVirtualRegister()
      1011  if (i+1 >= e || !getOperand(i+1).isReg() || !getOperand(i+1).isUse())  in isRegTiedToUseOperand()
      1028  if (MO.isReg() && MO.isUse() &&  in isRegTiedToUseOperand()
      1045  if (!MO.isReg() || !MO.isUse() || MO.getReg() == 0)  in isRegTiedToDefOperand()
      1079  if (!MO.isReg() || !MO.isUse())  in isRegTiedToDefOperand()
      1094  if (MO.isReg() && MO.isUse())  in clearKillInfo()
      1202  if (MO.isUse())  in isSafeToReMat()
      1310  if (!MO.isReg() || MO.isUse())  in allDefsAreDead()
      [all …]
|
D | ExpandPostRAPseudos.cpp
      94   if (!MO.isReg() || !MO.isImplicit() || MO.isUse())  in TransferImplicitDefs()
      104  (MI->getOperand(2).isReg() && MI->getOperand(2).isUse()) &&  in LowerSubregToReg()
|
D | RegAllocFast.cpp
      215  if (MO.isUse() && !LR.LastUse->isRegTiedToDefOperand(LR.LastOpNum)) {  in addKillFlag()
      562  if (LR.LastUse != MI || LR.LastUse->getOperand(LR.LastOpNum).isUse())  in defineVirtReg()
      595  if (MO.isUse())  in reloadVirtReg()
      691  if (MO.isUse()) {  in handleThroughOperands()
      885  if (MO.isUse()) {  in AllocateBasicBlock()
      897  if (MO.isUse()) {  in AllocateBasicBlock()
      933  if (MO.isUse()) {  in AllocateBasicBlock()
|
D | Spiller.cpp
      113  hasUse |= mi->getOperand(i).isUse();  in trivialSpillEverywhere()
      130  if (mop.isUse() && !mi->isRegTiedToDefOperand(mopIdx)) {  in trivialSpillEverywhere()
|
D | TargetInstrInfoImpl.cpp
      338  assert(MI->getOperand(Ops[i]).isUse() && "Folding load into def!");  in foldMemoryOperand()
      413  if (MO.isUse()) {  in isReallyTriviallyReMaterializableGeneric()
      445  if (MO.isUse())  in isReallyTriviallyReMaterializableGeneric()
|
D | MachineCSE.cpp
      116  if (!MO.isReg() || !MO.isUse())  in PerformTrivialCoalescing()
      170  if (MO.isUse())  in isPhysDefTriviallyDead()
      302  if (MO.isReg() && MO.isUse() &&  in isProfitableToCSE()
|
D | CriticalAntiDepBreaker.cpp
      235  if (MO.isUse() && Special) {  in PrescanInstruction()
      294  if (!MO.isUse()) continue;  in ScanInstruction()
      588  if (MO.isUse() && TRI->regsOverlap(AntiDepReg, Reg)) {  in BreakAntiDependencies()
|
/external/llvm/lib/Target/Sparc/ |
D | DelaySlotFiller.cpp
      266  if (MO.isUse()) {  in delayHasHazard()
      307  assert(Reg.isUse() && "CALL first operand is not a use.");  in insertCallDefsUses()
      314  assert(Operand1.isUse() && "CALLrr second operand is not a use.");  in insertCallDefsUses()
      335  if (MO.isUse()) {  in insertDefsUses()
|
/external/swiftshader/third_party/LLVM/lib/Target/Mips/ |
D | MipsDelaySlotFiller.cpp
      209  if (MO.isUse()) {  in delayHasHazard()
      240  else if (MO.isUse())  in insertDefsUses()
|
/external/llvm/include/llvm/CodeGen/ |
D | MachineRegisterInfo.h
      839  if ((!ReturnUses && op->isUse()) ||  in defusechain_iterator()
      854  if (Op->isUse())  in advance()
      941  if ((!ReturnUses && op->isUse()) ||  in defusechain_instr_iterator()
      956  if (Op->isUse())  in advance()
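These defusechain_iterator hits are the machinery behind MachineRegisterInfo's use/def iterator families: advance() skips operands whose isUse()/isDef() flag does not match what the iterator was asked to return. The sketch below shows what that filtering gives a client, assuming the newer tree's reg_operands() range accessor (the swiftshader copy only exposes the raw begin/end iterators); the function name is illustrative.

    // Count how many operands in the function read VReg. reg_operands() visits
    // every operand on the register's use/def chain; isUse() keeps only the reads.
    #include "llvm/CodeGen/MachineOperand.h"
    #include "llvm/CodeGen/MachineRegisterInfo.h"

    using namespace llvm;

    static unsigned countVRegReads(const MachineRegisterInfo &MRI, unsigned VReg) {
      unsigned NumReads = 0;
      for (const MachineOperand &MO : MRI.reg_operands(VReg))
        if (MO.isUse())
          ++NumReads;
      return NumReads;
    }

In the newer tree, MRI.use_operands(VReg) applies the same isUse() filter internally, so the explicit check above would then be unnecessary.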
|
/external/llvm/lib/Target/Lanai/ |
D | LanaiDelaySlotFiller.cpp
      217  if (MO.isUse()) {  in delayHasHazard()
      243  else if (MO.isUse())  in insertDefsUses()
|
/external/swiftshader/third_party/LLVM/include/llvm/CodeGen/ |
D | MachineRegisterInfo.h
      353  if ((!ReturnUses && op->isUse()) ||  in defusechain_iterator()
      385  while (Op && ((!ReturnUses && Op->isUse()) ||
|
/external/llvm/lib/Target/AMDGPU/ |
D | SIInsertWaits.cpp
      268  if (I->isReg() && I->isUse())  in isOpRelevant()
      350  if (Op.isUse())  in pushInstruction()
      480  if (Op.isUse())  in handleOperands()
|
/external/llvm/lib/Target/Hexagon/ |
D | HexagonNewValueJump.cpp
      154  (II->getOperand(i).isUse() || II->getOperand(i).isDef())) {  in INITIALIZE_PASS_DEPENDENCY()
      607  if (MO.isReg() && MO.isUse()) {  in runOnMachineFunction()
      614  if (localMO.isReg() && localMO.isUse() &&  in runOnMachineFunction()
|
D | HexagonGenPredicate.cpp
      238  assert(DefI->getOperand(0).isDef() && DefI->getOperand(1).isUse());  in getPredRegFor()
      334  if (Mo->isReg() && Mo->isUse())  in isScalarPred()
      356  if (!MO.isReg() || !MO.isUse())  in convertToPredForm()
|