
Searched refs:isSGPRClass (Results 1 – 14 of 14) sorted by relevance

/external/llvm/lib/Target/AMDGPU/
SIFixSGPRCopies.cpp 154 return TRI.isSGPRClass(DstRC) && TRI.hasVGPRs(SrcRC); in isVGPRToSGPRCopy()
160 return TRI.isSGPRClass(SrcRC) && TRI.hasVGPRs(DstRC); in isSGPRToVGPRCopy()
183 if (!TRI->isSGPRClass(MRI.getRegClass(DstReg))) in foldVGPRCopyIntoRegSequence()
221 assert(TRI->isSGPRClass(SrcRC) && in foldVGPRCopyIntoRegSequence()
276 if (!TRI->isSGPRClass(MRI.getRegClass(Reg))) in runOnMachineFunction()
360 if (TRI->isSGPRClass(DstRC) && in runOnMachineFunction()
SIRegisterInfo.h 93 bool isSGPRClass(const TargetRegisterClass *RC) const { in isSGPRClass() function
99 return isSGPRClass(getRegClass(RCID)); in isSGPRClassID()
108 return isSGPRClass(RC); in isSGPRReg()
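The SIRegisterInfo.h hits above are the definition plus its isSGPRClassID/isSGPRReg wrappers, and the SIFixSGPRCopies.cpp hits show the most common caller pattern: classifying a copy by whether its destination class is scalar while its source class contains vector registers. A minimal standalone sketch of that classification, using stand-in types instead of the real TargetRegisterClass/SIRegisterInfo API (the enum and helper names here are assumptions for illustration only):

// Simplified model of the copy classification seen in SIFixSGPRCopies.cpp.
// RegClassKind, isSGPRClass() and hasVGPRs() are stand-ins for the real
// SIRegisterInfo queries, not the LLVM API itself.
#include <cassert>

enum class RegClassKind { SGPR, VGPR };

static bool isSGPRClass(RegClassKind RC) { return RC == RegClassKind::SGPR; }
static bool hasVGPRs(RegClassKind RC)    { return RC == RegClassKind::VGPR; }

// A copy is "VGPR to SGPR" when the destination class is scalar but the
// source class contains vector registers (mirrors isVGPRToSGPRCopy above).
static bool isVGPRToSGPRCopy(RegClassKind SrcRC, RegClassKind DstRC) {
  return isSGPRClass(DstRC) && hasVGPRs(SrcRC);
}

static bool isSGPRToVGPRCopy(RegClassKind SrcRC, RegClassKind DstRC) {
  return isSGPRClass(SrcRC) && hasVGPRs(DstRC);
}

int main() {
  assert(isVGPRToSGPRCopy(RegClassKind::VGPR, RegClassKind::SGPR));
  assert(!isVGPRToSGPRCopy(RegClassKind::SGPR, RegClassKind::VGPR));
  assert(isSGPRToVGPRCopy(RegClassKind::SGPR, RegClassKind::VGPR));
  return 0;
}

Roughly, SIFixSGPRCopies singles out the VGPR-to-SGPR direction because the scalar unit cannot read vector registers directly, so such copies have to be rewritten rather than lowered as plain moves.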
SIInstrInfo.cpp 525 return RI.isSGPRClass(DstRC) ? AMDGPU::S_MOV_B32 : AMDGPU::V_MOV_B32_e32; in getMovOpcode()
526 } else if (DstRC->getSize() == 8 && RI.isSGPRClass(DstRC)) { in getMovOpcode()
528 } else if (DstRC->getSize() == 8 && !RI.isSGPRClass(DstRC)) { in getMovOpcode()
589 if (RI.isSGPRClass(RC)) { in storeRegToStackSlot()
687 if (RI.isSGPRClass(RC)) { in loadRegFromStackSlot()
1246 if (!Src1->isReg() || RI.isSGPRClass(MRI->getRegClass(Src1->getReg()))) in FoldImmediate()
1249 if (!Src2->isReg() || RI.isSGPRClass(MRI->getRegClass(Src2->getReg()))) in FoldImmediate()
1293 (Src0->isReg() && RI.isSGPRClass(MRI->getRegClass(Src0->getReg())))) in FoldImmediate()
1296 if (!Src1->isReg() || RI.isSGPRClass(MRI->getRegClass(Src1->getReg()))) in FoldImmediate()
1588 return RI.isSGPRClass(MRI.getRegClass(MO.getReg())); in usesConstantBus()
[all …]
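The getMovOpcode() hits (lines 525-528) pick a move instruction purely from the destination class: a 32-bit destination gets S_MOV_B32 for scalar classes and V_MOV_B32_e32 otherwise, with separate 64-bit cases whose opcodes are not visible in the excerpt. A standalone sketch of that decision; the 64-bit opcode names below (S_MOV_B64, V_MOV_B64_PSEUDO) and the fallback are assumptions, not taken from the hits:

// Standalone model of the opcode choice shown in getMovOpcode().
// Only the 32-bit choice appears verbatim above; the 64-bit opcode names
// and the COPY fallback are illustrative assumptions.
#include <cstdio>

enum class Opcode { S_MOV_B32, V_MOV_B32_e32, S_MOV_B64, V_MOV_B64_PSEUDO, COPY };

struct RegClassModel {
  unsigned SizeInBytes; // what DstRC->getSize() would report (4 or 8)
  bool IsSGPR;          // what RI.isSGPRClass(DstRC) would report
};

static Opcode getMovOpcode(const RegClassModel &DstRC) {
  if (DstRC.SizeInBytes == 4)
    return DstRC.IsSGPR ? Opcode::S_MOV_B32 : Opcode::V_MOV_B32_e32;
  if (DstRC.SizeInBytes == 8 && DstRC.IsSGPR)
    return Opcode::S_MOV_B64;
  if (DstRC.SizeInBytes == 8 && !DstRC.IsSGPR)
    return Opcode::V_MOV_B64_PSEUDO;
  return Opcode::COPY; // fall back for other sizes
}

int main() {
  RegClassModel Sgpr32{4, true}, Vgpr64{8, false};
  std::printf("%d %d\n", (int)getMovOpcode(Sgpr32), (int)getMovOpcode(Vgpr64));
  return 0;
}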
SIRegisterInfo.cpp 774 if (isSGPRClass(RC)) { in getSubRegClass()
SIInstrInfo.td 449 if (RC && SIRI->isSGPRClass(RC))
/external/swiftshader/third_party/llvm-7.0/llvm/lib/Target/AMDGPU/
SIRegisterInfo.h 132 bool isSGPRClass(const TargetRegisterClass *RC) const { in isSGPRClass() function
138 return isSGPRClass(getRegClass(RCID)); in isSGPRClassID()
147 return isSGPRClass(RC); in isSGPRReg()
SIFixSGPRCopies.cpp 186 return TRI.isSGPRClass(DstRC) && TRI.hasVGPRs(SrcRC); in isVGPRToSGPRCopy()
192 return TRI.isSGPRClass(SrcRC) && TRI.hasVGPRs(DstRC); in isSGPRToVGPRCopy()
240 if (!TRI->isSGPRClass(MRI.getRegClass(DstReg))) in foldVGPRCopyIntoRegSequence()
285 assert(TRI->isSGPRClass(SrcRC) && in foldVGPRCopyIntoRegSequence()
626 if (!TRI->isSGPRClass(MRI.getRegClass(Reg))) in runOnMachineFunction()
700 if (TRI->isSGPRClass(DstRC) && in runOnMachineFunction()
SIInstrInfo.cpp 562 if (RI.isSGPRClass(RC)) { in copyPhysReg()
571 if (!RI.isSGPRClass(RI.getPhysRegClass(SrcReg))) { in copyPhysReg()
654 if (RI.isSGPRClass(RegClass)) { in materializeImmediate()
812 return RI.isSGPRClass(DstRC) ? AMDGPU::S_MOV_B32 : AMDGPU::V_MOV_B32_e32; in getMovOpcode()
813 } else if (RI.getRegSizeInBits(*DstRC) == 64 && RI.isSGPRClass(DstRC)) { in getMovOpcode()
815 } else if (RI.getRegSizeInBits(*DstRC) == 64 && !RI.isSGPRClass(DstRC)) { in getMovOpcode()
877 if (RI.isSGPRClass(RC)) { in storeRegToStackSlot()
988 if (RI.isSGPRClass(RC)) { in loadRegFromStackSlot()
1800 return !RI.isSGPRClass(RC) && NumInsts <= 6; in canInsertSelect()
1817 return RI.isSGPRClass(RC); in canInsertSelect()
[all …]
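The canInsertSelect() hits (lines 1800 and 1817) use the same query to gate if-conversion: the vector path is only accepted for non-SGPR classes under a small instruction budget, while the scalar path simply requires an SGPR class. A standalone sketch of those two checks, with the 6-instruction budget taken from line 1800 and everything else (names, the split into two helpers) an assumption:

// Standalone model of the two canInsertSelect() checks visible above.
// "IsSGPR" stands in for RI.isSGPRClass(RC); the helper names are illustrative.
#include <cassert>

// Vector path: only profitable for non-scalar classes under a small budget.
static bool canInsertVectorSelect(bool IsSGPR, unsigned NumInsts) {
  return !IsSGPR && NumInsts <= 6;
}

// Scalar path: a scalar (SGPR) class is always eligible.
static bool canInsertScalarSelect(bool IsSGPR) {
  return IsSGPR;
}

int main() {
  assert(canInsertVectorSelect(/*IsSGPR=*/false, /*NumInsts=*/4));
  assert(!canInsertVectorSelect(/*IsSGPR=*/false, /*NumInsts=*/8));
  assert(canInsertScalarSelect(/*IsSGPR=*/true));
  return 0;
}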
SIRegisterInfo.cpp 688 if (SpillToSMEM && isSGPRClass(RC)) { in spillSGPR()
859 if (SpillToSMEM && isSGPRClass(RC)) { in restoreSGPR()
1321 if (isSGPRClass(RC)) { in getSubRegClass()
GCNRegPressure.cpp 90 return STI->isSGPRClass(RC) ? in getRegKind()
AMDGPURegisterBankInfo.cpp 92 if (TRI->isSGPRClass(&RC)) in getRegBankFromRegClass()
AMDGPUInstructionSelector.cpp 345 IsSgpr = TRI.isSGPRClass(RC); in selectG_CONSTANT()
SIInstrInfo.td 503 if (!RC || SIRI->isSGPRClass(RC))
SIISelLowering.cpp 2996 if (!TII->getRegisterInfo().isSGPRClass(IdxRC)) in setM0ToIndexFromSGPR()