Searched refs:isX86_64ExtendedReg (Results 1 – 5 of 5) sorted by relevance
92   if (X86II::isX86_64ExtendedReg(SrcReg))                          in getVEXRegisterEncoding()
724  if (X86II::isX86_64ExtendedReg(MI.getOperand(MemOperand +        in EmitVEXOpcodePrefix()
727  if (X86II::isX86_64ExtendedReg(MI.getOperand(MemOperand +        in EmitVEXOpcodePrefix()
748  if (X86II::isX86_64ExtendedReg(MO.getReg()))                     in EmitVEXOpcodePrefix()
765  if (X86II::isX86_64ExtendedReg(MI.getOperand(CurOp).getReg()))   in EmitVEXOpcodePrefix()
781  if (X86II::isX86_64ExtendedReg(                                  in EmitVEXOpcodePrefix()
784  if (X86II::isX86_64ExtendedReg(                                  in EmitVEXOpcodePrefix()
816  if (X86II::isX86_64ExtendedReg(                                  in EmitVEXOpcodePrefix()
819  if (X86II::isX86_64ExtendedReg(                                  in EmitVEXOpcodePrefix()
833  if (X86II::isX86_64ExtendedReg(MI.getOperand(CurOp).getReg()))   in EmitVEXOpcodePrefix()
[all …]
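All of the EmitVEXOpcodePrefix() hits above feed the same computation: whether each operand sits in a REX-extended register decides the (inverted) VEX.R, VEX.X and VEX.B bits of the prefix. Below is a minimal, self-contained sketch of that bit derivation; the helpers isExtendedReg() and buildVex3Byte2() and the flat register numbering are assumptions made for the sketch, not LLVM's API.

    #include <cstdint>
    #include <cstdio>

    // Stand-in for X86II::isX86_64ExtendedReg(): true for registers that only
    // exist in 64-bit mode (R8-R15, XMM8-XMM15, ...).  Flat numbering assumed.
    static bool isExtendedReg(unsigned regNo) { return regNo >= 8; }

    // Second byte of a 3-byte VEX prefix: the R, X and B extension bits are
    // stored inverted, above the 5-bit opcode-map field.
    static uint8_t buildVex3Byte2(unsigned modrmReg, unsigned indexReg,
                                  unsigned baseReg, uint8_t mapSelect) {
      uint8_t byte = mapSelect & 0x1F;
      if (!isExtendedReg(modrmReg)) byte |= 0x80;  // VEX.R
      if (!isExtendedReg(indexReg)) byte |= 0x40;  // VEX.X
      if (!isExtendedReg(baseReg))  byte |= 0x20;  // VEX.B
      return byte;
    }

    int main() {
      // reg field holds register 9 (extended); index and base are not extended.
      std::printf("0x%02X\n",
                  buildVex3Byte2(/*modrmReg=*/9, /*indexReg=*/1,
                                 /*baseReg=*/2, /*mapSelect=*/0x01));
    }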
722  inline bool isX86_64ExtendedReg(unsigned RegNo) {                in isX86_64ExtendedReg() function
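The hit above is the inline definition itself (in the X86II namespace): a pure predicate on the register number. The sketch below captures the idea under an assumed flat register numbering; LLVM's real implementation switches over its generated X86:: register enumerators (R8-R15 with their sub-registers, XMM8-XMM15, and the analogous YMM/CR/DR ranges).

    #include <cassert>

    // Hypothetical register numbering used only for this sketch; LLVM uses its
    // generated X86::R8, X86::XMM8, ... enumerators and an explicit switch.
    enum Reg : unsigned {
      RAX, RCX, RDX, RBX, RSP, RBP, RSI, RDI,
      R8, R9, R10, R11, R12, R13, R14, R15,
      XMM0 = 100, XMM8 = 108, XMM15 = 115
    };

    // Same contract as X86II::isX86_64ExtendedReg: true exactly for registers
    // that do not exist in 32-bit mode and therefore need the REX.R/X/B (or
    // inverted VEX.R/X/B) extension bit when encoded.
    inline bool isX86_64ExtendedRegSketch(unsigned regNo) {
      return (regNo >= R8 && regNo <= R15) ||
             (regNo >= XMM8 && regNo <= XMM15);
    }

    int main() {
      assert(!isX86_64ExtendedRegSketch(RCX));   // legacy register
      assert(isX86_64ExtendedRegSketch(R9));     // needs an extension bit
      assert(isX86_64ExtendedRegSketch(XMM15));  // needs an extension bit
    }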
383  static bool isX86_64ExtendedReg(const MachineOperand &MO) {      in isX86_64ExtendedReg() function
385  return X86II::isX86_64ExtendedReg(MO.getReg());                  in isX86_64ExtendedReg()
471  if (!X86II::isX86_64ExtendedReg(OutMI.getOperand(0).getReg()) && in Lower()
472      X86II::isX86_64ExtendedReg(OutMI.getOperand(1).getReg())) {  in Lower()
495  if (!X86II::isX86_64ExtendedReg(OutMI.getOperand(0).getReg()) && in Lower()
496      X86II::isX86_64ExtendedReg(OutMI.getOperand(2).getReg())) {  in Lower()
885  if (X86II::isX86_64ExtendedReg(ScratchReg))                      in LowerPATCHPOINT()
894       X86II::isX86_64ExtendedReg(RegNo))                          in ParseRegister()
2417 if (X86II::isX86_64ExtendedReg(Inst.getOperand(0).getReg()) ||   in processInstruction()
2418     !X86II::isX86_64ExtendedReg(Inst.getOperand(1).getReg()))    in processInstruction()
2442 if (X86II::isX86_64ExtendedReg(Inst.getOperand(0).getReg()) ||   in processInstruction()
2443     !X86II::isX86_64ExtendedReg(Inst.getOperand(2).getReg()))    in processInstruction()
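The Lower() hits and the processInstruction() hits apply the same encoding-size trick, once when lowering MachineInstrs and once when assembling hand-written text: a 2-byte VEX prefix can carry only VEX.R, so when the destination of a VEX reg-reg move is a legacy register but the source is an extended one, switching to the reverse-encoded opcode moves the extended register into the ModRM.reg field (covered by VEX.R) and the shorter prefix becomes legal. The sketch below illustrates just that decision; the opcode constants and the isExtendedReg() helper are assumptions for the sketch, not the reverse-encoded opcode enumerators LLVM actually swaps in. (The LowerPATCHPOINT() hit is a separate use, accounting for the extra REX prefix byte an extended scratch register adds to the encoded size.)

    #include <cstdio>

    // Hypothetical opcode constants and register predicate; LLVM uses its
    // generated opcode enumerators and X86II::isX86_64ExtendedReg() here.
    enum Opcode { MOVrr, MOVrr_REV };
    static bool isExtendedReg(unsigned regNo) { return regNo >= 8; }

    // 2-byte VEX can only express VEX.R (the ModRM.reg extension bit).  The
    // normal reg-reg form puts the source in ModRM.rm, which would need VEX.B;
    // if only the source is extended, the reverse-encoded form puts it in
    // ModRM.reg instead, so the short prefix still works.
    static Opcode pickRegRegMoveOpcode(unsigned dstReg, unsigned srcReg) {
      if (!isExtendedReg(dstReg) && isExtendedReg(srcReg))
        return MOVrr_REV;  // extended register now encoded via VEX.R
      return MOVrr;
    }

    int main() {
      std::printf("%s\n",
                  pickRegRegMoveOpcode(/*dst=*/0, /*src=*/9) == MOVrr_REV
                      ? "reverse-encoded form: 2-byte VEX"
                      : "normal form");
    }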