Lines Matching refs:X86 (cross-reference hits in LLVM's X86 frame lowering, X86FrameLowering.cpp; each entry gives the source line number, the matching code, and the enclosing function)

91 return X86::SUB64ri8; in getSUBriOpcode()
92 return X86::SUB64ri32; in getSUBriOpcode()
95 return X86::SUB32ri8; in getSUBriOpcode()
96 return X86::SUB32ri; in getSUBriOpcode()
103 return X86::ADD64ri8; in getADDriOpcode()
104 return X86::ADD64ri32; in getADDriOpcode()
107 return X86::ADD32ri8; in getADDriOpcode()
108 return X86::ADD32ri; in getADDriOpcode()
113 return isLP64 ? X86::SUB64rr : X86::SUB32rr; in getSUBrrOpcode()
117 return isLP64 ? X86::ADD64rr : X86::ADD32rr; in getADDrrOpcode()
123 return X86::AND64ri8; in getANDriOpcode()
124 return X86::AND64ri32; in getANDriOpcode()
127 return X86::AND32ri8; in getANDriOpcode()
128 return X86::AND32ri; in getANDriOpcode()
132 return IsLP64 ? X86::LEA64r : X86::LEA32r; in getLEArOpcode()
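The selectors above share one shape: the immediate (ri) forms pick the 8-bit encoding when the operand fits in a signed byte, and every form branches on pointer width. A minimal sketch of that shape, reconstructed from the listed returns (exact signatures assumed, not verbatim source):

```cpp
#include "llvm/Support/MathExtras.h" // llvm::isInt
// X86::* opcode enums come from the X86 backend's generated headers
// (X86InstrInfo.h); this sketch assumes it is compiled in that context.

static unsigned getSUBriOpcode(bool IsLP64, int64_t Imm) {
  if (IsLP64)
    return llvm::isInt<8>(Imm) ? X86::SUB64ri8 : X86::SUB64ri32;
  return llvm::isInt<8>(Imm) ? X86::SUB32ri8 : X86::SUB32ri;
}

// The ADD and AND ri selectors follow the same pattern; the rr and LEA
// selectors only branch on pointer width:
static unsigned getLEArOpcode(bool IsLP64) {
  return IsLP64 ? X86::LEA64r : X86::LEA32r;
}
```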
148 X86::EAX, X86::EDX, X86::ECX, 0 in findDeadCallerSavedReg()
152 X86::RAX, X86::RDX, X86::RCX, X86::RSI, X86::RDI, in findDeadCallerSavedReg()
153 X86::R8, X86::R9, X86::R10, X86::R11, 0 in findDeadCallerSavedReg()
159 case X86::RETL: in findDeadCallerSavedReg()
160 case X86::RETQ: in findDeadCallerSavedReg()
161 case X86::RETIL: in findDeadCallerSavedReg()
162 case X86::RETIQ: in findDeadCallerSavedReg()
163 case X86::TCRETURNdi: in findDeadCallerSavedReg()
164 case X86::TCRETURNri: in findDeadCallerSavedReg()
165 case X86::TCRETURNmi: in findDeadCallerSavedReg()
166 case X86::TCRETURNdi64: in findDeadCallerSavedReg()
167 case X86::TCRETURNri64: in findDeadCallerSavedReg()
168 case X86::TCRETURNmi64: in findDeadCallerSavedReg()
169 case X86::EH_RETURN: in findDeadCallerSavedReg()
170 case X86::EH_RETURN64: { in findDeadCallerSavedReg()
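Lines 148-170 outline findDeadCallerSavedReg(): switch on the block's return-like terminator (RET*, TCRETURN*, EH_RETURN*), then scan a 0-terminated caller-saved register list for one that is not live. A hedged sketch of the scan; the helper name and the simplified liveness test are mine, the register lists are as listed:

```cpp
// Sketch only: the real function inspects the terminator's operands;
// here a plain live-in check stands in for that liveness test.
static unsigned findDeadCallerSavedGPR(llvm::MachineBasicBlock &MBB,
                                       bool Is64Bit) {
  static const uint16_t CallerSaved32[] = {X86::EAX, X86::EDX, X86::ECX, 0};
  static const uint16_t CallerSaved64[] = {
      X86::RAX, X86::RDX, X86::RCX, X86::RSI, X86::RDI,
      X86::R8,  X86::R9,  X86::R10, X86::R11, 0};
  for (const uint16_t *R = Is64Bit ? CallerSaved64 : CallerSaved32; *R; ++R)
    if (!MBB.isLiveIn(*R)) // simplified; see note above
      return *R;
  return 0; // no dead caller-saved register available
}
```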
198 if (Reg == X86::RAX || Reg == X86::EAX || Reg == X86::AX || in isEAXLiveIn()
199 Reg == X86::AH || Reg == X86::AL) in isEAXLiveIn()
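Lines 198-199 are the alias check at the heart of isEAXLiveIn(): EAX counts as live-in if any register overlapping it is in the function's live-in list. A sketch, assuming the surrounding loop walks MachineRegisterInfo's live-ins:

```cpp
static bool isEAXLiveIn(llvm::MachineFunction &MF) {
  llvm::MachineRegisterInfo &MRI = MF.getRegInfo();
  for (auto II = MRI.livein_begin(), EE = MRI.livein_end(); II != EE; ++II) {
    unsigned Reg = II->first;
    // Any alias of EAX makes it live: full, 16-bit, and 8-bit views.
    if (Reg == X86::RAX || Reg == X86::EAX || Reg == X86::AX ||
        Reg == X86::AH || Reg == X86::AL)
      return true;
  }
  return false;
}
```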
233 Reg = (unsigned)(Is64BitTarget ? X86::RAX : X86::EAX); in emitSPUpdate()
238 Opc = Is64BitTarget ? X86::MOV64ri : X86::MOV32ri; in emitSPUpdate()
257 ? (unsigned)(Is64BitTarget ? X86::RAX : X86::EAX) in emitSPUpdate()
261 ? (Is64BitTarget ? X86::PUSH64r : X86::PUSH32r) in emitSPUpdate()
262 : (Is64BitTarget ? X86::POP64r : X86::POP32r); in emitSPUpdate()
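The emitSPUpdate() lines show its large-offset strategy: materialize the adjustment into RAX/EAX with MOV64ri/MOV32ri when it will not fit an immediate, and fold slot-sized leftovers into a PUSH (for SUB) or POP (for ADD), which move SP implicitly. A sketch of the push/pop fold, with MBB/MBBI/DL/TII/Reg/IsSub assumed from the surrounding loop:

```cpp
#include "llvm/CodeGen/MachineInstrBuilder.h" // BuildMI, reg-state helpers

// Fold one pointer-sized chunk of the SP adjustment into a push or pop
// of a dead caller-saved register instead of an explicit SUB/ADD.
unsigned Opc = IsSub ? (Is64Bit ? X86::PUSH64r : X86::PUSH32r)
                     : (Is64Bit ? X86::POP64r : X86::POP32r);
BuildMI(MBB, MBBI, DL, TII.get(Opc))
    .addReg(Reg, llvm::getDefRegState(!IsSub) | llvm::getUndefRegState(IsSub));
```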
299 if ((Opc == X86::ADD64ri32 || Opc == X86::ADD64ri8 || in mergeSPUpdatesUp()
300 Opc == X86::ADD32ri || Opc == X86::ADD32ri8 || in mergeSPUpdatesUp()
301 Opc == X86::LEA32r || Opc == X86::LEA64_32r) && in mergeSPUpdatesUp()
306 } else if ((Opc == X86::SUB64ri32 || Opc == X86::SUB64ri8 || in mergeSPUpdatesUp()
307 Opc == X86::SUB32ri || Opc == X86::SUB32ri8) && in mergeSPUpdatesUp()
332 if ((Opc == X86::ADD64ri32 || Opc == X86::ADD64ri8 || in mergeSPUpdates()
333 Opc == X86::ADD32ri || Opc == X86::ADD32ri8 || in mergeSPUpdates()
334 Opc == X86::LEA32r || Opc == X86::LEA64_32r) && in mergeSPUpdates()
339 } else if ((Opc == X86::SUB64ri32 || Opc == X86::SUB64ri8 || in mergeSPUpdates()
340 Opc == X86::SUB32ri || Opc == X86::SUB32ri8) && in mergeSPUpdates()
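mergeSPUpdatesUp() and mergeSPUpdates() use the same recognizer: a neighboring ADD-to-SP (or an LEA writing SP) contributes a positive offset, a SUB-from-SP a negative one, and the folded instruction is erased. A sketch of the core test, with PI (an iterator at the neighbor), StackPtr, and Offset assumed; the LEA case reads its displacement operand (index 4) rather than operand 2 and is elided here:

```cpp
unsigned Opc = PI->getOpcode();
if ((Opc == X86::ADD64ri32 || Opc == X86::ADD64ri8 ||
     Opc == X86::ADD32ri   || Opc == X86::ADD32ri8) &&
    PI->getOperand(0).getReg() == StackPtr) {
  Offset += PI->getOperand(2).getImm(); // fold the ADD and drop it
  MBB.erase(PI);
} else if ((Opc == X86::SUB64ri32 || Opc == X86::SUB64ri8 ||
            Opc == X86::SUB32ri   || Opc == X86::SUB32ri8) &&
           PI->getOperand(0).getReg() == StackPtr) {
  Offset -= PI->getOperand(2).getImm(); // fold the SUB and drop it
  MBB.erase(PI);
}
```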
388 ri = MRI.reg_instr_begin(X86::EFLAGS), re = MRI.reg_instr_end(); in usesTheStack()
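Line 388 is the whole trick of usesTheStack(): walk every instruction touching EFLAGS and report true if one is a copy, since copies of EFLAGS are lowered to push/pop sequences that touch the stack. A sketch, shape inferred from the listed iterator call:

```cpp
static bool usesTheStack(const llvm::MachineFunction &MF) {
  const llvm::MachineRegisterInfo &MRI = MF.getRegInfo();
  for (auto ri = MRI.reg_instr_begin(X86::EFLAGS), re = MRI.reg_instr_end();
       ri != re; ++ri)
    if (ri->isCopy()) // lowered via pushf/pop, hence "uses the stack"
      return true;
  return false;
}
```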
407 CallOp = IsLargeCodeModel ? X86::CALL64r : X86::CALL64pcrel32; in emitStackProbeCall()
409 CallOp = X86::CALLpcrel32; in emitStackProbeCall()
430 BuildMI(MBB, MBBI, DL, TII.get(X86::MOV64ri), X86::R11) in emitStackProbeCall()
432 CI = BuildMI(MBB, MBBI, DL, TII.get(CallOp)).addReg(X86::R11); in emitStackProbeCall()
437 unsigned AX = Is64Bit ? X86::RAX : X86::EAX; in emitStackProbeCall()
438 unsigned SP = Is64Bit ? X86::RSP : X86::ESP; in emitStackProbeCall()
443 .addReg(X86::EFLAGS, RegState::Define | RegState::Implicit); in emitStackProbeCall()
449 BuildMI(MBB, MBBI, DL, TII.get(X86::SUB64rr), X86::RSP) in emitStackProbeCall()
450 .addReg(X86::RSP) in emitStackProbeCall()
451 .addReg(X86::RAX); in emitStackProbeCall()
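emitStackProbeCall() picks a direct call unless the 64-bit large code model forces an indirect one through R11, and on Win64 variants where the probe helper does not adjust SP it subtracts the probed size (passed in RAX) afterwards. A hedged sketch; the "__chkstk" symbol and the runtime-specific SP fixup are assumptions about the target ABI, not taken from the listed lines:

```cpp
unsigned CallOp;
if (Is64Bit)
  CallOp = IsLargeCodeModel ? X86::CALL64r : X86::CALL64pcrel32;
else
  CallOp = X86::CALLpcrel32;

if (Is64Bit && IsLargeCodeModel) {
  // Large code model: the helper may be out of pcrel32 range, so load
  // its address into R11 and call through the register.
  BuildMI(MBB, MBBI, DL, TII.get(X86::MOV64ri), X86::R11)
      .addExternalSymbol("__chkstk"); // probe symbol assumed
  BuildMI(MBB, MBBI, DL, TII.get(CallOp)).addReg(X86::R11);
}

// Some Win64 probe helpers only touch the guard pages; the caller must
// still move RSP down by the amount it passed in RAX:
BuildMI(MBB, MBBI, DL, TII.get(X86::SUB64rr), X86::RSP)
    .addReg(X86::RSP)
    .addReg(X86::RAX);
```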
688 BuildMI(MBB, MBBI, DL, TII.get(Is64Bit ? X86::PUSH64r : X86::PUSH32r)) in emitPrologue()
711 BuildMI(MBB, MBBI, DL, TII.get(X86::SEH_PushReg)) in emitPrologue()
719 TII.get(Uses64BitFramePtr ? X86::MOV64rr : X86::MOV32rr), in emitPrologue()
747 (MBBI->getOpcode() == X86::PUSH32r || in emitPrologue()
748 MBBI->getOpcode() == X86::PUSH64r)) { in emitPrologue()
765 BuildMI(MBB, MBBI, DL, TII.get(X86::SEH_PushReg)).addImm(Reg).setMIFlag( in emitPrologue()
815 BuildMI(MBB, MBBI, DL, TII.get(X86::PUSH32r)) in emitPrologue()
816 .addReg(X86::EAX, RegState::Kill) in emitPrologue()
824 BuildMI(MBB, MBBI, DL, TII.get(X86::MOV32ri), X86::EAX) in emitPrologue()
828 BuildMI(MBB, MBBI, DL, TII.get(X86::MOV64ri32), X86::RAX) in emitPrologue()
832 BuildMI(MBB, MBBI, DL, TII.get(X86::MOV64ri), X86::RAX) in emitPrologue()
839 BuildMI(MBB, MBBI, DL, TII.get(X86::MOV32ri), X86::EAX) in emitPrologue()
857 MachineInstr *MI = addRegOffset(BuildMI(MF, DL, TII.get(X86::MOV32rm), in emitPrologue()
858 X86::EAX), in emitPrologue()
869 BuildMI(MBB, MBBI, DL, TII.get(X86::SEH_StackAlloc)) in emitPrologue()
877 addRegOffset(BuildMI(MBB, MBBI, DL, TII.get(X86::LEA64r), FramePtr), in emitPrologue()
880 BuildMI(MBB, MBBI, DL, TII.get(X86::MOV64rr), FramePtr).addReg(StackPtr); in emitPrologue()
883 BuildMI(MBB, MBBI, DL, TII.get(X86::SEH_SetFrame)) in emitPrologue()
896 if (X86::FR64RegClass.contains(Reg)) { in emitPrologue()
900 BuildMI(MBB, MBBI, DL, TII.get(X86::SEH_SaveXMM)) in emitPrologue()
910 BuildMI(MBB, MBBI, DL, TII.get(X86::SEH_EndPrologue)) in emitPrologue()
936 unsigned Opc = Uses64BitFramePtr ? X86::MOV64rr : X86::MOV32rr; in emitPrologue()
942 unsigned Opm = Uses64BitFramePtr ? X86::MOV64mr : X86::MOV32mr; in emitPrologue()
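The emitPrologue() lines trace the canonical frame setup: push the old frame pointer, mirror each step with a Win64 SEH pseudo when needed, then copy SP into the frame pointer. A sketch of that sequence, with FramePtr/StackPtr/NeedsWinEH assumed from context:

```cpp
BuildMI(MBB, MBBI, DL, TII.get(Is64Bit ? X86::PUSH64r : X86::PUSH32r))
    .addReg(FramePtr, llvm::RegState::Kill)
    .setMIFlag(llvm::MachineInstr::FrameSetup);
if (NeedsWinEH)
  BuildMI(MBB, MBBI, DL, TII.get(X86::SEH_PushReg))
      .addImm(FramePtr)
      .setMIFlag(llvm::MachineInstr::FrameSetup);

// Establish the frame pointer: FramePtr = StackPtr.
BuildMI(MBB, MBBI, DL,
        TII.get(Uses64BitFramePtr ? X86::MOV64rr : X86::MOV32rr), FramePtr)
    .addReg(StackPtr)
    .setMIFlag(llvm::MachineInstr::FrameSetup);
```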
1012 case X86::RETQ: in emitEpilogue()
1013 case X86::RETL: in emitEpilogue()
1014 case X86::RETIL: in emitEpilogue()
1015 case X86::RETIQ: in emitEpilogue()
1016 case X86::TCRETURNdi: in emitEpilogue()
1017 case X86::TCRETURNri: in emitEpilogue()
1018 case X86::TCRETURNmi: in emitEpilogue()
1019 case X86::TCRETURNdi64: in emitEpilogue()
1020 case X86::TCRETURNri64: in emitEpilogue()
1021 case X86::TCRETURNmi64: in emitEpilogue()
1022 case X86::EH_RETURN: in emitEpilogue()
1023 case X86::EH_RETURN64: in emitEpilogue()
1045 TII.get(Is64Bit ? X86::POP64r : X86::POP32r), MachineFramePtr); in emitEpilogue()
1056 if (Opc != X86::POP32r && Opc != X86::POP64r && Opc != X86::DBG_VALUE && in emitEpilogue()
1093 unsigned Opc = (Uses64BitFramePtr ? X86::MOV64rr : X86::MOV32rr); in emitEpilogue()
1112 BuildMI(MBB, MBBI, DL, TII.get(X86::SEH_Epilogue)); in emitEpilogue()
1115 if (RetOpcode == X86::EH_RETURN || RetOpcode == X86::EH_RETURN64) { in emitEpilogue()
1120 TII.get(Uses64BitFramePtr ? X86::MOV64rr : X86::MOV32rr), in emitEpilogue()
1122 } else if (RetOpcode == X86::TCRETURNri || RetOpcode == X86::TCRETURNdi || in emitEpilogue()
1123 RetOpcode == X86::TCRETURNmi || in emitEpilogue()
1124 RetOpcode == X86::TCRETURNri64 || RetOpcode == X86::TCRETURNdi64 || in emitEpilogue()
1125 RetOpcode == X86::TCRETURNmi64) { in emitEpilogue()
1126 bool isMem = RetOpcode == X86::TCRETURNmi || RetOpcode == X86::TCRETURNmi64; in emitEpilogue()
1152 if (RetOpcode == X86::TCRETURNdi || RetOpcode == X86::TCRETURNdi64) { in emitEpilogue()
1153 unsigned Op = (RetOpcode == X86::TCRETURNdi) in emitEpilogue()
1154 ? X86::TAILJMPd in emitEpilogue()
1155 : (IsWin64 ? X86::TAILJMPd64_REX : X86::TAILJMPd64); in emitEpilogue()
1165 } else if (RetOpcode == X86::TCRETURNmi || RetOpcode == X86::TCRETURNmi64) { in emitEpilogue()
1166 unsigned Op = (RetOpcode == X86::TCRETURNmi) in emitEpilogue()
1167 ? X86::TAILJMPm in emitEpilogue()
1168 : (IsWin64 ? X86::TAILJMPm64_REX : X86::TAILJMPm64); in emitEpilogue()
1172 } else if (RetOpcode == X86::TCRETURNri64) { in emitEpilogue()
1174 TII.get(IsWin64 ? X86::TAILJMPr64_REX : X86::TAILJMPr64)) in emitEpilogue()
1177 BuildMI(MBB, MBBI, DL, TII.get(X86::TAILJMPr)). in emitEpilogue()
1186 } else if ((RetOpcode == X86::RETQ || RetOpcode == X86::RETL || in emitEpilogue()
1187 RetOpcode == X86::RETIQ || RetOpcode == X86::RETIL) && in emitEpilogue()
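Lines 1152-1177 show emitEpilogue() rewriting each TCRETURN* pseudo into a real tail jump, with Win64 taking the REX-prefixed variants. A sketch of the direct-branch case, following the opcode pairs listed (the mi/ri forms map analogously):

```cpp
llvm::MachineOperand &JumpTarget = MBBI->getOperand(0);
if (RetOpcode == X86::TCRETURNdi || RetOpcode == X86::TCRETURNdi64) {
  unsigned Op = (RetOpcode == X86::TCRETURNdi)
                    ? X86::TAILJMPd
                    : (IsWin64 ? X86::TAILJMPd64_REX : X86::TAILJMPd64);
  llvm::MachineInstrBuilder MIB = BuildMI(MBB, MBBI, DL, TII.get(Op));
  // Carry the branch target over from the pseudo instruction.
  if (JumpTarget.isGlobal())
    MIB.addGlobalAddress(JumpTarget.getGlobal(), JumpTarget.getOffset());
  else
    MIB.addExternalSymbol(JumpTarget.getSymbolName());
}
```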
1398 if (!X86::GR64RegClass.contains(Reg) && !X86::GR32RegClass.contains(Reg)) in assignCalleeSavedSpillSlots()
1413 if (X86::GR64RegClass.contains(Reg) || X86::GR32RegClass.contains(Reg)) in assignCalleeSavedSpillSlots()
1441 unsigned Opc = STI.is64Bit() ? X86::PUSH64r : X86::PUSH32r; in spillCalleeSavedRegisters()
1445 if (!X86::GR64RegClass.contains(Reg) && !X86::GR32RegClass.contains(Reg)) in spillCalleeSavedRegisters()
1458 if (X86::GR64RegClass.contains(Reg) || X86::GR32RegClass.contains(Reg)) in spillCalleeSavedRegisters()
1490 if (X86::GR64RegClass.contains(Reg) || in restoreCalleeSavedRegisters()
1491 X86::GR32RegClass.contains(Reg)) in restoreCalleeSavedRegisters()
1499 unsigned Opc = STI.is64Bit() ? X86::POP64r : X86::POP32r; in restoreCalleeSavedRegisters()
1502 if (!X86::GR64RegClass.contains(Reg) && in restoreCalleeSavedRegisters()
1503 !X86::GR32RegClass.contains(Reg)) in restoreCalleeSavedRegisters()
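The spill and restore paths treat GPR callee-saves specially: they are pushed and popped rather than stored to spill slots, with the class checks on the listed lines filtering out everything else (XMM saves take the slot path). A sketch of the push loop, with CSI/STI/MI/DL/TII assumed from the hook's parameters:

```cpp
unsigned Opc = STI.is64Bit() ? X86::PUSH64r : X86::PUSH32r;
// Push in reverse CSI order so the pops in
// restoreCalleeSavedRegisters() can run forward.
for (unsigned i = CSI.size(); i != 0; --i) {
  unsigned Reg = CSI[i - 1].getReg();
  if (!X86::GR64RegClass.contains(Reg) && !X86::GR32RegClass.contains(Reg))
    continue; // non-GPR callee-saves get regular spill slots
  BuildMI(MBB, MI, DL, TII.get(Opc))
      .addReg(Reg, llvm::RegState::Kill)
      .setMIFlag(llvm::MachineInstr::FrameSetup);
}
```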
1563 return Primary ? X86::R14 : X86::R13; in GetScratchRegister()
1565 return Primary ? X86::EBX : X86::EDI; in GetScratchRegister()
1570 return Primary ? X86::R11 : X86::R12; in GetScratchRegister()
1572 return Primary ? X86::R11D : X86::R12D; in GetScratchRegister()
1582 return Primary ? X86::EAX : X86::ECX; in GetScratchRegister()
1585 return Primary ? X86::EDX : X86::EAX; in GetScratchRegister()
1586 return Primary ? X86::ECX : X86::EAX; in GetScratchRegister()
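GetScratchRegister() picks per platform and per role (Primary vs. secondary): R11/R12 on 64-bit LP64, their 32-bit views on x32, and convention-dependent 32-bit registers otherwise. A sketch of the selection shape; the real function also consults the MachineFunction for the calling convention and the Darwin cases at lines 1563-1565:

```cpp
// Shape reconstructed from the listed returns; signature assumed.
static unsigned GetScratchRegister(bool Is64Bit, bool IsLP64, bool Primary) {
  if (Is64Bit)
    return IsLP64 ? (Primary ? X86::R11 : X86::R12)
                  : (Primary ? X86::R11D : X86::R12D);
  return Primary ? X86::ECX : X86::EAX; // convention-dependent in the source
}
```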
1644 allocMBB->addLiveIn(IsLP64 ? X86::R10 : X86::R10D); in adjustForSegmentedStacks()
1656 TlsReg = X86::FS; in adjustForSegmentedStacks()
1659 TlsReg = X86::GS; in adjustForSegmentedStacks()
1662 TlsReg = X86::GS; in adjustForSegmentedStacks()
1665 TlsReg = X86::FS; in adjustForSegmentedStacks()
1668 TlsReg = X86::FS; in adjustForSegmentedStacks()
1675 ScratchReg = IsLP64 ? X86::RSP : X86::ESP; in adjustForSegmentedStacks()
1677 … BuildMI(checkMBB, DL, TII.get(IsLP64 ? X86::LEA64r : X86::LEA64_32r), ScratchReg).addReg(X86::RSP) in adjustForSegmentedStacks()
1680 BuildMI(checkMBB, DL, TII.get(IsLP64 ? X86::CMP64rm : X86::CMP32rm)).addReg(ScratchReg) in adjustForSegmentedStacks()
1684 TlsReg = X86::GS; in adjustForSegmentedStacks()
1687 TlsReg = X86::GS; in adjustForSegmentedStacks()
1690 TlsReg = X86::FS; in adjustForSegmentedStacks()
1693 TlsReg = X86::FS; in adjustForSegmentedStacks()
1702 ScratchReg = X86::ESP; in adjustForSegmentedStacks()
1704 BuildMI(checkMBB, DL, TII.get(X86::LEA32r), ScratchReg).addReg(X86::ESP) in adjustForSegmentedStacks()
1709 BuildMI(checkMBB, DL, TII.get(X86::CMP32rm)).addReg(ScratchReg) in adjustForSegmentedStacks()
1734 BuildMI(checkMBB, DL, TII.get(X86::PUSH32r)) in adjustForSegmentedStacks()
1737 BuildMI(checkMBB, DL, TII.get(X86::MOV32ri), ScratchReg2) in adjustForSegmentedStacks()
1739 BuildMI(checkMBB, DL, TII.get(X86::CMP32rm)) in adjustForSegmentedStacks()
1746 BuildMI(checkMBB, DL, TII.get(X86::POP32r), ScratchReg2); in adjustForSegmentedStacks()
1752 BuildMI(checkMBB, DL, TII.get(X86::JA_1)).addMBB(&prologueMBB); in adjustForSegmentedStacks()
1760 const unsigned RegAX = IsLP64 ? X86::RAX : X86::EAX; in adjustForSegmentedStacks()
1761 const unsigned Reg10 = IsLP64 ? X86::R10 : X86::R10D; in adjustForSegmentedStacks()
1762 const unsigned Reg11 = IsLP64 ? X86::R11 : X86::R11D; in adjustForSegmentedStacks()
1763 const unsigned MOVrr = IsLP64 ? X86::MOV64rr : X86::MOV32rr; in adjustForSegmentedStacks()
1764 const unsigned MOVri = IsLP64 ? X86::MOV64ri : X86::MOV32ri; in adjustForSegmentedStacks()
1776 BuildMI(allocMBB, DL, TII.get(X86::PUSHi32)) in adjustForSegmentedStacks()
1778 BuildMI(allocMBB, DL, TII.get(X86::PUSHi32)) in adjustForSegmentedStacks()
1798 BuildMI(allocMBB, DL, TII.get(X86::CALL64m)) in adjustForSegmentedStacks()
1799 .addReg(X86::RIP) in adjustForSegmentedStacks()
1807 BuildMI(allocMBB, DL, TII.get(X86::CALL64pcrel32)) in adjustForSegmentedStacks()
1810 BuildMI(allocMBB, DL, TII.get(X86::CALLpcrel32)) in adjustForSegmentedStacks()
1815 BuildMI(allocMBB, DL, TII.get(X86::MORESTACK_RET_RESTORE_R10)); in adjustForSegmentedStacks()
1817 BuildMI(allocMBB, DL, TII.get(X86::MORESTACK_RET)); in adjustForSegmentedStacks()
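adjustForSegmentedStacks() builds checkMBB to compare the would-be stack pointer against the stack limit kept in a TLS slot (segment register and offset vary by OS, per lines 1656-1693), branching past allocMBB when there is room; otherwise allocMBB passes the sizes in R10/R11 (pushed on 32-bit) and calls __morestack. A sketch of the 64-bit limit check, with TlsReg/TlsOffset/StackSize assumed resolved:

```cpp
// Compute SP - StackSize into a scratch register (SP itself can serve
// when nothing is live yet), compare it against the TLS stack limit,
// and fall through to the morestack call only on overflow.
ScratchReg = IsLP64 ? X86::RSP : X86::ESP;
BuildMI(checkMBB, DL, TII.get(IsLP64 ? X86::LEA64r : X86::LEA64_32r),
        ScratchReg)
    .addReg(X86::RSP).addImm(1).addReg(0).addImm(-StackSize).addReg(0);
BuildMI(checkMBB, DL, TII.get(IsLP64 ? X86::CMP64rm : X86::CMP32rm))
    .addReg(ScratchReg)
    .addReg(0).addImm(1).addReg(0).addImm(TlsOffset).addReg(TlsReg);
BuildMI(checkMBB, DL, TII.get(X86::JA_1)).addMBB(&prologueMBB);
```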
1929 SPReg = X86::RSP; in adjustForHiPEPrologue()
1930 PReg = X86::RBP; in adjustForHiPEPrologue()
1931 LEAop = X86::LEA64r; in adjustForHiPEPrologue()
1932 CMPop = X86::CMP64rm; in adjustForHiPEPrologue()
1933 CALLop = X86::CALL64pcrel32; in adjustForHiPEPrologue()
1936 SPReg = X86::ESP; in adjustForHiPEPrologue()
1937 PReg = X86::EBP; in adjustForHiPEPrologue()
1938 LEAop = X86::LEA32r; in adjustForHiPEPrologue()
1939 CMPop = X86::CMP32rm; in adjustForHiPEPrologue()
1940 CALLop = X86::CALLpcrel32; in adjustForHiPEPrologue()
1954 BuildMI(stackCheckMBB, DL, TII.get(X86::JAE_1)).addMBB(&prologueMBB); in adjustForHiPEPrologue()
1963 BuildMI(incStackMBB, DL, TII.get(X86::JLE_1)).addMBB(incStackMBB); in adjustForHiPEPrologue()
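adjustForHiPEPrologue() is the same pattern specialized for the HiPE (Erlang) runtime: SPReg/PReg and the opcodes are chosen by bitness (lines 1929-1940), the check compares SP minus the frame's maximum stack need against the stack-limit field of the process structure in PReg, and incStackMBB calls the runtime's stack-growth routine, looping via JLE until the check passes. A sketch of stackCheckMBB; SPLimitOffset (the limit field's offset inside the process structure) is runtime-defined and assumed available:

```cpp
BuildMI(stackCheckMBB, DL, TII.get(LEAop), ScratchReg)
    .addReg(SPReg).addImm(1).addReg(0).addImm(-MaxStack).addReg(0);
BuildMI(stackCheckMBB, DL, TII.get(CMPop))
    .addReg(ScratchReg)
    .addReg(PReg).addImm(1).addReg(0).addImm(SPLimitOffset).addReg(0);
// Enough headroom: skip the growth call and run the normal prologue.
BuildMI(stackCheckMBB, DL, TII.get(X86::JAE_1)).addMBB(&prologueMBB);
```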