Lines Matching refs:ScratchReg
1605 unsigned ScratchReg = GetScratchRegister(Is64Bit, IsLP64, MF, true); in adjustForSegmentedStacks() local
1606 assert(!MF.getRegInfo().isLiveIn(ScratchReg) && in adjustForSegmentedStacks()
1675 ScratchReg = IsLP64 ? X86::RSP : X86::ESP; in adjustForSegmentedStacks()
1677 … BuildMI(checkMBB, DL, TII.get(IsLP64 ? X86::LEA64r : X86::LEA64_32r), ScratchReg).addReg(X86::RSP) in adjustForSegmentedStacks()
1680 BuildMI(checkMBB, DL, TII.get(IsLP64 ? X86::CMP64rm : X86::CMP32rm)).addReg(ScratchReg) in adjustForSegmentedStacks()
1702 ScratchReg = X86::ESP; in adjustForSegmentedStacks()
1704 BuildMI(checkMBB, DL, TII.get(X86::LEA32r), ScratchReg).addReg(X86::ESP) in adjustForSegmentedStacks()
1709 BuildMI(checkMBB, DL, TII.get(X86::CMP32rm)).addReg(ScratchReg) in adjustForSegmentedStacks()
1740 .addReg(ScratchReg) in adjustForSegmentedStacks()
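
For context, the adjustForSegmentedStacks() matches above all follow one pattern: materialize (SP - StackSize) into ScratchReg with an LEA, then CMP it against the stack limit kept at a thread-local offset. The sketch below is a hypothetical helper (emitSegmentedStackCheck and its parameters are illustrative, not names from the file above), only compilable inside an LLVM backend; it spells out the five X86 memory operands (base, scale, index, displacement, segment) that the truncated BuildMI lines encode. Header paths assume a recent LLVM tree.

// Sketch only: mirrors the LEA/CMP pattern visible in the matches above.
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/TargetInstrInfo.h"

using namespace llvm;

static void emitSegmentedStackCheck(MachineBasicBlock *checkMBB,
                                    const DebugLoc &DL,
                                    const TargetInstrInfo &TII,
                                    unsigned ScratchReg, unsigned SPReg,
                                    unsigned TlsReg, int64_t StackSize,
                                    int64_t TlsOffset, unsigned LEAop,
                                    unsigned CMPop) {
  // lea ScratchReg, [SPReg - StackSize]
  BuildMI(checkMBB, DL, TII.get(LEAop), ScratchReg)
      .addReg(SPReg)       // base
      .addImm(1)           // scale
      .addReg(0)           // no index
      .addImm(-StackSize)  // displacement
      .addReg(0);          // no segment override
  // cmp ScratchReg, seg:[TlsOffset]  -- stack limit lives in TLS
  BuildMI(checkMBB, DL, TII.get(CMPop))
      .addReg(ScratchReg)
      .addReg(0)           // no base
      .addImm(1)           // scale
      .addReg(0)           // no index
      .addImm(TlsOffset)   // displacement
      .addReg(TlsReg);     // segment register (e.g. FS/GS)
}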
1926 unsigned ScratchReg, SPReg, PReg, SPLimitOffset; in adjustForHiPEPrologue() local
1944 ScratchReg = GetScratchRegister(Is64Bit, IsLP64, MF, true); in adjustForHiPEPrologue()
1945 assert(!MF.getRegInfo().isLiveIn(ScratchReg) && in adjustForHiPEPrologue()
1949 addRegOffset(BuildMI(stackCheckMBB, DL, TII.get(LEAop), ScratchReg), in adjustForHiPEPrologue()
1953 .addReg(ScratchReg), PReg, false, SPLimitOffset); in adjustForHiPEPrologue()
1959 addRegOffset(BuildMI(incStackMBB, DL, TII.get(LEAop), ScratchReg), in adjustForHiPEPrologue()
1962 .addReg(ScratchReg), PReg, false, SPLimitOffset); in adjustForHiPEPrologue()
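
The adjustForHiPEPrologue() matches use the same LEA-then-CMP idea, but build the reg+offset addresses through addRegOffset(), which appends the (base, scale 1, no index, offset, no segment) operands in one call. A minimal sketch, assuming addRegOffset()'s (MIB, Reg, isKill, Offset) signature from the X86 backend's X86InstrBuilder.h; emitHiPEStackCheck and its parameters are hypothetical, and the code only compiles inside the X86 target directory where that header is visible.

// Sketch only: mirrors the addRegOffset-based check in the matches above.
#include "X86InstrBuilder.h"  // addRegOffset(); X86-target-private header
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/TargetInstrInfo.h"

using namespace llvm;

static void emitHiPEStackCheck(MachineBasicBlock *stackCheckMBB,
                               const DebugLoc &DL,
                               const TargetInstrInfo &TII,
                               unsigned ScratchReg, unsigned SPReg,
                               unsigned PReg, int SPLimitOffset, int MaxStack,
                               unsigned LEAop, unsigned CMPop) {
  // lea ScratchReg, [SPReg - MaxStack]
  addRegOffset(BuildMI(stackCheckMBB, DL, TII.get(LEAop), ScratchReg),
               SPReg, false, -MaxStack);
  // cmp ScratchReg, [PReg + SPLimitOffset]  -- limit stored off the P register
  addRegOffset(BuildMI(stackCheckMBB, DL, TII.get(CMPop)).addReg(ScratchReg),
               PReg, false, SPLimitOffset);
}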