Lines matching refs:X86 in X86FastISel.cpp
166 bool foldX86XALUIntrinsic(X86::CondCode &CC, const Instruction *I,
172 static std::pair<X86::CondCode, bool>
174 X86::CondCode CC = X86::COND_INVALID; in getX86ConditionCode()
179 case CmpInst::FCMP_UEQ: CC = X86::COND_E; break; in getX86ConditionCode()
181 case CmpInst::FCMP_OGT: CC = X86::COND_A; break; in getX86ConditionCode()
183 case CmpInst::FCMP_OGE: CC = X86::COND_AE; break; in getX86ConditionCode()
185 case CmpInst::FCMP_ULT: CC = X86::COND_B; break; in getX86ConditionCode()
187 case CmpInst::FCMP_ULE: CC = X86::COND_BE; break; in getX86ConditionCode()
188 case CmpInst::FCMP_ONE: CC = X86::COND_NE; break; in getX86ConditionCode()
189 case CmpInst::FCMP_UNO: CC = X86::COND_P; break; in getX86ConditionCode()
190 case CmpInst::FCMP_ORD: CC = X86::COND_NP; break; in getX86ConditionCode()
192 case CmpInst::FCMP_UNE: CC = X86::COND_INVALID; break; in getX86ConditionCode()
195 case CmpInst::ICMP_EQ: CC = X86::COND_E; break; in getX86ConditionCode()
196 case CmpInst::ICMP_NE: CC = X86::COND_NE; break; in getX86ConditionCode()
197 case CmpInst::ICMP_UGT: CC = X86::COND_A; break; in getX86ConditionCode()
198 case CmpInst::ICMP_UGE: CC = X86::COND_AE; break; in getX86ConditionCode()
199 case CmpInst::ICMP_ULT: CC = X86::COND_B; break; in getX86ConditionCode()
200 case CmpInst::ICMP_ULE: CC = X86::COND_BE; break; in getX86ConditionCode()
201 case CmpInst::ICMP_SGT: CC = X86::COND_G; break; in getX86ConditionCode()
202 case CmpInst::ICMP_SGE: CC = X86::COND_GE; break; in getX86ConditionCode()
203 case CmpInst::ICMP_SLT: CC = X86::COND_L; break; in getX86ConditionCode()
204 case CmpInst::ICMP_SLE: CC = X86::COND_LE; break; in getX86ConditionCode()
247 bool X86FastISel::foldX86XALUIntrinsic(X86::CondCode &CC, const Instruction *I, in foldX86XALUIntrinsic()
267 X86::CondCode TmpCC; in foldX86XALUIntrinsic()
273 case Intrinsic::umul_with_overflow: TmpCC = X86::COND_O; break; in foldX86XALUIntrinsic()
275 case Intrinsic::usub_with_overflow: TmpCC = X86::COND_B; break; in foldX86XALUIntrinsic()
339 Opc = X86::MOV8rm; in X86FastEmitLoad()
340 RC = &X86::GR8RegClass; in X86FastEmitLoad()
343 Opc = X86::MOV16rm; in X86FastEmitLoad()
344 RC = &X86::GR16RegClass; in X86FastEmitLoad()
347 Opc = X86::MOV32rm; in X86FastEmitLoad()
348 RC = &X86::GR32RegClass; in X86FastEmitLoad()
352 Opc = X86::MOV64rm; in X86FastEmitLoad()
353 RC = &X86::GR64RegClass; in X86FastEmitLoad()
357 Opc = Subtarget->hasAVX() ? X86::VMOVSSrm : X86::MOVSSrm; in X86FastEmitLoad()
358 RC = &X86::FR32RegClass; in X86FastEmitLoad()
360 Opc = X86::LD_Fp32m; in X86FastEmitLoad()
361 RC = &X86::RFP32RegClass; in X86FastEmitLoad()
366 Opc = Subtarget->hasAVX() ? X86::VMOVSDrm : X86::MOVSDrm; in X86FastEmitLoad()
367 RC = &X86::FR64RegClass; in X86FastEmitLoad()
369 Opc = X86::LD_Fp64m; in X86FastEmitLoad()
370 RC = &X86::RFP64RegClass; in X86FastEmitLoad()
378 Opc = Subtarget->hasAVX() ? X86::VMOVAPSrm : X86::MOVAPSrm; in X86FastEmitLoad()
380 Opc = Subtarget->hasAVX() ? X86::VMOVUPSrm : X86::MOVUPSrm; in X86FastEmitLoad()
381 RC = &X86::VR128RegClass; in X86FastEmitLoad()
385 Opc = Subtarget->hasAVX() ? X86::VMOVAPDrm : X86::MOVAPDrm; in X86FastEmitLoad()
387 Opc = Subtarget->hasAVX() ? X86::VMOVUPDrm : X86::MOVUPDrm; in X86FastEmitLoad()
388 RC = &X86::VR128RegClass; in X86FastEmitLoad()
395 Opc = Subtarget->hasAVX() ? X86::VMOVDQArm : X86::MOVDQArm; in X86FastEmitLoad()
397 Opc = Subtarget->hasAVX() ? X86::VMOVDQUrm : X86::MOVDQUrm; in X86FastEmitLoad()
398 RC = &X86::VR128RegClass; in X86FastEmitLoad()
425 unsigned AndResult = createResultReg(&X86::GR8RegClass); in X86FastEmitStore()
427 TII.get(X86::AND8ri), AndResult) in X86FastEmitStore()
432 case MVT::i8: Opc = X86::MOV8mr; break; in X86FastEmitStore()
433 case MVT::i16: Opc = X86::MOV16mr; break; in X86FastEmitStore()
434 case MVT::i32: Opc = X86::MOV32mr; break; in X86FastEmitStore()
435 case MVT::i64: Opc = X86::MOV64mr; break; // Must be in x86-64 mode. in X86FastEmitStore()
438 (Subtarget->hasAVX() ? X86::VMOVSSmr : X86::MOVSSmr) : X86::ST_Fp32m; in X86FastEmitStore()
442 (Subtarget->hasAVX() ? X86::VMOVSDmr : X86::MOVSDmr) : X86::ST_Fp64m; in X86FastEmitStore()
446 Opc = Subtarget->hasAVX() ? X86::VMOVAPSmr : X86::MOVAPSmr; in X86FastEmitStore()
448 Opc = Subtarget->hasAVX() ? X86::VMOVUPSmr : X86::MOVUPSmr; in X86FastEmitStore()
452 Opc = Subtarget->hasAVX() ? X86::VMOVAPDmr : X86::MOVAPDmr; in X86FastEmitStore()
454 Opc = Subtarget->hasAVX() ? X86::VMOVUPDmr : X86::MOVUPDmr; in X86FastEmitStore()
461 Opc = Subtarget->hasAVX() ? X86::VMOVDQAmr : X86::MOVDQAmr; in X86FastEmitStore()
463 Opc = Subtarget->hasAVX() ? X86::VMOVDQUmr : X86::MOVDQUmr; in X86FastEmitStore()
490 case MVT::i8: Opc = X86::MOV8mi; break; in X86FastEmitStore()
491 case MVT::i16: Opc = X86::MOV16mi; break; in X86FastEmitStore()
492 case MVT::i32: Opc = X86::MOV32mi; break; in X86FastEmitStore()
496 Opc = X86::MOV64mi32; in X86FastEmitStore()
569 AM.Base.Reg = X86::RIP; in handleConstantAddresses()
594 Opc = X86::MOV64rm; in handleConstantAddresses()
595 RC = &X86::GR64RegClass; in handleConstantAddresses()
598 StubAM.Base.Reg = X86::RIP; in handleConstantAddresses()
600 Opc = X86::MOV32rm; in handleConstantAddresses()
601 RC = &X86::GR32RegClass; in handleConstantAddresses()
892 AM.Base.Reg = X86::RIP; in X86SelectCallAddress()
1012 if (VA.getLocReg() == X86::FP0 || VA.getLocReg() == X86::FP1) in X86SelectRet()
1062 unsigned RetReg = Subtarget->is64Bit() ? X86::RAX : X86::EAX; in X86SelectRet()
1071 TII.get(Subtarget->is64Bit() ? X86::RETQ : X86::RETL)); in X86SelectRet()
1117 case MVT::i8: return X86::CMP8rr; in X86ChooseCmpOpcode()
1118 case MVT::i16: return X86::CMP16rr; in X86ChooseCmpOpcode()
1119 case MVT::i32: return X86::CMP32rr; in X86ChooseCmpOpcode()
1120 case MVT::i64: return X86::CMP64rr; in X86ChooseCmpOpcode()
1122 return X86ScalarSSEf32 ? (HasAVX ? X86::VUCOMISSrr : X86::UCOMISSrr) : 0; in X86ChooseCmpOpcode()
1124 return X86ScalarSSEf64 ? (HasAVX ? X86::VUCOMISDrr : X86::UCOMISDrr) : 0; in X86ChooseCmpOpcode()
1137 return X86::CMP8ri; in X86ChooseCmpImmediateOpcode()
1140 return X86::CMP16ri8; in X86ChooseCmpImmediateOpcode()
1141 return X86::CMP16ri; in X86ChooseCmpImmediateOpcode()
1144 return X86::CMP32ri8; in X86ChooseCmpImmediateOpcode()
1145 return X86::CMP32ri; in X86ChooseCmpImmediateOpcode()
1148 return X86::CMP64ri8; in X86ChooseCmpImmediateOpcode()
1152 return X86::CMP64ri32; in X86ChooseCmpImmediateOpcode()
1203 ResultReg = createResultReg(&X86::GR32RegClass); in X86SelectCmp()
1204 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc, TII.get(X86::MOV32r0), in X86SelectCmp()
1207 X86::sub_8bit); in X86SelectCmp()
1213 ResultReg = createResultReg(&X86::GR8RegClass); in X86SelectCmp()
1214 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc, TII.get(X86::MOV8ri), in X86SelectCmp()
1239 { X86::SETEr, X86::SETNPr, X86::AND8rr }, in X86SelectCmp()
1240 { X86::SETNEr, X86::SETPr, X86::OR8rr } in X86SelectCmp()
1249 ResultReg = createResultReg(&X86::GR8RegClass); in X86SelectCmp()
1254 unsigned FlagReg1 = createResultReg(&X86::GR8RegClass); in X86SelectCmp()
1255 unsigned FlagReg2 = createResultReg(&X86::GR8RegClass); in X86SelectCmp()
1266 X86::CondCode CC; in X86SelectCmp()
1269 assert(CC <= X86::LAST_VALID_COND && "Unexpected condition code."); in X86SelectCmp()
1270 unsigned Opc = X86::getSETFromCond(CC); in X86SelectCmp()
1309 case MVT::i8: MovInst = X86::MOVZX32rr8; break; in X86SelectZExt()
1310 case MVT::i16: MovInst = X86::MOVZX32rr16; break; in X86SelectZExt()
1311 case MVT::i32: MovInst = X86::MOV32rr; break; in X86SelectZExt()
1315 unsigned Result32 = createResultReg(&X86::GR32RegClass); in X86SelectZExt()
1319 ResultReg = createResultReg(&X86::GR64RegClass); in X86SelectZExt()
1322 .addImm(0).addReg(Result32).addImm(X86::sub_32bit); in X86SelectZExt()
1344 X86::CondCode CC; in X86SelectBranch()
1395 assert(CC <= X86::LAST_VALID_COND && "Unexpected condition code."); in X86SelectBranch()
1397 BranchOpc = X86::GetCondBranchFromCond(CC); in X86SelectBranch()
1411 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc, TII.get(X86::JP_1)) in X86SelectBranch()
1437 case MVT::i8: TestOpc = X86::TEST8ri; break; in X86SelectBranch()
1438 case MVT::i16: TestOpc = X86::TEST16ri; break; in X86SelectBranch()
1439 case MVT::i32: TestOpc = X86::TEST32ri; break; in X86SelectBranch()
1440 case MVT::i64: TestOpc = X86::TEST64ri32; break; in X86SelectBranch()
1448 unsigned JmpOpc = X86::JNE_1; in X86SelectBranch()
1451 JmpOpc = X86::JE_1; in X86SelectBranch()
1472 unsigned BranchOpc = X86::GetCondBranchFromCond(CC); in X86SelectBranch()
1491 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc, TII.get(X86::TEST8ri)) in X86SelectBranch()
1493 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc, TII.get(X86::JNE_1)) in X86SelectBranch()
1508 CReg = X86::CL; in X86SelectShift()
1509 RC = &X86::GR8RegClass; in X86SelectShift()
1511 case Instruction::LShr: OpReg = X86::SHR8rCL; break; in X86SelectShift()
1512 case Instruction::AShr: OpReg = X86::SAR8rCL; break; in X86SelectShift()
1513 case Instruction::Shl: OpReg = X86::SHL8rCL; break; in X86SelectShift()
1517 CReg = X86::CX; in X86SelectShift()
1518 RC = &X86::GR16RegClass; in X86SelectShift()
1520 case Instruction::LShr: OpReg = X86::SHR16rCL; break; in X86SelectShift()
1521 case Instruction::AShr: OpReg = X86::SAR16rCL; break; in X86SelectShift()
1522 case Instruction::Shl: OpReg = X86::SHL16rCL; break; in X86SelectShift()
1526 CReg = X86::ECX; in X86SelectShift()
1527 RC = &X86::GR32RegClass; in X86SelectShift()
1529 case Instruction::LShr: OpReg = X86::SHR32rCL; break; in X86SelectShift()
1530 case Instruction::AShr: OpReg = X86::SAR32rCL; break; in X86SelectShift()
1531 case Instruction::Shl: OpReg = X86::SHL32rCL; break; in X86SelectShift()
1535 CReg = X86::RCX; in X86SelectShift()
1536 RC = &X86::GR64RegClass; in X86SelectShift()
1538 case Instruction::LShr: OpReg = X86::SHR64rCL; break; in X86SelectShift()
1539 case Instruction::AShr: OpReg = X86::SAR64rCL; break; in X86SelectShift()
1540 case Instruction::Shl: OpReg = X86::SHL64rCL; break; in X86SelectShift()
1561 if (CReg != X86::CL) in X86SelectShift()
1563 TII.get(TargetOpcode::KILL), X86::CL) in X86SelectShift()
1604 { &X86::GR8RegClass, X86::AX, 0, { in X86SelectDivRem()
1605 { X86::IDIV8r, 0, X86::MOVSX16rr8, X86::AL, S }, // SDiv in X86SelectDivRem()
1606 { X86::IDIV8r, 0, X86::MOVSX16rr8, X86::AH, S }, // SRem in X86SelectDivRem()
1607 { X86::DIV8r, 0, X86::MOVZX16rr8, X86::AL, U }, // UDiv in X86SelectDivRem()
1608 { X86::DIV8r, 0, X86::MOVZX16rr8, X86::AH, U }, // URem in X86SelectDivRem()
1611 { &X86::GR16RegClass, X86::AX, X86::DX, { in X86SelectDivRem()
1612 { X86::IDIV16r, X86::CWD, Copy, X86::AX, S }, // SDiv in X86SelectDivRem()
1613 { X86::IDIV16r, X86::CWD, Copy, X86::DX, S }, // SRem in X86SelectDivRem()
1614 { X86::DIV16r, X86::MOV32r0, Copy, X86::AX, U }, // UDiv in X86SelectDivRem()
1615 { X86::DIV16r, X86::MOV32r0, Copy, X86::DX, U }, // URem in X86SelectDivRem()
1618 { &X86::GR32RegClass, X86::EAX, X86::EDX, { in X86SelectDivRem()
1619 { X86::IDIV32r, X86::CDQ, Copy, X86::EAX, S }, // SDiv in X86SelectDivRem()
1620 { X86::IDIV32r, X86::CDQ, Copy, X86::EDX, S }, // SRem in X86SelectDivRem()
1621 { X86::DIV32r, X86::MOV32r0, Copy, X86::EAX, U }, // UDiv in X86SelectDivRem()
1622 { X86::DIV32r, X86::MOV32r0, Copy, X86::EDX, U }, // URem in X86SelectDivRem()
1625 { &X86::GR64RegClass, X86::RAX, X86::RDX, { in X86SelectDivRem()
1626 { X86::IDIV64r, X86::CQO, Copy, X86::RAX, S }, // SDiv in X86SelectDivRem()
1627 { X86::IDIV64r, X86::CQO, Copy, X86::RDX, S }, // SRem in X86SelectDivRem()
1628 { X86::DIV64r, X86::MOV32r0, Copy, X86::RAX, U }, // UDiv in X86SelectDivRem()
1629 { X86::DIV64r, X86::MOV32r0, Copy, X86::RDX, U }, // URem in X86SelectDivRem()
1676 unsigned Zero32 = createResultReg(&X86::GR32RegClass); in X86SelectDivRem()
1678 TII.get(X86::MOV32r0), Zero32); in X86SelectDivRem()
1686 .addReg(Zero32, 0, X86::sub_16bit); in X86SelectDivRem()
1694 .addImm(0).addReg(Zero32).addImm(X86::sub_32bit); in X86SelectDivRem()
1712 OpEntry.DivRemResultReg == X86::AH && Subtarget->is64Bit()) { in X86SelectDivRem()
1713 unsigned SourceSuperReg = createResultReg(&X86::GR16RegClass); in X86SelectDivRem()
1714 unsigned ResultSuperReg = createResultReg(&X86::GR16RegClass); in X86SelectDivRem()
1716 TII.get(Copy), SourceSuperReg).addReg(X86::AX); in X86SelectDivRem()
1719 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc, TII.get(X86::SHR16ri), in X86SelectDivRem()
1724 /*Kill=*/true, X86::sub_8bit); in X86SelectDivRem()
1751 X86::CondCode CC = X86::COND_NE; in X86FastEmitCMoveSelect()
1762 { X86::SETNPr, X86::SETEr , X86::TEST8rr }, in X86FastEmitCMoveSelect()
1763 { X86::SETPr, X86::SETNEr, X86::OR8rr } in X86FastEmitCMoveSelect()
1780 assert(CC <= X86::LAST_VALID_COND && "Unexpected condition code."); in X86FastEmitCMoveSelect()
1793 unsigned FlagReg1 = createResultReg(&X86::GR8RegClass); in X86FastEmitCMoveSelect()
1794 unsigned FlagReg2 = createResultReg(&X86::GR8RegClass); in X86FastEmitCMoveSelect()
1801 unsigned TmpReg = createResultReg(&X86::GR8RegClass); in X86FastEmitCMoveSelect()
1831 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc, TII.get(X86::TEST8ri)) in X86FastEmitCMoveSelect()
1847 unsigned Opc = X86::getCMovFromCond(CC, RC->getSize()); in X86FastEmitCMoveSelect()
1896 { X86::CMPSSrr, X86::FsANDPSrr, X86::FsANDNPSrr, X86::FsORPSrr }, in X86FastEmitSSESelect()
1897 { X86::CMPSDrr, X86::FsANDPDrr, X86::FsANDNPDrr, X86::FsORPDrr } in X86FastEmitSSESelect()
1935 (RetVT.SimpleTy == MVT::f32) ? X86::VCMPSSrr : X86::VCMPSDrr; in X86FastEmitSSESelect()
1937 (RetVT.SimpleTy == MVT::f32) ? X86::VBLENDVPSrr : X86::VBLENDVPDrr; in X86FastEmitSSESelect()
1963 case MVT::i8: Opc = X86::CMOV_GR8; break; in X86FastEmitPseudoSelect()
1964 case MVT::i16: Opc = X86::CMOV_GR16; break; in X86FastEmitPseudoSelect()
1965 case MVT::i32: Opc = X86::CMOV_GR32; break; in X86FastEmitPseudoSelect()
1966 case MVT::f32: Opc = X86::CMOV_FR32; break; in X86FastEmitPseudoSelect()
1967 case MVT::f64: Opc = X86::CMOV_FR64; break; in X86FastEmitPseudoSelect()
1971 X86::CondCode CC = X86::COND_NE; in X86FastEmitPseudoSelect()
1980 if (CC > X86::LAST_VALID_COND) in X86FastEmitPseudoSelect()
1997 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc, TII.get(X86::TEST8ri)) in X86FastEmitPseudoSelect()
2081 Opcode = X86::VCVTSI2SDrr; in X86SelectSIToFP()
2082 RC = &X86::FR64RegClass; in X86SelectSIToFP()
2085 Opcode = X86::VCVTSI2SSrr; in X86SelectSIToFP()
2086 RC = &X86::FR32RegClass; in X86SelectSIToFP()
2131 unsigned Opc = Subtarget->hasAVX() ? X86::VCVTSS2SDrr : X86::CVTSS2SDrr; in X86SelectFPExt()
2132 return X86SelectFPExtOrFPTrunc(I, Opc, &X86::FR64RegClass); in X86SelectFPExt()
2142 unsigned Opc = Subtarget->hasAVX() ? X86::VCVTSD2SSrr : X86::CVTSD2SSrr; in X86SelectFPTrunc()
2143 return X86SelectFPExtOrFPTrunc(I, Opc, &X86::FR32RegClass); in X86SelectFPTrunc()
2174 (SrcVT == MVT::i16) ? &X86::GR16_ABCDRegClass : &X86::GR32_ABCDRegClass; in X86SelectTrunc()
2184 X86::sub_8bit); in X86SelectTrunc()
2263 InputReg = fastEmitInst_ri(X86::VCVTPS2PHrr, RC, InputReg, false, 0); in fastLowerIntrinsicCall()
2266 ResultReg = createResultReg(&X86::GR32RegClass); in fastLowerIntrinsicCall()
2268 TII.get(X86::VMOVPDI2DIrr), ResultReg) in fastLowerIntrinsicCall()
2272 unsigned RegIdx = X86::sub_16bit; in fastLowerIntrinsicCall()
2284 InputReg = fastEmitInst_r(X86::VCVTPH2PSrr, RC, InputReg, /*Kill=*/true); in fastLowerIntrinsicCall()
2288 ResultReg = createResultReg(&X86::FR32RegClass); in fastLowerIntrinsicCall()
2313 case MVT::i32: Opc = X86::MOV32rm; RC = &X86::GR32RegClass; break; in fastLowerIntrinsicCall()
2314 case MVT::i64: Opc = X86::MOV64rm; RC = &X86::GR64RegClass; break; in fastLowerIntrinsicCall()
2324 assert(((FrameReg == X86::RBP && VT == MVT::i64) || in fastLowerIntrinsicCall()
2325 (FrameReg == X86::EBP && VT == MVT::i32)) && in fastLowerIntrinsicCall()
2429 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc, TII.get(X86::TRAP)); in fastLowerIntrinsicCall()
2446 {X86::SQRTSSr, X86::VSQRTSSr}, in fastLowerIntrinsicCall()
2447 {X86::SQRTSDr, X86::VSQRTSDr} in fastLowerIntrinsicCall()
2454 case MVT::f32: Opc = SqrtOpc[0][HasAVX]; RC = &X86::FR32RegClass; break; in fastLowerIntrinsicCall()
2455 case MVT::f64: Opc = SqrtOpc[1][HasAVX]; RC = &X86::FR64RegClass; break; in fastLowerIntrinsicCall()
2521 CondOpc = X86::SETOr; in fastLowerIntrinsicCall()
2524 BaseOpc = ISD::ADD; CondOpc = X86::SETBr; break; in fastLowerIntrinsicCall()
2527 CondOpc = X86::SETOr; in fastLowerIntrinsicCall()
2530 BaseOpc = ISD::SUB; CondOpc = X86::SETBr; break; in fastLowerIntrinsicCall()
2532 BaseOpc = X86ISD::SMUL; CondOpc = X86::SETOr; break; in fastLowerIntrinsicCall()
2534 BaseOpc = X86ISD::UMUL; CondOpc = X86::SETOr; break; in fastLowerIntrinsicCall()
2546 { X86::INC8r, X86::INC16r, X86::INC32r, X86::INC64r }, in fastLowerIntrinsicCall()
2547 { X86::DEC8r, X86::DEC16r, X86::DEC32r, X86::DEC64r } in fastLowerIntrinsicCall()
2576 { X86::MUL8r, X86::MUL16r, X86::MUL32r, X86::MUL64r }; in fastLowerIntrinsicCall()
2577 static const unsigned Reg[] = { X86::AL, X86::AX, X86::EAX, X86::RAX }; in fastLowerIntrinsicCall()
2587 { X86::IMUL8r, X86::IMUL16rr, X86::IMUL32rr, X86::IMUL64rr }; in fastLowerIntrinsicCall()
2592 TII.get(TargetOpcode::COPY), X86::AL) in fastLowerIntrinsicCall()
2640 { { X86::CVTTSS2SIrr, X86::VCVTTSS2SIrr }, in fastLowerIntrinsicCall()
2641 { X86::CVTTSS2SI64rr, X86::VCVTTSS2SI64rr } }, in fastLowerIntrinsicCall()
2642 { { X86::CVTTSD2SIrr, X86::VCVTTSD2SIrr }, in fastLowerIntrinsicCall()
2643 { X86::CVTTSD2SI64rr, X86::VCVTTSD2SI64rr } } in fastLowerIntrinsicCall()
2741 X86::EDI, X86::ESI, X86::EDX, X86::ECX, X86::R8D, X86::R9D in fastLowerArguments()
2744 X86::RDI, X86::RSI, X86::RDX, X86::RCX, X86::R8 , X86::R9 in fastLowerArguments()
2747 X86::XMM0, X86::XMM1, X86::XMM2, X86::XMM3, in fastLowerArguments()
2748 X86::XMM4, X86::XMM5, X86::XMM6, X86::XMM7 in fastLowerArguments()
2840 if (X86::isCalleePop(CC, Subtarget->is64Bit(), IsVarArg, in fastLowerCall()
3027 TII.get(TargetOpcode::COPY), X86::EBX).addReg(Base); in fastLowerCall()
3041 X86::XMM0, X86::XMM1, X86::XMM2, X86::XMM3, in fastLowerCall()
3042 X86::XMM4, X86::XMM5, X86::XMM6, X86::XMM7 in fastLowerCall()
3047 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc, TII.get(X86::MOV8ri), in fastLowerCall()
3048 X86::AL).addImm(NumXMMRegs); in fastLowerCall()
3070 unsigned CallOpc = Is64Bit ? X86::CALL64r : X86::CALL32r; in fastLowerCall()
3076 unsigned CallOpc = Is64Bit ? X86::CALL64pcrel32 : X86::CALLpcrel32; in fastLowerCall()
3112 MIB.addReg(X86::EBX, RegState::Implicit); in fastLowerCall()
3115 MIB.addReg(X86::AL, RegState::Implicit); in fastLowerCall()
3149 if ((VA.getLocReg() == X86::FP0 || VA.getLocReg() == X86::FP1) && in fastLowerCall()
3152 CopyReg = createResultReg(&X86::RFP80RegClass); in fastLowerCall()
3165 unsigned Opc = ResVT == MVT::f32 ? X86::ST_Fp80m32 : X86::ST_Fp80m64; in fastLowerCall()
3171 Opc = ResVT == MVT::f32 ? X86::MOVSSrm : X86::MOVSDrm; in fastLowerCall()
3244 unsigned SrcReg = fastEmitInst_(X86::MOV32r0, &X86::GR32RegClass); in X86MaterializeInt()
3250 X86::sub_8bit); in X86MaterializeInt()
3253 X86::sub_16bit); in X86MaterializeInt()
3257 unsigned ResultReg = createResultReg(&X86::GR64RegClass); in X86MaterializeInt()
3260 .addImm(0).addReg(SrcReg).addImm(X86::sub_32bit); in X86MaterializeInt()
3270 case MVT::i8: Opc = X86::MOV8ri; break; in X86MaterializeInt()
3271 case MVT::i16: Opc = X86::MOV16ri; break; in X86MaterializeInt()
3272 case MVT::i32: Opc = X86::MOV32ri; break; in X86MaterializeInt()
3275 Opc = X86::MOV32ri; in X86MaterializeInt()
3277 Opc = X86::MOV64ri32; in X86MaterializeInt()
3279 Opc = X86::MOV64ri; in X86MaterializeInt()
3283 if (VT == MVT::i64 && Opc == X86::MOV32ri) { in X86MaterializeInt()
3284 unsigned SrcReg = fastEmitInst_i(Opc, &X86::GR32RegClass, Imm); in X86MaterializeInt()
3285 unsigned ResultReg = createResultReg(&X86::GR64RegClass); in X86MaterializeInt()
3288 .addImm(0).addReg(SrcReg).addImm(X86::sub_32bit); in X86MaterializeInt()
3310 Opc = Subtarget->hasAVX() ? X86::VMOVSSrm : X86::MOVSSrm; in X86MaterializeFP()
3311 RC = &X86::FR32RegClass; in X86MaterializeFP()
3313 Opc = X86::LD_Fp32m; in X86MaterializeFP()
3314 RC = &X86::RFP32RegClass; in X86MaterializeFP()
3319 Opc = Subtarget->hasAVX() ? X86::VMOVSDrm : X86::MOVSDrm; in X86MaterializeFP()
3320 RC = &X86::FR64RegClass; in X86MaterializeFP()
3322 Opc = X86::LD_Fp64m; in X86MaterializeFP()
3323 RC = &X86::RFP64RegClass; in X86MaterializeFP()
3349 PICBase = X86::RIP; in X86MaterializeFP()
3357 unsigned AddrReg = createResultReg(&X86::GR64RegClass); in X86MaterializeFP()
3358 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc, TII.get(X86::MOV64ri), in X86MaterializeFP()
3396 BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc, TII.get(X86::MOV64ri), in X86MaterializeGV()
3402 ? X86::LEA64_32r : X86::LEA32r) in X86MaterializeGV()
3403 : X86::LEA64r; in X86MaterializeGV()
3447 ? X86::LEA64_32r : X86::LEA32r) in fastMaterializeAlloca()
3448 : X86::LEA64r; in fastMaterializeAlloca()
3468 Opc = X86::FsFLD0SS; in fastMaterializeFloatZero()
3469 RC = &X86::FR32RegClass; in fastMaterializeFloatZero()
3471 Opc = X86::LD_Fp032; in fastMaterializeFloatZero()
3472 RC = &X86::RFP32RegClass; in fastMaterializeFloatZero()
3477 Opc = X86::FsFLD0SD; in fastMaterializeFloatZero()
3478 RC = &X86::FR64RegClass; in fastMaterializeFloatZero()
3480 Opc = X86::LD_Fp064; in fastMaterializeFloatZero()
3481 RC = &X86::RFP64RegClass; in fastMaterializeFloatZero()
3527 FastISel *X86::createFastISel(FunctionLoweringInfo &funcInfo, in createFastISel()