
Searched refs:LoadMI (Results 1 – 15 of 15) sorted by relevance

/external/llvm/lib/CodeGen/
ImplicitNullChecks.cpp:98 MachineInstr *insertFaultingLoad(MachineInstr *LoadMI, MachineBasicBlock *MBB,
349 MachineInstr *ImplicitNullChecks::insertFaultingLoad(MachineInstr *LoadMI, in insertFaultingLoad() argument
356 unsigned NumDefs = LoadMI->getDesc().getNumDefs(); in insertFaultingLoad()
361 DefReg = LoadMI->defs().begin()->getReg(); in insertFaultingLoad()
362 assert(std::distance(LoadMI->defs().begin(), LoadMI->defs().end()) == 1 && in insertFaultingLoad()
368 .addImm(LoadMI->getOpcode()); in insertFaultingLoad()
370 for (auto &MO : LoadMI->uses()) in insertFaultingLoad()
373 MIB.setMemRefs(LoadMI->memoperands_begin(), LoadMI->memoperands_end()); in insertFaultingLoad()
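
The ImplicitNullChecks matches above are the construction site for FAULTING_LOAD_OP pseudos. As a reading aid, here is a hedged sketch of the pattern those fragments trace; TII, DL, and HandlerLabel are context assumed from the surrounding pass, not shown in the results:

    // Sketch only: wrap LoadMI in a FAULTING_LOAD_OP pseudo that records the
    // original opcode and operands, so it can be re-expanded at MC lowering.
    unsigned NumDefs = LoadMI->getDesc().getNumDefs();
    assert(NumDefs <= 1 && "multiple defs not expected here");
    unsigned DefReg = 0; // NoRegister
    if (NumDefs != 0) {
      DefReg = LoadMI->defs().begin()->getReg();
      assert(std::distance(LoadMI->defs().begin(), LoadMI->defs().end()) == 1 &&
             "expected exactly one def");
    }
    auto MIB = BuildMI(MBB, DL, TII->get(TargetOpcode::FAULTING_LOAD_OP), DefReg)
                   .addSym(HandlerLabel)           // assumed handler-label operand
                   .addImm(LoadMI->getOpcode());   // stash the real load opcode
    for (auto &MO : LoadMI->uses())                // copy the load's use operands
      MIB.addOperand(MO);
    MIB.setMemRefs(LoadMI->memoperands_begin(), LoadMI->memoperands_end());
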
TargetInstrInfo.cpp:779 MachineInstr *LoadMI) const { in foldMemoryOperand()
780 assert(LoadMI->canFoldAsLoad() && "LoadMI isn't foldable!"); in foldMemoryOperand()
794 isLoadFromStackSlot(LoadMI, FrameIndex)) { in foldMemoryOperand()
801 NewMI = foldMemoryOperandImpl(MF, MI, Ops, MI, LoadMI); in foldMemoryOperand()
808 NewMI->setMemRefs(LoadMI->memoperands_begin(), in foldMemoryOperand()
809 LoadMI->memoperands_end()); in foldMemoryOperand()
815 for (MachineInstr::mmo_iterator I = LoadMI->memoperands_begin(), in foldMemoryOperand()
816 E = LoadMI->memoperands_end(); I != E; ++I) { in foldMemoryOperand()
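
These TargetInstrInfo.cpp matches are the target-independent folding driver. A hedged sketch of the flow the fragments trace (MF, MI, and Ops are assumed from the enclosing foldMemoryOperand(); the stack-slot fast path and insertion of NewMI into the block are elided):

    // Fold the load LoadMI into MI, then carry the load's memory operands
    // over to the folded instruction.
    assert(LoadMI->canFoldAsLoad() && "LoadMI isn't foldable!");

    MachineInstr *NewMI = nullptr;
    int FrameIndex = 0;
    if (isLoadFromStackSlot(LoadMI, FrameIndex)) {
      // Stack-slot path (guarded by a STACKMAP/PATCHPOINT check in the full
      // file, per line 794); folds against the frame index instead.
    } else {
      NewMI = foldMemoryOperandImpl(MF, MI, Ops, MI, LoadMI); // target hook, line 801
    }
    if (!NewMI)
      return nullptr;

    if (MI->memoperands_empty()) {
      NewMI->setMemRefs(LoadMI->memoperands_begin(), LoadMI->memoperands_end());
    } else {
      // Rare case: MI already has memoperands (folding multiple loads); keep
      // them and append LoadMI's, per the mmo_iterator loop at lines 815-816.
      NewMI->setMemRefs(MI->memoperands_begin(), MI->memoperands_end());
      for (MachineInstr::mmo_iterator I = LoadMI->memoperands_begin(),
                                      E = LoadMI->memoperands_end();
           I != E; ++I)
        NewMI->addMemOperand(MF, *I);
    }
    return NewMI;
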
InlineSpiller.cpp:177 MachineInstr *LoadMI = nullptr);
1083 MachineInstr *LoadMI) { in foldMemoryOperand() argument
1113 if (LoadMI && MO.isDef()) in foldMemoryOperand()
1123 LoadMI ? TII.foldMemoryOperand(MI, FoldOps, LoadMI) in foldMemoryOperand()
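
The InlineSpiller matches show the call side. A hedged sketch of the dispatch, simplified from the real operand walk (FoldOps, StackSlot, TII, and the OpIndices name are assumed from the surrounding spiller state):

    // Collect foldable operand indices. A def cannot be folded into a
    // rematerialized load, so bail out in that case (line 1113).
    SmallVector<unsigned, 8> FoldOps;
    for (unsigned Idx : OpIndices) {
      MachineOperand &MO = MI->getOperand(Idx);
      if (LoadMI && MO.isDef())
        return false;
      FoldOps.push_back(Idx);
    }

    // With a LoadMI, fold the remat load; otherwise fold the spill slot.
    MachineInstr *FoldMI =
        LoadMI ? TII.foldMemoryOperand(MI, FoldOps, LoadMI)
               : TII.foldMemoryOperand(MI, FoldOps, StackSlot);
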
/external/mesa3d/src/gallium/drivers/radeon/
AMDGPUInstrInfo.h:97 MachineInstr *LoadMI) const;
AMDGPUInstrInfo.cpp:152 MachineInstr *LoadMI) const { in foldMemoryOperandImpl()
/external/llvm/lib/Target/AMDGPU/
AMDGPUInstrInfo.h:95 MachineInstr *LoadMI) const override;
AMDGPUInstrInfo.cpp:163 MachineBasicBlock::iterator InsertPt, MachineInstr *LoadMI) const { in foldMemoryOperandImpl()
/external/llvm/lib/Target/SystemZ/
SystemZInstrInfo.h:193 MachineInstr *LoadMI) const override;
SystemZInstrInfo.cpp:892 MachineBasicBlock::iterator InsertPt, MachineInstr *LoadMI) const { in foldMemoryOperandImpl()
/external/llvm/include/llvm/Target/
TargetInstrInfo.h:792 MachineInstr *LoadMI) const;
878 MachineBasicBlock::iterator InsertPt, MachineInstr *LoadMI) const { in foldMemoryOperandImpl() argument
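
The TargetInstrInfo.h match at :878 is the default foldMemoryOperandImpl() hook that the AMDGPU, SystemZ, and X86 hits above and below override. The excerpt cuts off the body; presumably it is a do-nothing stub of this shape (the nullptr return is an assumption consistent with the override pattern):

    // Default target hook: no folding performed; targets override this.
    virtual MachineInstr *foldMemoryOperandImpl(
        MachineFunction &MF, MachineInstr *MI, ArrayRef<unsigned> Ops,
        MachineBasicBlock::iterator InsertPt, MachineInstr *LoadMI) const {
      return nullptr;
    }
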
/external/llvm/lib/Target/X86/
X86MCInstLower.cpp:906 MCInst LoadMI; in LowerFAULTING_LOAD_OP() local
907 LoadMI.setOpcode(LoadOpcode); in LowerFAULTING_LOAD_OP()
910 LoadMI.addOperand(MCOperand::createReg(LoadDefRegister)); in LowerFAULTING_LOAD_OP()
916 LoadMI.addOperand(MaybeOperand.getValue()); in LowerFAULTING_LOAD_OP()
918 OutStreamer->EmitInstruction(LoadMI, getSubtargetInfo()); in LowerFAULTING_LOAD_OP()
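
The X86MCInstLower matches are the other end of the FAULTING_LOAD_OP story: re-expanding the pseudo into the real load at MC emission. A hedged sketch (the pseudo's operand layout and the MCIL lowering helper are assumptions from the surrounding file, not confirmed by the excerpt):

    // Rebuild the original load as an MCInst from the pseudo's operands.
    MCInst LoadMI;
    LoadMI.setOpcode(LoadOpcode);                 // opcode stashed in the pseudo
    LoadMI.addOperand(MCOperand::createReg(LoadDefRegister));
    // Remaining pseudo operands are the load's own operands (starting the
    // walk at index 3 is an assumption about the pseudo's layout).
    for (auto I = MI.operands_begin() + 3, E = MI.operands_end(); I != E; ++I)
      if (auto MaybeOperand = MCIL.LowerMachineOperand(&MI, *I))
        LoadMI.addOperand(MaybeOperand.getValue());
    OutStreamer->EmitInstruction(LoadMI, getSubtargetInfo());
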
X86InstrInfo.h:381 MachineInstr *LoadMI) const override;
X86InstrInfo.cpp:5971 static bool isNonFoldablePartialRegisterLoad(const MachineInstr &LoadMI, in isNonFoldablePartialRegisterLoad() argument
5974 unsigned Opc = LoadMI.getOpcode(); in isNonFoldablePartialRegisterLoad()
5977 MF.getRegInfo().getRegClass(LoadMI.getOperand(0).getReg())->getSize(); in isNonFoldablePartialRegisterLoad()
6026 MachineBasicBlock::iterator InsertPt, MachineInstr *LoadMI) const { in foldMemoryOperandImpl()
6028 unsigned NumOps = LoadMI->getDesc().getNumOperands(); in foldMemoryOperandImpl()
6030 if (isLoadFromStackSlot(LoadMI, FrameIndex)) { in foldMemoryOperandImpl()
6031 if (isNonFoldablePartialRegisterLoad(*LoadMI, *MI, MF)) in foldMemoryOperandImpl()
6045 if (LoadMI->hasOneMemOperand()) in foldMemoryOperandImpl()
6046 Alignment = (*LoadMI->memoperands_begin())->getAlignment(); in foldMemoryOperandImpl()
6048 switch (LoadMI->getOpcode()) { in foldMemoryOperandImpl()
[all …]
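
The X86 override shows the target-side gatekeeping before folding. A hedged sketch of the checks the fragments trace (the opcode-specific handling after the switch is truncated in the results and elided here as well):

    // Stack-slot loads fold directly against the frame index, unless folding
    // would turn a partial-register load into a wider access.
    int FrameIndex;
    if (isLoadFromStackSlot(LoadMI, FrameIndex)) {
      if (isNonFoldablePartialRegisterLoad(*LoadMI, *MI, MF))
        return nullptr;
      return foldMemoryOperandImpl(MF, MI, Ops, InsertPt, FrameIndex);
    }

    // Otherwise take the load's alignment from its single memory operand,
    // falling back to per-opcode handling when there isn't one.
    unsigned Alignment = 0;
    if (LoadMI->hasOneMemOperand())
      Alignment = (*LoadMI->memoperands_begin())->getAlignment();
    else
      switch (LoadMI->getOpcode()) {
        // opcode-specific cases, truncated in the results above
        default: break;
      }
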
X86FastISel.cpp:652 MachineInstrBuilder LoadMI = in handleConstantAddresses() local
654 addFullAddress(LoadMI, StubAM); in handleConstantAddresses()
/external/llvm/lib/Target/AArch64/
AArch64FastISel.cpp:4338 const auto *LoadMI = MI; in optimizeIntExtLoad() local
4339 if (LoadMI->getOpcode() == TargetOpcode::COPY && in optimizeIntExtLoad()
4340 LoadMI->getOperand(1).getSubReg() == AArch64::sub_32) { in optimizeIntExtLoad()
4342 LoadMI = MRI.getUniqueVRegDef(LoadReg); in optimizeIntExtLoad()
4343 assert(LoadMI && "Expected valid instruction"); in optimizeIntExtLoad()
4345 if (!(IsZExt && isZExtLoad(LoadMI)) && !(!IsZExt && isSExtLoad(LoadMI))) in optimizeIntExtLoad()
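
The AArch64FastISel matches use LoadMI differently: not for folding, but to look through a sub-register COPY back to the defining load when eliding a redundant integer extend. A hedged sketch reconstructed from the fragments (MRI, IsZExt, and the isZExtLoad/isSExtLoad helpers are assumed from the surrounding file):

    // If MI is a COPY of the low 32 bits, walk back to the unique vreg def;
    // that def is the load we actually care about.
    const MachineInstr *LoadMI = MI;
    if (LoadMI->getOpcode() == TargetOpcode::COPY &&
        LoadMI->getOperand(1).getSubReg() == AArch64::sub_32) {
      unsigned LoadReg = MI->getOperand(1).getReg();
      LoadMI = MRI.getUniqueVRegDef(LoadReg);
      assert(LoadMI && "Expected valid instruction");
    }
    // The extend is redundant only if the load already extends the same way.
    if (!(IsZExt && isZExtLoad(LoadMI)) && !(!IsZExt && isSExtLoad(LoadMI)))
      return false;
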