Searched refs:LoadMI (Results 1 – 15 of 15) sorted by relevance
/external/llvm/lib/CodeGen/
D | ImplicitNullChecks.cpp |
      98  MachineInstr *insertFaultingLoad(MachineInstr *LoadMI, MachineBasicBlock *MBB,
     349  MachineInstr *ImplicitNullChecks::insertFaultingLoad(MachineInstr *LoadMI,  in insertFaultingLoad() argument
     356  unsigned NumDefs = LoadMI->getDesc().getNumDefs();  in insertFaultingLoad()
     361  DefReg = LoadMI->defs().begin()->getReg();  in insertFaultingLoad()
     362  assert(std::distance(LoadMI->defs().begin(), LoadMI->defs().end()) == 1 &&  in insertFaultingLoad()
     368  .addImm(LoadMI->getOpcode());  in insertFaultingLoad()
     370  for (auto &MO : LoadMI->uses())  in insertFaultingLoad()
     373  MIB.setMemRefs(LoadMI->memoperands_begin(), LoadMI->memoperands_end());  in insertFaultingLoad()
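Taken together, these hits give the shape of insertFaultingLoad(): it wraps an ordinary load in a FAULTING_LOAD_OP pseudo whose operands are the fault-handler label, the original load opcode as an immediate (line 368), the load's use operands (line 370), and its memory operands (line 373). A minimal sketch reconstructed from the fragments above; the DebugLoc, the HandlerLabel parameter, and the TII pass member are assumed from context rather than shown in the hits:

    // Sketch only: reconstructed from the search hits, not the full pass.
    MachineInstr *insertFaultingLoad(MachineInstr *LoadMI, MachineBasicBlock *MBB,
                                     MCSymbol *HandlerLabel) {
      DebugLoc DL;  // assumed: the pseudo carries no source location
      unsigned NumDefs = LoadMI->getDesc().getNumDefs();   // line 356
      unsigned DefReg = 0;  // 0 == NoRegister on all targets
      if (NumDefs != 0) {
        DefReg = LoadMI->defs().begin()->getReg();         // line 361
        assert(std::distance(LoadMI->defs().begin(), LoadMI->defs().end()) == 1 &&
               "expected exactly one def!");               // line 362
      }
      auto MIB = BuildMI(MBB, DL, TII->get(TargetOpcode::FAULTING_LOAD_OP), DefReg)
                     .addSym(HandlerLabel)
                     .addImm(LoadMI->getOpcode());         // line 368
      for (auto &MO : LoadMI->uses())                      // line 370
        MIB.addOperand(MO);
      MIB.setMemRefs(LoadMI->memoperands_begin(),          // line 373
                     LoadMI->memoperands_end());
      return MIB;
    }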
|
D | TargetInstrInfo.cpp |
     779  MachineInstr *LoadMI) const {  in foldMemoryOperand()
     780  assert(LoadMI->canFoldAsLoad() && "LoadMI isn't foldable!");  in foldMemoryOperand()
     794  isLoadFromStackSlot(LoadMI, FrameIndex)) {  in foldMemoryOperand()
     801  NewMI = foldMemoryOperandImpl(MF, MI, Ops, MI, LoadMI);  in foldMemoryOperand()
     808  NewMI->setMemRefs(LoadMI->memoperands_begin(),  in foldMemoryOperand()
     809  LoadMI->memoperands_end());  in foldMemoryOperand()
     815  for (MachineInstr::mmo_iterator I = LoadMI->memoperands_begin(),  in foldMemoryOperand()
     816  E = LoadMI->memoperands_end(); I != E; ++I) {  in foldMemoryOperand()
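These hits outline the target-independent driver: it asserts the load is foldable (line 780), asks the target-specific foldMemoryOperandImpl to rewrite MI (line 801), and transfers the load's memory operands onto the result (lines 808-816). A condensed sketch follows; the isLoadFromStackSlot check at line 794 guards a STACKMAP/PATCHPOINT special case that is elided here, and the loop at lines 815-816 handles the rare case where MI already carries memoperands of its own:

    // Condensed sketch of the flow visible at lines 779-816.
    MachineInstr *TargetInstrInfo::foldMemoryOperand(MachineBasicBlock::iterator MI,
                                                     ArrayRef<unsigned> Ops,
                                                     MachineInstr *LoadMI) const {
      assert(LoadMI->canFoldAsLoad() && "LoadMI isn't foldable!");  // line 780
      MachineFunction &MF = *MI->getParent()->getParent();

      // Ask the target to rewrite MI so it reads LoadMI's memory directly.
      MachineInstr *NewMI = foldMemoryOperandImpl(MF, MI, Ops, MI, LoadMI);
      if (!NewMI)
        return nullptr;  // target declined to fold

      // The folded instruction inherits the load's memory operands.
      NewMI->setMemRefs(LoadMI->memoperands_begin(), LoadMI->memoperands_end());
      return NewMI;
    }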
|
D | InlineSpiller.cpp |
     177  MachineInstr *LoadMI = nullptr);
    1083  MachineInstr *LoadMI) {  in foldMemoryOperand() argument
    1113  if (LoadMI && MO.isDef())  in foldMemoryOperand()
    1123  LoadMI ? TII.foldMemoryOperand(MI, FoldOps, LoadMI)  in foldMemoryOperand()
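The InlineSpiller hits show the caller's side: LoadMI defaults to nullptr (line 177), def operands are refused when a load is being folded (line 1113, since a folded load can only feed uses), and line 1123 picks between folding the spill slot itself and folding a rematerialized load. The surrounding pattern, with StackSlot taken from the spiller's state:

    // Caller-side pattern around line 1123.
    MachineInstr *FoldMI =
        LoadMI ? TII.foldMemoryOperand(MI, FoldOps, LoadMI)
               : TII.foldMemoryOperand(MI, FoldOps, StackSlot);
    if (!FoldMI)
      return false;  // the target declined; keep the explicit reload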
|
/external/mesa3d/src/gallium/drivers/radeon/ |
D | AMDGPUInstrInfo.h |
      97  MachineInstr *LoadMI) const;
|
D | AMDGPUInstrInfo.cpp |
     152  MachineInstr *LoadMI) const {  in foldMemoryOperandImpl()
|
/external/llvm/lib/Target/AMDGPU/ |
D | AMDGPUInstrInfo.h |
      95  MachineInstr *LoadMI) const override;
|
D | AMDGPUInstrInfo.cpp |
     163  MachineBasicBlock::iterator InsertPt, MachineInstr *LoadMI) const {  in foldMemoryOperandImpl()
|
/external/llvm/lib/Target/SystemZ/ |
D | SystemZInstrInfo.h |
     193  MachineInstr *LoadMI) const override;
|
D | SystemZInstrInfo.cpp |
     892  MachineBasicBlock::iterator InsertPt, MachineInstr *LoadMI) const {  in foldMemoryOperandImpl()
|
/external/llvm/include/llvm/Target/ |
D | TargetInstrInfo.h |
     792  MachineInstr *LoadMI) const;
     878  MachineBasicBlock::iterator InsertPt, MachineInstr *LoadMI) const {  in foldMemoryOperandImpl() argument
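The hit at line 878 ends in an opening brace and carries the "argument" annotation, so the base class evidently defines foldMemoryOperandImpl inline. The conventional body (assumed here, since only the signature appears in the hit) declines to fold, which is why sparsely implemented targets such as the AMDGPU files above can inherit or trivially override it:

    // Presumed inline default in TargetInstrInfo.h: returning nullptr
    // means "this target cannot fold a load into this instruction".
    virtual MachineInstr *foldMemoryOperandImpl(
        MachineFunction &MF, MachineInstr *MI, ArrayRef<unsigned> Ops,
        MachineBasicBlock::iterator InsertPt, MachineInstr *LoadMI) const {
      return nullptr;
    }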
|
/external/llvm/lib/Target/X86/ |
D | X86MCInstLower.cpp |
     906  MCInst LoadMI;  in LowerFAULTING_LOAD_OP() local
     907  LoadMI.setOpcode(LoadOpcode);  in LowerFAULTING_LOAD_OP()
     910  LoadMI.addOperand(MCOperand::createReg(LoadDefRegister));  in LowerFAULTING_LOAD_OP()
     916  LoadMI.addOperand(MaybeOperand.getValue());  in LowerFAULTING_LOAD_OP()
     918  OutStreamer->EmitInstruction(LoadMI, getSubtargetInfo());  in LowerFAULTING_LOAD_OP()
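On the emission side, LowerFAULTING_LOAD_OP() unwraps the pseudo built by ImplicitNullChecks: it rebuilds the original load as a bare MCInst from the stored opcode and operands, then streams it out at the faulting address. A sketch assembled from lines 906-918; LoadOpcode, LoadDefRegister, the LoadOperands range, and the MCIL lowering helper come from the surrounding function and are abbreviated here:

    MCInst LoadMI;                                   // line 906
    LoadMI.setOpcode(LoadOpcode);                    // the wrapped load's opcode
    if (LoadDefRegister != X86::NoRegister)          // re-attach the def, if any
      LoadMI.addOperand(MCOperand::createReg(LoadDefRegister));
    for (const MachineOperand &MO : LoadOperands)    // assumed operand range
      if (auto MaybeOperand = MCIL.LowerMachineOperand(&MI, MO))
        LoadMI.addOperand(MaybeOperand.getValue());  // line 916
    OutStreamer->EmitInstruction(LoadMI, getSubtargetInfo());  // line 918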
|
D | X86InstrInfo.h |
     381  MachineInstr *LoadMI) const override;
|
D | X86InstrInfo.cpp |
    5971  static bool isNonFoldablePartialRegisterLoad(const MachineInstr &LoadMI,  in isNonFoldablePartialRegisterLoad() argument
    5974  unsigned Opc = LoadMI.getOpcode();  in isNonFoldablePartialRegisterLoad()
    5977  MF.getRegInfo().getRegClass(LoadMI.getOperand(0).getReg())->getSize();  in isNonFoldablePartialRegisterLoad()
    6026  MachineBasicBlock::iterator InsertPt, MachineInstr *LoadMI) const {  in foldMemoryOperandImpl()
    6028  unsigned NumOps = LoadMI->getDesc().getNumOperands();  in foldMemoryOperandImpl()
    6030  if (isLoadFromStackSlot(LoadMI, FrameIndex)) {  in foldMemoryOperandImpl()
    6031  if (isNonFoldablePartialRegisterLoad(*LoadMI, *MI, MF))  in foldMemoryOperandImpl()
    6045  if (LoadMI->hasOneMemOperand())  in foldMemoryOperandImpl()
    6046  Alignment = (*LoadMI->memoperands_begin())->getAlignment();  in foldMemoryOperandImpl()
    6048  switch (LoadMI->getOpcode()) {  in foldMemoryOperandImpl()
    [all …]
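The first three hits are the interesting part: isNonFoldablePartialRegisterLoad() guards against folding a load that writes fewer bytes than its destination register class holds, since the folded user would then read the full register width from memory and pick up bytes the load never defined. A simplified sketch of the idea; the real function also whitelists scalar users by opcode, and that switch is collapsed here:

    // Simplified from lines 5971-5977.
    static bool isNonFoldablePartialRegisterLoad(const MachineInstr &LoadMI,
                                                 const MachineInstr &UserMI,
                                                 const MachineFunction &MF) {
      unsigned Opc = LoadMI.getOpcode();
      unsigned RegSize =
          MF.getRegInfo().getRegClass(LoadMI.getOperand(0).getReg())->getSize();
      // e.g. MOVSSrm loads only 4 bytes into a 16-byte XMM register; if the
      // user is not a scalar (SS) instruction, folding would read 16 bytes.
      if (Opc == X86::MOVSSrm && RegSize > 4)
        return true;  // collapsed: real code returns false for scalar users
      return false;
    }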
|
D | X86FastISel.cpp |
     652  MachineInstrBuilder LoadMI =  in handleConstantAddresses() local
     654  addFullAddress(LoadMI, StubAM);  in handleConstantAddresses()
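The FastISel hit is a straightforward use of the builder API: a global's address is materialized by loading it through its stub, with addFullAddress() appending the five x86 memory operands. Roughly, with Opc, LoadReg, and StubAM assumed from the surrounding function:

    // Pattern at lines 652-654: emit "LoadReg = load [StubAM]".
    MachineInstrBuilder LoadMI =
        BuildMI(*FuncInfo.MBB, FuncInfo.InsertPt, DbgLoc, TII.get(Opc), LoadReg);
    addFullAddress(LoadMI, StubAM);  // base/scale/index/disp/segment operands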
|
/external/llvm/lib/Target/AArch64/ |
D | AArch64FastISel.cpp |
    4338  const auto *LoadMI = MI;  in optimizeIntExtLoad() local
    4339  if (LoadMI->getOpcode() == TargetOpcode::COPY &&  in optimizeIntExtLoad()
    4340  LoadMI->getOperand(1).getSubReg() == AArch64::sub_32) {  in optimizeIntExtLoad()
    4342  LoadMI = MRI.getUniqueVRegDef(LoadReg);  in optimizeIntExtLoad()
    4343  assert(LoadMI && "Expected valid instruction");  in optimizeIntExtLoad()
    4345  if (!(IsZExt && isZExtLoad(LoadMI)) && !(!IsZExt && isSExtLoad(LoadMI)))  in optimizeIntExtLoad()
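The AArch64 hits spell out a small idiom: an i32-to-i64 zero-extend is commonly represented as a COPY into the sub_32 subregister, so the pass first looks through that copy to the defining load, then checks that the load already performs the same kind of extension before treating the extend as redundant. Reassembled from lines 4338-4345:

    const auto *LoadMI = MI;
    if (LoadMI->getOpcode() == TargetOpcode::COPY &&
        LoadMI->getOperand(1).getSubReg() == AArch64::sub_32) {
      unsigned LoadReg = MI->getOperand(1).getReg();  // look through the copy
      LoadMI = MRI.getUniqueVRegDef(LoadReg);
      assert(LoadMI && "Expected valid instruction");
    }
    // The extend is redundant only if the load extends the same way.
    if (!(IsZExt && isZExtLoad(LoadMI)) && !(!IsZExt && isSExtLoad(LoadMI)))
      return false;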
|