/external/llvm/include/llvm/CodeGen/ |
D | ValueTypes.h | 207 bool bitsLT(EVT VT) const { in bitsLT() function
|
D | MachineValueType.h | 535 bool bitsLT(MVT VT) const { in bitsLT() function
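Both declarations above compare value types by total bit width (getSizeInBits()), returning true when the receiver is strictly narrower. A minimal usage sketch assuming that comparison; the driver itself is illustrative, not part of LLVM, and has to be compiled against an LLVM tree:

    // Illustrative only: exercises the real MVT/EVT bitsLT() comparisons
    // declared at ValueTypes.h:207 and MachineValueType.h:535 above.
    #include "llvm/CodeGen/MachineValueType.h"
    #include "llvm/CodeGen/ValueTypes.h"
    #include <cassert>

    using namespace llvm;

    int main() {
      // bitsLT() is the "strictly narrower than" test used by the hits below.
      assert(MVT(MVT::i16).bitsLT(MVT::i32));       // 16 bits < 32 bits
      assert(!MVT(MVT::i64).bitsLT(MVT::i32));      // 64 bits is not < 32 bits
      assert(EVT(MVT::f32).bitsLT(EVT(MVT::f64)));  // also defined for FP types
      // Vectors compare by total width: v4i8 is 32 bits, v4i32 is 128 bits.
      assert(EVT(MVT::v4i8).bitsLT(EVT(MVT::v4i32)));
      return 0;
    }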
|
/external/llvm/lib/Target/ARM/ |
D | ARMSelectionDAGInfo.cpp | 96 else if (Src.getValueType().bitsLT(MVT::i32)) in EmitSpecializedLibcall()
|
D | ARMISelLowering.cpp | 5636 if (SrcEltTy.bitsLT(SmallestEltTy)) in ReconstructShuffle()
|
/external/llvm/lib/CodeGen/ |
D | TargetLoweringBase.cpp | 1072 if (EVT(DestVT).bitsLT(NewVT)) // Value is expanded, e.g. i64 -> i16. in getVectorTypeBreakdownMVT()
  |                        | 1442 if (EVT(DestVT).bitsLT(NewVT)) // Value is expanded, e.g. i64 -> i16. in getVectorTypeBreakdown()
  |                        | 1477 if (VT.bitsLT(MinVT)) in GetReturnInfo()
|
D | CodeGenPrepare.cpp | 784 if (SrcVT.bitsLT(DstVT)) return false; in OptimizeNoopCopyExpression()
|
/external/llvm/lib/CodeGen/SelectionDAG/ |
D | SelectionDAG.cpp | 3027 assert(Operand.getValueType().bitsLT(VT) && in getNode()
  |                  | 3040 assert(Operand.getValueType().bitsLT(VT) && in getNode()
  |                  | 3056 assert(Operand.getValueType().bitsLT(VT) && in getNode()
  |                  | 3073 assert(Operand.getValueType().bitsLT(VT) && in getNode()
  |                  | 3106 .bitsLT(VT.getScalarType())) in getNode()
  |                  | 3350 if (LegalSVT.bitsLT(SVT)) in FoldConstantVectorArithmetic()
  |                  | 3447 SVT = (SVT.bitsLT(Op.getValueType()) ? Op.getValueType() : SVT); in getNode()
  |                  | 4540 if (VT.bitsLT(LargestVT)) { in getMemsetStores()
  |                  | 5082 assert(MemVT.getScalarType().bitsLT(VT.getScalarType()) && in getLoad()
  |                  | 5265 assert(SVT.getScalarType().bitsLT(VT.getScalarType()) && in getTruncStore()
|
D | LegalizeTypesGeneric.cpp | 231 assert(OldEltVT.bitsLT(OldVT) && "Result type smaller then element type!"); in ExpandRes_EXTRACT_VECTOR_ELT()
|
D | LegalizeDAG.cpp | 1605 if (EltVT.bitsLT(Node->getOperand(i).getValueType().getScalarType())) { in ExpandVectorBuildThroughStack()
  |                 | 2576 } else if (DestVT.bitsLT(MVT::f64)) { in ExpandLegalINT_TO_FP()
  |                 | 3227 if (NewEltVT.bitsLT(EltVT)) { in ExpandNode()
  |                 | 4472 assert(NewEltVT.bitsLT(EltVT) && "not handled"); in PromoteNode()
  |                 | 4505 assert(NewEltVT.bitsLT(EltVT) && "not handled"); in PromoteNode()
  |                 | 4552 assert(NewEltVT.bitsLT(EltVT) && "not handled"); in PromoteNode()
|
D | FastISel.cpp | 326 if (IdxVT.bitsLT(PtrVT)) { in getRegForGEPIndex()
  |              | 1634 if (DstVT.bitsLT(SrcVT)) in selectOperator()
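The getRegForGEPIndex() hit above (and the matching AArch64FastISel.cpp:4796 hit later in this list) uses bitsLT()/bitsGT() to decide how a GEP index has to be adjusted to the pointer width before address arithmetic. A minimal sketch of that decision; classifyGEPIndex() and IndexFixup are hypothetical names, not LLVM API:

    #include "llvm/CodeGen/ValueTypes.h"

    using namespace llvm;

    // Hypothetical helper mirroring the width check at FastISel.cpp:326:
    // indices narrower than the pointer are sign-extended, wider ones truncated.
    enum class IndexFixup { SignExtend, Truncate, None };

    static IndexFixup classifyGEPIndex(EVT IdxVT, EVT PtrVT) {
      if (IdxVT.bitsLT(PtrVT))
        return IndexFixup::SignExtend; // e.g. i32 index, 64-bit pointer
      if (IdxVT.bitsGT(PtrVT))
        return IndexFixup::Truncate;   // e.g. i128 index, 64-bit pointer
      return IndexFixup::None;         // widths already match
    }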
|
D | DAGCombiner.cpp | 6249 if (VT.bitsLT(Op.getValueType())) in visitZERO_EXTEND()
  |                 | 6289 if (SrcVT.bitsLT(VT)) { in visitZERO_EXTEND()
  |                 | 6301 if (SrcVT.bitsLT(VT)) { in visitZERO_EXTEND()
  |                 | 6321 if (X.getValueType().bitsLT(VT)) { in visitZERO_EXTEND()
  |                 | 6571 if (X.getValueType().bitsLT(VT)) { in visitANY_EXTEND()
  |                 | 6911 EVT.bitsLT(cast<VTSDNode>(N0.getOperand(1))->getVT()) in visitSIGN_EXTEND_INREG()
  |                 | 7030 if (N0.getOperand(0).getValueType().bitsLT(VT)) in visitTRUNCATE()
  |                 | 9113 if (VT.bitsLT(In.getValueType())) in visitFP_EXTEND()
  |                 | 12135 if (ResultVT.bitsLT(VecEltVT)) in ReplaceExtractVectorEltOfLoadWithNarrowedLoad()
  |                 | 12243 if (NVT.bitsLT(LVT) && !TLI.isTruncateFree(LVT, NVT)) in visitEXTRACT_VECTOR_ELT()
  |                 | [all …]
|
D | LegalizeVectorTypes.cpp | 1429 if (N->getValueType(0).bitsLT(N->getOperand(0)->getValueType(0))) in SplitVectorOperand()
  |                         | 1436 if (N->getValueType(0).bitsLT(N->getOperand(0)->getValueType(0))) in SplitVectorOperand()
  |                         | 3673 assert(StVT.bitsLT(ValOp.getValueType())); in GenWidenVectorTruncStores()
|
D | SelectionDAGBuilder.cpp | 202 ValueVT.bitsLT(PartEVT)) { in getCopyFromParts()
  |                         | 210 if (ValueVT.bitsLT(PartEVT)) { in getCopyFromParts()
  |                         | 224 if (ValueVT.bitsLT(Val.getValueType())) in getCopyFromParts()
|
D | TargetLowering.cpp | 1603 else if (Op0.getValueType().bitsLT(VT)) in SimplifySetCC()
|
D | LegalizeIntegerTypes.cpp | 3192 if (N->getOperand(i).getValueType().bitsLT(NOutVTElem)) in PromoteIntRes_BUILD_VECTOR()
|
/external/llvm/include/llvm/Target/ |
D | TargetLowering.h | 2546 return VT.bitsLT(MinVT) ? MinVT : VT; in getTypeForExtArgOrReturn()
|
/external/llvm/lib/Target/AMDGPU/ |
D | AMDGPUISelDAGToDAG.cpp | 569 N->getMemoryVT().bitsLT(MVT::i32)) in isGlobalLoad()
|
D | AMDGPUISelLowering.cpp | 520 return Dest.bitsLT(Source) && (Dest.getSizeInBits() % 32 == 0); in isTruncateFree()
  |                        | 1500 MemVT.bitsLT(MVT::i32)) { in LowerSTORE()
|
D | SIISelLowering.cpp | 464 if (VT.bitsLT(MVT::i32)) in allowsMisalignedMemoryAccesses()
|
/external/llvm/lib/Target/X86/ |
D | X86FastISel.cpp | 3261 if (DstVT.bitsLT(SrcVT)) in fastSelectInstruction()
|
D | X86ISelLowering.cpp | 2370 return VT.bitsLT(MinVT) ? MinVT : VT; in getTypeForExtArgOrReturn()
  |                     | 12723 if (DestVT.bitsLT(MVT::f64)) in LowerUINT_TO_FP_i32()
  |                     | 13559 if (SrcVT.bitsLT(VT)) { in LowerFCOPYSIGN()
  |                     | 18529 else if (EltVT.bitsLT(MVT::i32)) in LowerScalarVariableShift()
|
/external/llvm/lib/Target/AArch64/ |
D | AArch64FastISel.cpp | 4796 if (IdxVT.bitsLT(PtrVT)) { in getRegForGEPIndex()
|
D | AArch64ISelLowering.cpp | 3717 if (SrcVT.bitsLT(VT)) in LowerFCOPYSIGN()
  |                         | 4895 if (SrcEltTy.bitsLT(SmallestEltTy)) { in ReconstructShuffle()
|
/external/llvm/lib/Target/PowerPC/ |
D | PPCISelLowering.cpp | 2269 if (VT.bitsLT(MVT::i32)) { in LowerSETCC()
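Several of the target hits above (ARMSelectionDAGInfo.cpp:96, AMDGPUISelDAGToDAG.cpp:569, SIISelLowering.cpp:464, PPCISelLowering.cpp:2269) compare against MVT::i32 to single out sub-word values that the target has to widen or treat specially. A minimal sketch of that idiom; getPromotedType() is a hypothetical helper, not code from any of the files listed:

    #include "llvm/CodeGen/ValueTypes.h"

    using namespace llvm;

    // Hypothetical: pick the type a sub-i32 scalar would be widened to before
    // a target with 32-bit (or wider) scalar registers operates on it.
    static EVT getPromotedType(EVT VT) {
      if (VT.bitsLT(MVT::i32))
        return MVT::i32; // i1/i8/i16 get widened to i32
      return VT;         // i32 and wider are left alone
    }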
|