Lines Matching refs: v64i16
203 if (LocVT == MVT::v16i64 || LocVT == MVT::v32i32 || LocVT == MVT::v64i16 || in CC_Hexagon_VarArg()
348 (LocVT == MVT::v16i64 || LocVT == MVT::v32i32 || LocVT == MVT::v64i16 || in CC_HexagonVector()
371 (LocVT == MVT::v16i64 || LocVT == MVT::v32i32 || LocVT == MVT::v64i16 || in CC_HexagonVector()
418 } else if (LocVT == MVT::v128i8 || LocVT == MVT::v64i16 || in RetCC_Hexagon()
545 ty == MVT::v16i64 || ty == MVT::v32i32 || ty == MVT::v64i16 || in IsHvxVectorType()
899 VT == MVT::v64i16 || VT == MVT::v128i8); in getIndexedAddressParts()
1126 RegVT == MVT::v64i16 || RegVT == MVT::v128i8))) { in LowerFormalArguments()
1134 RegVT == MVT::v64i16 || RegVT == MVT::v128i8)) { in LowerFormalArguments()
1760 addRegisterClass(MVT::v64i16, &Hexagon::VecDblRegsRegClass); in HexagonTargetLowering()
1766 addRegisterClass(MVT::v64i16, &Hexagon::VectorRegs128BRegClass); in HexagonTargetLowering()
2000 setOperationAction(ISD::CONCAT_VECTORS, MVT::v64i16, Custom); in HexagonTargetLowering()
2072 for (MVT VT : {MVT::v128i8, MVT::v64i16, MVT::v32i32, MVT::v16i64}) { in HexagonTargetLowering()
2889 case MVT::v64i16: in getRegForInlineAsmConstraint()
3032 case MVT::v64i16: in allowsMisalignedMemoryAccesses()
3062 case MVT::v64i16: in findRepresentativeClass()
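
Read together, these hits outline how the backend plumbs the v64i16 HVX type through instruction selection: the HexagonTargetLowering constructor assigns it a register class (VecDblRegs in 64-byte vector mode at line 1760, VectorRegs128B in 128-byte mode at line 1766) and marks wide-vector operations such as CONCAT_VECTORS as Custom (lines 2000 and 2072), while the remaining hits special-case the type in the calling-convention handlers, formal-argument lowering, indexed addressing, inline-asm constraints, and register-class queries. The fragment below is a minimal sketch of that constructor registration pattern, assuming the standard LLVM TargetLowering API (addRegisterClass, setOperationAction, computeRegisterProperties); the useHVX128B() subtarget predicate and the trimmed constructor body are placeholders for illustration, not the backend's exact code.

// Condensed sketch (not the verbatim backend source) of the v64i16 setup the
// listing points at.  Register-class names match the hits above; the
// useHVX128B() predicate is a simplified stand-in for the real subtarget check.
HexagonTargetLowering::HexagonTargetLowering(const TargetMachine &TM,
                                             const HexagonSubtarget &ST)
    : TargetLowering(TM), Subtarget(ST) {
  if (Subtarget.useHVX128B()) {
    // 128-byte HVX mode: 64 x i16 fits in a single 128B vector register.
    addRegisterClass(MVT::v64i16, &Hexagon::VectorRegs128BRegClass);
  } else {
    // 64-byte HVX mode: 64 x i16 spans a vector register pair.
    addRegisterClass(MVT::v64i16, &Hexagon::VecDblRegsRegClass);
  }

  // Wide HVX types get custom lowering for CONCAT_VECTORS so that
  // LowerOperation() can assemble the result from HVX register pairs.
  for (MVT VT : {MVT::v128i8, MVT::v64i16, MVT::v32i32, MVT::v16i64})
    setOperationAction(ISD::CONCAT_VECTORS, VT, Custom);

  // Recompute type/register-class mappings after all addRegisterClass calls.
  computeRegisterProperties(Subtarget.getRegisterInfo());
}

Once the type is registered this way, the remaining call sites in the listing (CC_HexagonVector, RetCC_Hexagon, LowerFormalArguments, getRegForInlineAsmConstraint, findRepresentativeClass) only need to recognize v64i16 alongside the other HVX MVTs and map it to the same register classes.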