Lines matching refs: Ops. Each entry shows the source line number, the matching source line, and the enclosing function; declaration sites are additionally tagged "argument" or "local".
1911 Value *CodeGenFunction::EmitNeonCall(Function *F, SmallVectorImpl<Value*> &Ops, in EmitNeonCall() argument
1918 Ops[j] = EmitNeonShiftVector(Ops[j], ai->getType(), rightshift); in EmitNeonCall()
1920 Ops[j] = Builder.CreateBitCast(Ops[j], ai->getType(), name); in EmitNeonCall()
1922 return Builder.CreateCall(F, Ops, name); in EmitNeonCall()
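The four lines above are the core of EmitNeonCall: for each formal parameter of the chosen intrinsic, the operand is either rebuilt as a splatted shift-amount vector (1918) or bitcast to the parameter type (1920) before the call at 1922. Below is a minimal standalone C++ sketch of the splat step only, assuming the NEON convention that right-shift intrinsics encode their count as a negative value; splat_shift is an illustrative name, not a Clang helper.

    #include <cstdint>
    #include <vector>

    // Copy the immediate shift count into every lane, negating it for right
    // shifts (assumed convention of the NEON shift intrinsics), which is the
    // role EmitNeonShiftVector plays for Ops[j] at line 1918.
    std::vector<int64_t> splat_shift(int64_t count, unsigned lanes, bool rightshift) {
        return std::vector<int64_t>(lanes, rightshift ? -count : count);
    }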
2668 SmallVectorImpl<Value *> &Ops, in EmitCommonNeonSISDBuiltinExpr() argument
2691 std::swap(Ops[0], Ops[1]); in EmitCommonNeonSISDBuiltinExpr()
2707 if (Ops[j]->getType()->getPrimitiveSizeInBits() == in EmitCommonNeonSISDBuiltinExpr()
2711 assert(ArgTy->isVectorTy() && !Ops[j]->getType()->isVectorTy()); in EmitCommonNeonSISDBuiltinExpr()
2714 Ops[j] = in EmitCommonNeonSISDBuiltinExpr()
2715 CGF.Builder.CreateTruncOrBitCast(Ops[j], ArgTy->getVectorElementType()); in EmitCommonNeonSISDBuiltinExpr()
2716 Ops[j] = in EmitCommonNeonSISDBuiltinExpr()
2717 CGF.Builder.CreateInsertElement(UndefValue::get(ArgTy), Ops[j], C0); in EmitCommonNeonSISDBuiltinExpr()
2720 Value *Result = CGF.EmitNeonCall(F, Ops, s); in EmitCommonNeonSISDBuiltinExpr()
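Lines 2707-2717 show the SISD fix-up: when a builtin operand is a plain scalar but the intrinsic's parameter is a vector, the scalar is truncated or bitcast to the element type and inserted into lane 0 of an otherwise-undefined vector before EmitNeonCall runs at 2720. A small standalone model of that wrapping, with zero standing in for undef and fixed illustrative types:

    #include <array>
    #include <cstdint>

    // Stand-in for CreateTruncOrBitCast + CreateInsertElement(UndefValue, ..., C0):
    // the scalar ends up in lane 0, the remaining lanes are don't-care.
    std::array<int32_t, 4> scalar_to_sisd_operand(int64_t scalar) {
        std::array<int32_t, 4> vec{};                 // zero here, undef in the IR
        vec[0] = static_cast<int32_t>(scalar);
        return vec;
    }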
2732 SmallVectorImpl<llvm::Value *> &Ops, llvm::Value *Align) { in EmitCommonNeonBuiltinExpr() argument
2758 return EmitNeonCall(CGM.getIntrinsic(Intrinsic::fabs, Ty), Ops, "vabs"); in EmitCommonNeonBuiltinExpr()
2759 return EmitNeonCall(CGM.getIntrinsic(LLVMIntrinsic, Ty), Ops, "vabs"); in EmitCommonNeonBuiltinExpr()
2765 Ops[0] = Builder.CreateBitCast(Ops[0], SrcTy); in EmitCommonNeonBuiltinExpr()
2766 Ops[1] = Builder.CreateBitCast(Ops[1], SrcTy); in EmitCommonNeonBuiltinExpr()
2767 Ops[0] = Builder.CreateAdd(Ops[0], Ops[1], "vaddhn"); in EmitCommonNeonBuiltinExpr()
2773 Ops[0] = Builder.CreateLShr(Ops[0], ShiftAmt, "vaddhn"); in EmitCommonNeonBuiltinExpr()
2776 return Builder.CreateTrunc(Ops[0], VTy, "vaddhn"); in EmitCommonNeonBuiltinExpr()
2782 std::swap(Ops[0], Ops[1]); in EmitCommonNeonBuiltinExpr()
2792 return EmitNeonCall(F, Ops, NameHint); in EmitCommonNeonBuiltinExpr()
2798 Ops.push_back(Builder.getInt1(getTarget().isCLZForZeroUndef())); in EmitCommonNeonBuiltinExpr()
2802 Ops[0] = Builder.CreateBitCast(Ops[0], Ty); in EmitCommonNeonBuiltinExpr()
2804 return Usgn ? Builder.CreateUIToFP(Ops[0], Ty, "vcvt") in EmitCommonNeonBuiltinExpr()
2805 : Builder.CreateSIToFP(Ops[0], Ty, "vcvt"); in EmitCommonNeonBuiltinExpr()
2819 return EmitNeonCall(F, Ops, "vcvt_n"); in EmitCommonNeonBuiltinExpr()
2837 return EmitNeonCall(F, Ops, "vcvt_n"); in EmitCommonNeonBuiltinExpr()
2853 Ops[0] = Builder.CreateBitCast(Ops[0], FloatTy); in EmitCommonNeonBuiltinExpr()
2854 return Usgn ? Builder.CreateFPToUI(Ops[0], Ty, "vcvt") in EmitCommonNeonBuiltinExpr()
2855 : Builder.CreateFPToSI(Ops[0], Ty, "vcvt"); in EmitCommonNeonBuiltinExpr()
2896 return EmitNeonCall(CGM.getIntrinsic(LLVMIntrinsic, Tys), Ops, NameHint); in EmitCommonNeonBuiltinExpr()
2900 int CV = cast<ConstantInt>(Ops[2])->getSExtValue(); in EmitCommonNeonBuiltinExpr()
2905 Ops[0] = Builder.CreateBitCast(Ops[0], Ty); in EmitCommonNeonBuiltinExpr()
2906 Ops[1] = Builder.CreateBitCast(Ops[1], Ty); in EmitCommonNeonBuiltinExpr()
2908 return Builder.CreateShuffleVector(Ops[0], Ops[1], SV, "vext"); in EmitCommonNeonBuiltinExpr()
2913 Ops[0] = Builder.CreateBitCast(Ops[0], Ty); in EmitCommonNeonBuiltinExpr()
2914 Ops[1] = Builder.CreateBitCast(Ops[1], Ty); in EmitCommonNeonBuiltinExpr()
2915 Ops[2] = Builder.CreateBitCast(Ops[2], Ty); in EmitCommonNeonBuiltinExpr()
2918 return Builder.CreateCall3(F, Ops[1], Ops[2], Ops[0]); in EmitCommonNeonBuiltinExpr()
2922 Ops.push_back(Align); in EmitCommonNeonBuiltinExpr()
2923 return EmitNeonCall(CGM.getIntrinsic(LLVMIntrinsic, Ty), Ops, "vld1"); in EmitCommonNeonBuiltinExpr()
2931 Ops[1] = Builder.CreateCall2(F, Ops[1], Align, NameHint); in EmitCommonNeonBuiltinExpr()
2932 Ty = llvm::PointerType::getUnqual(Ops[1]->getType()); in EmitCommonNeonBuiltinExpr()
2933 Ops[0] = Builder.CreateBitCast(Ops[0], Ty); in EmitCommonNeonBuiltinExpr()
2934 return Builder.CreateStore(Ops[1], Ops[0]); in EmitCommonNeonBuiltinExpr()
2940 Ops[0] = Builder.CreateBitCast(Ops[0], Ty); in EmitCommonNeonBuiltinExpr()
2941 LoadInst *Ld = Builder.CreateLoad(Ops[0]); in EmitCommonNeonBuiltinExpr()
2944 Ops[0] = Builder.CreateInsertElement(V, Ld, CI); in EmitCommonNeonBuiltinExpr()
2945 return EmitNeonSplat(Ops[0], CI); in EmitCommonNeonBuiltinExpr()
2954 for (unsigned I = 2; I < Ops.size() - 1; ++I) in EmitCommonNeonBuiltinExpr()
2955 Ops[I] = Builder.CreateBitCast(Ops[I], Ty); in EmitCommonNeonBuiltinExpr()
2956 Ops.push_back(Align); in EmitCommonNeonBuiltinExpr()
2957 Ops[1] = Builder.CreateCall(F, makeArrayRef(Ops).slice(1), NameHint); in EmitCommonNeonBuiltinExpr()
2958 Ty = llvm::PointerType::getUnqual(Ops[1]->getType()); in EmitCommonNeonBuiltinExpr()
2959 Ops[0] = Builder.CreateBitCast(Ops[0], Ty); in EmitCommonNeonBuiltinExpr()
2960 return Builder.CreateStore(Ops[1], Ops[0]); in EmitCommonNeonBuiltinExpr()
2964 Ops[0] = Builder.CreateBitCast(Ops[0], DTy); in EmitCommonNeonBuiltinExpr()
2966 return Builder.CreateZExt(Ops[0], Ty, "vmovl"); in EmitCommonNeonBuiltinExpr()
2967 return Builder.CreateSExt(Ops[0], Ty, "vmovl"); in EmitCommonNeonBuiltinExpr()
2971 Ops[0] = Builder.CreateBitCast(Ops[0], QTy); in EmitCommonNeonBuiltinExpr()
2972 return Builder.CreateTrunc(Ops[0], Ty, "vmovn"); in EmitCommonNeonBuiltinExpr()
2982 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vmull"); in EmitCommonNeonBuiltinExpr()
2992 return EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, NameHint); in EmitCommonNeonBuiltinExpr()
3002 return EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vpaddl"); in EmitCommonNeonBuiltinExpr()
3006 SmallVector<Value *, 2> MulOps(Ops.begin() + 1, Ops.end()); in EmitCommonNeonBuiltinExpr()
3011 AccumOps.push_back(Ops[0]); in EmitCommonNeonBuiltinExpr()
3018 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vqshl_n", in EmitCommonNeonBuiltinExpr()
3022 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vqshlu_n", in EmitCommonNeonBuiltinExpr()
3029 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, NameHint); in EmitCommonNeonBuiltinExpr()
3033 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vrshr_n", in EmitCommonNeonBuiltinExpr()
3037 Ops[1] = EmitNeonShiftVector(Ops[1], Ty, false); in EmitCommonNeonBuiltinExpr()
3038 return Builder.CreateShl(Builder.CreateBitCast(Ops[0],Ty), Ops[1], in EmitCommonNeonBuiltinExpr()
3042 Ops[0] = Builder.CreateBitCast(Ops[0], SrcTy); in EmitCommonNeonBuiltinExpr()
3044 Ops[0] = Builder.CreateZExt(Ops[0], VTy); in EmitCommonNeonBuiltinExpr()
3046 Ops[0] = Builder.CreateSExt(Ops[0], VTy); in EmitCommonNeonBuiltinExpr()
3047 Ops[1] = EmitNeonShiftVector(Ops[1], VTy, false); in EmitCommonNeonBuiltinExpr()
3048 return Builder.CreateShl(Ops[0], Ops[1], "vshll_n"); in EmitCommonNeonBuiltinExpr()
3052 Ops[0] = Builder.CreateBitCast(Ops[0], SrcTy); in EmitCommonNeonBuiltinExpr()
3053 Ops[1] = EmitNeonShiftVector(Ops[1], SrcTy, false); in EmitCommonNeonBuiltinExpr()
3055 Ops[0] = Builder.CreateLShr(Ops[0], Ops[1]); in EmitCommonNeonBuiltinExpr()
3057 Ops[0] = Builder.CreateAShr(Ops[0], Ops[1]); in EmitCommonNeonBuiltinExpr()
3058 return Builder.CreateTrunc(Ops[0], Ty, "vshrn_n"); in EmitCommonNeonBuiltinExpr()
3062 return EmitNeonRShiftImm(Ops[0], Ops[1], Ty, Usgn, "vshr_n"); in EmitCommonNeonBuiltinExpr()
3077 Ops.push_back(Align); in EmitCommonNeonBuiltinExpr()
3078 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, ""); in EmitCommonNeonBuiltinExpr()
3084 Ops[0] = Builder.CreateBitCast(Ops[0], SrcTy); in EmitCommonNeonBuiltinExpr()
3085 Ops[1] = Builder.CreateBitCast(Ops[1], SrcTy); in EmitCommonNeonBuiltinExpr()
3086 Ops[0] = Builder.CreateSub(Ops[0], Ops[1], "vsubhn"); in EmitCommonNeonBuiltinExpr()
3092 Ops[0] = Builder.CreateLShr(Ops[0], ShiftAmt, "vsubhn"); in EmitCommonNeonBuiltinExpr()
3095 return Builder.CreateTrunc(Ops[0], VTy, "vsubhn"); in EmitCommonNeonBuiltinExpr()
3099 Ops[0] = Builder.CreateBitCast(Ops[0], llvm::PointerType::getUnqual(Ty)); in EmitCommonNeonBuiltinExpr()
3100 Ops[1] = Builder.CreateBitCast(Ops[1], Ty); in EmitCommonNeonBuiltinExpr()
3101 Ops[2] = Builder.CreateBitCast(Ops[2], Ty); in EmitCommonNeonBuiltinExpr()
3110 Value *Addr = Builder.CreateConstInBoundsGEP1_32(Ty, Ops[0], vi); in EmitCommonNeonBuiltinExpr()
3112 SV = Builder.CreateShuffleVector(Ops[1], Ops[2], SV, "vtrn"); in EmitCommonNeonBuiltinExpr()
3119 Ops[0] = Builder.CreateBitCast(Ops[0], Ty); in EmitCommonNeonBuiltinExpr()
3120 Ops[1] = Builder.CreateBitCast(Ops[1], Ty); in EmitCommonNeonBuiltinExpr()
3121 Ops[0] = Builder.CreateAnd(Ops[0], Ops[1]); in EmitCommonNeonBuiltinExpr()
3122 Ops[0] = Builder.CreateICmp(ICmpInst::ICMP_NE, Ops[0], in EmitCommonNeonBuiltinExpr()
3124 return Builder.CreateSExt(Ops[0], Ty, "vtst"); in EmitCommonNeonBuiltinExpr()
3128 Ops[0] = Builder.CreateBitCast(Ops[0], llvm::PointerType::getUnqual(Ty)); in EmitCommonNeonBuiltinExpr()
3129 Ops[1] = Builder.CreateBitCast(Ops[1], Ty); in EmitCommonNeonBuiltinExpr()
3130 Ops[2] = Builder.CreateBitCast(Ops[2], Ty); in EmitCommonNeonBuiltinExpr()
3138 Value *Addr = Builder.CreateConstInBoundsGEP1_32(Ty, Ops[0], vi); in EmitCommonNeonBuiltinExpr()
3140 SV = Builder.CreateShuffleVector(Ops[1], Ops[2], SV, "vuzp"); in EmitCommonNeonBuiltinExpr()
3147 Ops[0] = Builder.CreateBitCast(Ops[0], llvm::PointerType::getUnqual(Ty)); in EmitCommonNeonBuiltinExpr()
3148 Ops[1] = Builder.CreateBitCast(Ops[1], Ty); in EmitCommonNeonBuiltinExpr()
3149 Ops[2] = Builder.CreateBitCast(Ops[2], Ty); in EmitCommonNeonBuiltinExpr()
3158 Value *Addr = Builder.CreateConstInBoundsGEP1_32(Ty, Ops[0], vi); in EmitCommonNeonBuiltinExpr()
3160 SV = Builder.CreateShuffleVector(Ops[1], Ops[2], SV, "vzip"); in EmitCommonNeonBuiltinExpr()
3172 Value *Result = EmitNeonCall(F, Ops, NameHint); in EmitCommonNeonBuiltinExpr()
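Two of the groups above, vaddhn at 2765-2776 and vsubhn at 3084-3095, lower "narrow to the high half" operations as add (or sub), logical shift right by half the source element width (the ShiftAmt at 2773/3092), then truncate. A standalone model of the add form for 16-bit lanes narrowing to 8 bits; vaddhn_model is an illustrative name:

    #include <array>
    #include <cstdint>

    // add -> lshr by half the element width -> trunc, applied per lane.
    std::array<uint8_t, 8> vaddhn_model(const std::array<uint16_t, 8> &a,
                                        const std::array<uint16_t, 8> &b) {
        std::array<uint8_t, 8> out{};
        for (unsigned i = 0; i != 8; ++i) {
            uint16_t sum = static_cast<uint16_t>(a[i] + b[i]);   // wrapping i16 add
            out[i] = static_cast<uint8_t>(sum >> 8);             // keep the high half
        }
        return out;
    }

The vsubhn lines follow the same shape with CreateSub in place of CreateAdd.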
3200 static Value *packTBLDVectorList(CodeGenFunction &CGF, ArrayRef<Value *> Ops, in packTBLDVectorList() argument
3210 llvm::VectorType *TblTy = cast<llvm::VectorType>(Ops[0]->getType()); in packTBLDVectorList()
3217 int PairPos = 0, End = Ops.size() - 1; in packTBLDVectorList()
3219 TblOps.push_back(CGF.Builder.CreateShuffleVector(Ops[PairPos], in packTBLDVectorList()
3220 Ops[PairPos+1], SV, Name)); in packTBLDVectorList()
3228 TblOps.push_back(CGF.Builder.CreateShuffleVector(Ops[PairPos], in packTBLDVectorList()
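The PairPos loop at 3217-3220 combines adjacent table operands with a shuffle whose mask runs over both inputs; assuming the usual widening of two 64-bit table halves into one 128-bit TBL register, that shuffle is simply a concatenation. A plain model of that step (illustrative, not the Clang helper):

    #include <array>
    #include <cstdint>

    // Concatenate two 8-byte table halves into one 16-byte table register,
    // i.e. a shuffle with indices 0..15 over the pair of inputs.
    std::array<uint8_t, 16> concat_tbl_halves(const std::array<uint8_t, 8> &lo,
                                              const std::array<uint8_t, 8> &hi) {
        std::array<uint8_t, 16> out{};
        for (unsigned i = 0; i != 8; ++i) {
            out[i] = lo[i];
            out[i + 8] = hi[i];
        }
        return out;
    }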
3320 SmallVector<Value*, 2> Ops; in EmitARMBuiltinExpr() local
3322 Ops.push_back(EmitScalarExpr(E->getArg(i))); in EmitARMBuiltinExpr()
3326 return EmitNounwindRuntimeCall(CGM.CreateRuntimeFunction(FTy, Name), Ops); in EmitARMBuiltinExpr()
3481 SmallVector<Value*, 4> Ops; in EmitARMBuiltinExpr() local
3512 Ops.push_back(Src.first); in EmitARMBuiltinExpr()
3538 Ops.push_back(Src.first); in EmitARMBuiltinExpr()
3543 Ops.push_back(EmitScalarExpr(E->getArg(i))); in EmitARMBuiltinExpr()
3560 return Builder.CreateExtractElement(Ops[0], EmitScalarExpr(E->getArg(1)), in EmitARMBuiltinExpr()
3572 Ops.push_back(EmitScalarExpr(E->getArg(2))); in EmitARMBuiltinExpr()
3573 return Builder.CreateInsertElement(Ops[1], Ops[0], Ops[2], "vset_lane"); in EmitARMBuiltinExpr()
3577 Ops.push_back(EmitScalarExpr(E->getArg(0))); in EmitARMBuiltinExpr()
3578 return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_sha1h), Ops, in EmitARMBuiltinExpr()
3581 Ops.push_back(EmitScalarExpr(E->getArg(2))); in EmitARMBuiltinExpr()
3582 return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_sha1c), Ops, in EmitARMBuiltinExpr()
3585 Ops.push_back(EmitScalarExpr(E->getArg(2))); in EmitARMBuiltinExpr()
3586 return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_sha1p), Ops, in EmitARMBuiltinExpr()
3589 Ops.push_back(EmitScalarExpr(E->getArg(2))); in EmitARMBuiltinExpr()
3590 return EmitNeonCall(CGM.getIntrinsic(Intrinsic::arm_neon_sha1m), Ops, in EmitARMBuiltinExpr()
3615 return Builder.CreateCall(F, Ops, "vcvtr"); in EmitARMBuiltinExpr()
3636 Builtin->NameHint, Builtin->TypeModifier, E, Ops, Align); in EmitARMBuiltinExpr()
3646 Ops[1] = Builder.CreateBitCast(Ops[1], Ty); in EmitARMBuiltinExpr()
3647 int Lane = cast<ConstantInt>(Ops[2])->getZExtValue(); in EmitARMBuiltinExpr()
3649 Ops[1] = Builder.CreateShuffleVector(Ops[1], Ops[1], SV); in EmitARMBuiltinExpr()
3653 Value *Ld = Builder.CreateCall2(F, Ops[0], Align); in EmitARMBuiltinExpr()
3659 return Builder.CreateShuffleVector(Ops[1], Ld, SV, "vld1q_lane"); in EmitARMBuiltinExpr()
3663 Ops[1] = Builder.CreateBitCast(Ops[1], Ty); in EmitARMBuiltinExpr()
3665 Ops[0] = Builder.CreateBitCast(Ops[0], Ty); in EmitARMBuiltinExpr()
3666 LoadInst *Ld = Builder.CreateLoad(Ops[0]); in EmitARMBuiltinExpr()
3668 return Builder.CreateInsertElement(Ops[1], Ld, Ops[2], "vld1_lane"); in EmitARMBuiltinExpr()
3688 Ops[1] = Builder.CreateCall2(F, Ops[1], Align, "vld_dup"); in EmitARMBuiltinExpr()
3689 Ty = llvm::PointerType::getUnqual(Ops[1]->getType()); in EmitARMBuiltinExpr()
3690 Ops[0] = Builder.CreateBitCast(Ops[0], Ty); in EmitARMBuiltinExpr()
3691 return Builder.CreateStore(Ops[1], Ops[0]); in EmitARMBuiltinExpr()
3709 Args.push_back(Ops[1]); in EmitARMBuiltinExpr()
3716 Ops[1] = Builder.CreateCall(F, Args, "vld_dup"); in EmitARMBuiltinExpr()
3719 Value *Val = Builder.CreateExtractValue(Ops[1], i); in EmitARMBuiltinExpr()
3723 Ops[1] = Builder.CreateInsertValue(Ops[1], Elt, i); in EmitARMBuiltinExpr()
3725 Ty = llvm::PointerType::getUnqual(Ops[1]->getType()); in EmitARMBuiltinExpr()
3726 Ops[0] = Builder.CreateBitCast(Ops[0], Ty); in EmitARMBuiltinExpr()
3727 return Builder.CreateStore(Ops[1], Ops[0]); in EmitARMBuiltinExpr()
3732 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vqrshrn_n", in EmitARMBuiltinExpr()
3736 Ops, "vqrshrun_n", 1, true); in EmitARMBuiltinExpr()
3739 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vqshrn_n", in EmitARMBuiltinExpr()
3743 Ops, "vqshrun_n", 1, true); in EmitARMBuiltinExpr()
3747 Ops, "vrecpe"); in EmitARMBuiltinExpr()
3750 Ops, "vrshrn_n", 1, true); in EmitARMBuiltinExpr()
3753 Ops[0] = Builder.CreateBitCast(Ops[0], Ty); in EmitARMBuiltinExpr()
3754 Ops[1] = Builder.CreateBitCast(Ops[1], Ty); in EmitARMBuiltinExpr()
3755 Ops[2] = EmitNeonShiftVector(Ops[2], Ty, true); in EmitARMBuiltinExpr()
3757 Ops[1] = Builder.CreateCall2(CGM.getIntrinsic(Int, Ty), Ops[1], Ops[2]); in EmitARMBuiltinExpr()
3758 return Builder.CreateAdd(Ops[0], Ops[1], "vrsra_n"); in EmitARMBuiltinExpr()
3764 Ops[2] = EmitNeonShiftVector(Ops[2], Ty, rightShift); in EmitARMBuiltinExpr()
3766 Ops, "vsli_n"); in EmitARMBuiltinExpr()
3769 Ops[0] = Builder.CreateBitCast(Ops[0], Ty); in EmitARMBuiltinExpr()
3770 Ops[1] = EmitNeonRShiftImm(Ops[1], Ops[2], Ty, usgn, "vsra_n"); in EmitARMBuiltinExpr()
3771 return Builder.CreateAdd(Ops[0], Ops[1]); in EmitARMBuiltinExpr()
3776 Ops[1] = Builder.CreateBitCast(Ops[1], Ty); in EmitARMBuiltinExpr()
3777 Value *SV = llvm::ConstantVector::get(cast<llvm::Constant>(Ops[2])); in EmitARMBuiltinExpr()
3778 Ops[1] = Builder.CreateShuffleVector(Ops[1], Ops[1], SV); in EmitARMBuiltinExpr()
3779 Ops[2] = Align; in EmitARMBuiltinExpr()
3781 Ops[1]->getType()), Ops); in EmitARMBuiltinExpr()
3785 Ops[1] = Builder.CreateBitCast(Ops[1], Ty); in EmitARMBuiltinExpr()
3786 Ops[1] = Builder.CreateExtractElement(Ops[1], Ops[2]); in EmitARMBuiltinExpr()
3787 Ty = llvm::PointerType::getUnqual(Ops[1]->getType()); in EmitARMBuiltinExpr()
3788 StoreInst *St = Builder.CreateStore(Ops[1], in EmitARMBuiltinExpr()
3789 Builder.CreateBitCast(Ops[0], Ty)); in EmitARMBuiltinExpr()
3795 Ops, "vtbl1"); in EmitARMBuiltinExpr()
3798 Ops, "vtbl2"); in EmitARMBuiltinExpr()
3801 Ops, "vtbl3"); in EmitARMBuiltinExpr()
3804 Ops, "vtbl4"); in EmitARMBuiltinExpr()
3807 Ops, "vtbx1"); in EmitARMBuiltinExpr()
3810 Ops, "vtbx2"); in EmitARMBuiltinExpr()
3813 Ops, "vtbx3"); in EmitARMBuiltinExpr()
3816 Ops, "vtbx4"); in EmitARMBuiltinExpr()
3822 SmallVectorImpl<Value *> &Ops) { in EmitAArch64TblBuiltinExpr() argument
3881 TblOps.push_back(Ops[0]); in EmitAArch64TblBuiltinExpr()
3882 return packTBLDVectorList(CGF, TblOps, nullptr, Ops[1], Ty, in EmitAArch64TblBuiltinExpr()
3886 TblOps.push_back(Ops[0]); in EmitAArch64TblBuiltinExpr()
3887 TblOps.push_back(Ops[1]); in EmitAArch64TblBuiltinExpr()
3888 return packTBLDVectorList(CGF, TblOps, nullptr, Ops[2], Ty, in EmitAArch64TblBuiltinExpr()
3892 TblOps.push_back(Ops[0]); in EmitAArch64TblBuiltinExpr()
3893 TblOps.push_back(Ops[1]); in EmitAArch64TblBuiltinExpr()
3894 TblOps.push_back(Ops[2]); in EmitAArch64TblBuiltinExpr()
3895 return packTBLDVectorList(CGF, TblOps, nullptr, Ops[3], Ty, in EmitAArch64TblBuiltinExpr()
3899 TblOps.push_back(Ops[0]); in EmitAArch64TblBuiltinExpr()
3900 TblOps.push_back(Ops[1]); in EmitAArch64TblBuiltinExpr()
3901 TblOps.push_back(Ops[2]); in EmitAArch64TblBuiltinExpr()
3902 TblOps.push_back(Ops[3]); in EmitAArch64TblBuiltinExpr()
3903 return packTBLDVectorList(CGF, TblOps, nullptr, Ops[4], Ty, in EmitAArch64TblBuiltinExpr()
3907 TblOps.push_back(Ops[1]); in EmitAArch64TblBuiltinExpr()
3908 Value *TblRes = packTBLDVectorList(CGF, TblOps, nullptr, Ops[2], Ty, in EmitAArch64TblBuiltinExpr()
3913 Value *CmpRes = Builder.CreateICmp(ICmpInst::ICMP_UGE, Ops[2], EightV); in EmitAArch64TblBuiltinExpr()
3916 Value *EltsFromInput = Builder.CreateAnd(CmpRes, Ops[0]); in EmitAArch64TblBuiltinExpr()
3921 TblOps.push_back(Ops[1]); in EmitAArch64TblBuiltinExpr()
3922 TblOps.push_back(Ops[2]); in EmitAArch64TblBuiltinExpr()
3923 return packTBLDVectorList(CGF, TblOps, Ops[0], Ops[3], Ty, in EmitAArch64TblBuiltinExpr()
3927 TblOps.push_back(Ops[1]); in EmitAArch64TblBuiltinExpr()
3928 TblOps.push_back(Ops[2]); in EmitAArch64TblBuiltinExpr()
3929 TblOps.push_back(Ops[3]); in EmitAArch64TblBuiltinExpr()
3930 Value *TblRes = packTBLDVectorList(CGF, TblOps, nullptr, Ops[4], Ty, in EmitAArch64TblBuiltinExpr()
3935 Value *CmpRes = Builder.CreateICmp(ICmpInst::ICMP_UGE, Ops[4], in EmitAArch64TblBuiltinExpr()
3939 Value *EltsFromInput = Builder.CreateAnd(CmpRes, Ops[0]); in EmitAArch64TblBuiltinExpr()
3944 TblOps.push_back(Ops[1]); in EmitAArch64TblBuiltinExpr()
3945 TblOps.push_back(Ops[2]); in EmitAArch64TblBuiltinExpr()
3946 TblOps.push_back(Ops[3]); in EmitAArch64TblBuiltinExpr()
3947 TblOps.push_back(Ops[4]); in EmitAArch64TblBuiltinExpr()
3948 return packTBLDVectorList(CGF, TblOps, Ops[0], Ops[5], Ty, in EmitAArch64TblBuiltinExpr()
3982 return CGF.EmitNeonCall(F, Ops, s); in EmitAArch64TblBuiltinExpr()
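Lines 3907-3916 emulate vtbx1: a one-register TBL is performed, the index vector is compared against EightV (the table length), and the compare mask selects between the original destination lanes and the lookup result. A plain model of that selection, with illustrative names:

    #include <array>
    #include <cstdint>

    // Indices >= 8 fall outside the single 8-byte table, so those lanes keep
    // the destination value; in-range lanes take the table lookup.
    std::array<uint8_t, 8> vtbx1_model(const std::array<uint8_t, 8> &dst,
                                       const std::array<uint8_t, 8> &table,
                                       const std::array<uint8_t, 8> &idx) {
        std::array<uint8_t, 8> out{};
        for (unsigned i = 0; i != 8; ++i)
            out[i] = (idx[i] >= 8) ? dst[i] : table[idx[i]];
        return out;
    }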
4004 emitVectorWrappedScalar8Intrinsic(unsigned Int, SmallVectorImpl<Value*> &Ops, in emitVectorWrappedScalar8Intrinsic() argument
4011 Ops[0] = vectorWrapScalar8(Ops[0]); in emitVectorWrappedScalar8Intrinsic()
4012 Ops[1] = vectorWrapScalar8(Ops[1]); in emitVectorWrappedScalar8Intrinsic()
4014 Value *V = EmitNeonCall(CGM.getIntrinsic(Int, VTy), Ops, Name); in emitVectorWrappedScalar8Intrinsic()
4020 emitVectorWrappedScalar16Intrinsic(unsigned Int, SmallVectorImpl<Value*> &Ops, in emitVectorWrappedScalar16Intrinsic() argument
4027 Ops[0] = vectorWrapScalar16(Ops[0]); in emitVectorWrappedScalar16Intrinsic()
4028 Ops[1] = vectorWrapScalar16(Ops[1]); in emitVectorWrappedScalar16Intrinsic()
4030 Value *V = EmitNeonCall(CGM.getIntrinsic(Int, VTy), Ops, Name); in emitVectorWrappedScalar16Intrinsic()
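Both wrappers above use the same trick: place the scalar operands in lane 0 of a short vector, run the vector intrinsic, and read lane 0 back out. The sketch below models that round trip for the 16-bit case, using a saturating doubling multiply-high as an example payload (the arithmetic is the architectural definition, included only to keep the sketch self-contained; it is not Clang code):

    #include <array>
    #include <cstdint>

    int16_t wrapped_sqdmulh_model(int16_t a, int16_t b) {
        std::array<int16_t, 4> va{}, vb{};            // vectorWrapScalar16 stand-ins
        va[0] = a;
        vb[0] = b;
        std::array<int16_t, 4> vr{};
        for (unsigned i = 0; i != 4; ++i) {
            int64_t doubled = 2 * static_cast<int64_t>(va[i]) * vb[i];
            if (doubled > INT32_MAX) doubled = INT32_MAX;   // saturate the doubled product
            if (doubled < INT32_MIN) doubled = INT32_MIN;
            vr[i] = static_cast<int16_t>(doubled >> 16);    // bits [31:16], arithmetic shift
        }
        return vr[0];                                 // extract lane 0 of the result
    }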
4106 SmallVector<Value*, 2> Ops; in EmitAArch64BuiltinExpr() local
4108 Ops.push_back(EmitScalarExpr(E->getArg(i))); in EmitAArch64BuiltinExpr()
4112 return EmitNounwindRuntimeCall(CGM.CreateRuntimeFunction(FTy, Name), Ops); in EmitAArch64BuiltinExpr()
4242 llvm::SmallVector<Value*, 4> Ops; in EmitAArch64BuiltinExpr() local
4244 Ops.push_back(EmitScalarExpr(E->getArg(i))); in EmitAArch64BuiltinExpr()
4251 Ops.push_back(EmitScalarExpr(E->getArg(E->getNumArgs() - 1))); in EmitAArch64BuiltinExpr()
4252 Value *Result = EmitCommonNeonSISDBuiltinExpr(*this, *Builtin, Ops, E); in EmitAArch64BuiltinExpr()
4277 Value *Ptr = Builder.CreateBitCast(Ops[0], Int128PTy); in EmitAArch64BuiltinExpr()
4286 Ops.push_back(EmitScalarExpr(E->getArg(0))); in EmitAArch64BuiltinExpr()
4287 bool Is64 = Ops[0]->getType()->getPrimitiveSizeInBits() == 64; in EmitAArch64BuiltinExpr()
4290 Ops[0] = Builder.CreateBitCast(Ops[0], FTy); in EmitAArch64BuiltinExpr()
4292 return Builder.CreateFPToUI(Ops[0], InTy); in EmitAArch64BuiltinExpr()
4293 return Builder.CreateFPToSI(Ops[0], InTy); in EmitAArch64BuiltinExpr()
4301 Ops.push_back(EmitScalarExpr(E->getArg(0))); in EmitAArch64BuiltinExpr()
4302 bool Is64 = Ops[0]->getType()->getPrimitiveSizeInBits() == 64; in EmitAArch64BuiltinExpr()
4305 Ops[0] = Builder.CreateBitCast(Ops[0], InTy); in EmitAArch64BuiltinExpr()
4307 return Builder.CreateUIToFP(Ops[0], FTy); in EmitAArch64BuiltinExpr()
4308 return Builder.CreateSIToFP(Ops[0], FTy); in EmitAArch64BuiltinExpr()
4352 Ops.push_back(EmitScalarExpr(E->getArg(0))); in EmitAArch64BuiltinExpr()
4354 Ops[0], ConvertType(E->getCallReturnType(getContext())), in EmitAArch64BuiltinExpr()
4359 Ops.push_back(EmitScalarExpr(E->getArg(0))); in EmitAArch64BuiltinExpr()
4361 Ops[0], ConvertType(E->getCallReturnType(getContext())), in EmitAArch64BuiltinExpr()
4366 Ops.push_back(EmitScalarExpr(E->getArg(0))); in EmitAArch64BuiltinExpr()
4368 Ops[0], ConvertType(E->getCallReturnType(getContext())), in EmitAArch64BuiltinExpr()
4373 Ops.push_back(EmitScalarExpr(E->getArg(0))); in EmitAArch64BuiltinExpr()
4375 Ops[0], ConvertType(E->getCallReturnType(getContext())), in EmitAArch64BuiltinExpr()
4380 Ops.push_back(EmitScalarExpr(E->getArg(0))); in EmitAArch64BuiltinExpr()
4382 Ops[0], ConvertType(E->getCallReturnType(getContext())), in EmitAArch64BuiltinExpr()
4387 Ops.push_back(EmitScalarExpr(E->getArg(0))); in EmitAArch64BuiltinExpr()
4388 Ops[0] = Builder.CreateBitCast(Ops[0], Ty); in EmitAArch64BuiltinExpr()
4389 Ops[0] = Builder.CreateICmp(llvm::ICmpInst::ICMP_EQ, Ops[0], in EmitAArch64BuiltinExpr()
4391 return Builder.CreateSExt(Ops[0], Ty, "vceqzd"); in EmitAArch64BuiltinExpr()
4407 Ops.push_back(EmitScalarExpr(E->getArg(1))); in EmitAArch64BuiltinExpr()
4408 Ops[0] = Builder.CreateBitCast(Ops[0], DoubleTy); in EmitAArch64BuiltinExpr()
4409 Ops[1] = Builder.CreateBitCast(Ops[1], DoubleTy); in EmitAArch64BuiltinExpr()
4410 Ops[0] = Builder.CreateFCmp(P, Ops[0], Ops[1]); in EmitAArch64BuiltinExpr()
4411 return Builder.CreateSExt(Ops[0], Int64Ty, "vcmpd"); in EmitAArch64BuiltinExpr()
4427 Ops.push_back(EmitScalarExpr(E->getArg(1))); in EmitAArch64BuiltinExpr()
4428 Ops[0] = Builder.CreateBitCast(Ops[0], FloatTy); in EmitAArch64BuiltinExpr()
4429 Ops[1] = Builder.CreateBitCast(Ops[1], FloatTy); in EmitAArch64BuiltinExpr()
4430 Ops[0] = Builder.CreateFCmp(P, Ops[0], Ops[1]); in EmitAArch64BuiltinExpr()
4431 return Builder.CreateSExt(Ops[0], Int32Ty, "vcmpd"); in EmitAArch64BuiltinExpr()
4457 Ops.push_back(EmitScalarExpr(E->getArg(1))); in EmitAArch64BuiltinExpr()
4458 Ops[0] = Builder.CreateBitCast(Ops[0], Int64Ty); in EmitAArch64BuiltinExpr()
4459 Ops[1] = Builder.CreateBitCast(Ops[1], Int64Ty); in EmitAArch64BuiltinExpr()
4460 Ops[0] = Builder.CreateICmp(P, Ops[0], Ops[1]); in EmitAArch64BuiltinExpr()
4461 return Builder.CreateSExt(Ops[0], Int64Ty, "vceqd"); in EmitAArch64BuiltinExpr()
4466 Ops.push_back(EmitScalarExpr(E->getArg(1))); in EmitAArch64BuiltinExpr()
4467 Ops[0] = Builder.CreateBitCast(Ops[0], Ty); in EmitAArch64BuiltinExpr()
4468 Ops[1] = Builder.CreateBitCast(Ops[1], Ty); in EmitAArch64BuiltinExpr()
4469 Ops[0] = Builder.CreateAnd(Ops[0], Ops[1]); in EmitAArch64BuiltinExpr()
4470 Ops[0] = Builder.CreateICmp(ICmpInst::ICMP_NE, Ops[0], in EmitAArch64BuiltinExpr()
4472 return Builder.CreateSExt(Ops[0], Ty, "vtstd"); in EmitAArch64BuiltinExpr()
4484 Ops.push_back(EmitScalarExpr(E->getArg(2))); in EmitAArch64BuiltinExpr()
4485 return Builder.CreateInsertElement(Ops[1], Ops[0], Ops[2], "vset_lane"); in EmitAArch64BuiltinExpr()
4488 Ops[1] = Builder.CreateBitCast(Ops[1], in EmitAArch64BuiltinExpr()
4490 Ops.push_back(EmitScalarExpr(E->getArg(2))); in EmitAArch64BuiltinExpr()
4491 return Builder.CreateInsertElement(Ops[1], Ops[0], Ops[2], "vset_lane"); in EmitAArch64BuiltinExpr()
4494 Ops[1] = Builder.CreateBitCast(Ops[1], in EmitAArch64BuiltinExpr()
4496 Ops.push_back(EmitScalarExpr(E->getArg(2))); in EmitAArch64BuiltinExpr()
4497 return Builder.CreateInsertElement(Ops[1], Ops[0], Ops[2], "vset_lane"); in EmitAArch64BuiltinExpr()
4501 Ops[0] = Builder.CreateBitCast(Ops[0], in EmitAArch64BuiltinExpr()
4503 return Builder.CreateExtractElement(Ops[0], EmitScalarExpr(E->getArg(1)), in EmitAArch64BuiltinExpr()
4507 Ops[0] = Builder.CreateBitCast(Ops[0], in EmitAArch64BuiltinExpr()
4509 return Builder.CreateExtractElement(Ops[0], EmitScalarExpr(E->getArg(1)), in EmitAArch64BuiltinExpr()
4513 Ops[0] = Builder.CreateBitCast(Ops[0], in EmitAArch64BuiltinExpr()
4515 return Builder.CreateExtractElement(Ops[0], EmitScalarExpr(E->getArg(1)), in EmitAArch64BuiltinExpr()
4519 Ops[0] = Builder.CreateBitCast(Ops[0], in EmitAArch64BuiltinExpr()
4521 return Builder.CreateExtractElement(Ops[0], EmitScalarExpr(E->getArg(1)), in EmitAArch64BuiltinExpr()
4525 Ops[0] = Builder.CreateBitCast( in EmitAArch64BuiltinExpr()
4526 Ops[0], in EmitAArch64BuiltinExpr()
4528 return Builder.CreateExtractElement(Ops[0], EmitScalarExpr(E->getArg(1)), in EmitAArch64BuiltinExpr()
4531 Ops[0] = Builder.CreateBitCast(Ops[0], in EmitAArch64BuiltinExpr()
4533 return Builder.CreateExtractElement(Ops[0], EmitScalarExpr(E->getArg(1)), in EmitAArch64BuiltinExpr()
4537 Ops[0] = Builder.CreateBitCast(Ops[0], in EmitAArch64BuiltinExpr()
4539 return Builder.CreateExtractElement(Ops[0], EmitScalarExpr(E->getArg(1)), in EmitAArch64BuiltinExpr()
4543 Ops[0] = Builder.CreateBitCast(Ops[0], in EmitAArch64BuiltinExpr()
4545 return Builder.CreateExtractElement(Ops[0], EmitScalarExpr(E->getArg(1)), in EmitAArch64BuiltinExpr()
4548 Ops[0] = Builder.CreateBitCast(Ops[0], in EmitAArch64BuiltinExpr()
4550 return Builder.CreateExtractElement(Ops[0], EmitScalarExpr(E->getArg(1)), in EmitAArch64BuiltinExpr()
4554 Ops[0] = Builder.CreateBitCast(Ops[0], in EmitAArch64BuiltinExpr()
4556 return Builder.CreateExtractElement(Ops[0], EmitScalarExpr(E->getArg(1)), in EmitAArch64BuiltinExpr()
4559 Ops[0] = Builder.CreateBitCast(Ops[0], in EmitAArch64BuiltinExpr()
4561 return Builder.CreateExtractElement(Ops[0], EmitScalarExpr(E->getArg(1)), in EmitAArch64BuiltinExpr()
4564 Ops[0] = Builder.CreateBitCast(Ops[0], in EmitAArch64BuiltinExpr()
4566 return Builder.CreateExtractElement(Ops[0], EmitScalarExpr(E->getArg(1)), in EmitAArch64BuiltinExpr()
4570 Ops[0] = Builder.CreateBitCast(Ops[0], in EmitAArch64BuiltinExpr()
4572 return Builder.CreateExtractElement(Ops[0], EmitScalarExpr(E->getArg(1)), in EmitAArch64BuiltinExpr()
4576 Ops[0] = Builder.CreateBitCast(Ops[0], in EmitAArch64BuiltinExpr()
4578 return Builder.CreateExtractElement(Ops[0], EmitScalarExpr(E->getArg(1)), in EmitAArch64BuiltinExpr()
4582 return Builder.CreateAdd(Ops[0], EmitScalarExpr(E->getArg(1)), "vaddd"); in EmitAArch64BuiltinExpr()
4585 return Builder.CreateSub(Ops[0], EmitScalarExpr(E->getArg(1)), "vsubd"); in EmitAArch64BuiltinExpr()
4589 ProductOps.push_back(vectorWrapScalar16(Ops[1])); in EmitAArch64BuiltinExpr()
4592 Ops[1] = EmitNeonCall(CGM.getIntrinsic(Intrinsic::aarch64_neon_sqdmull, VTy), in EmitAArch64BuiltinExpr()
4595 Ops[1] = Builder.CreateExtractElement(Ops[1], CI, "lane0"); in EmitAArch64BuiltinExpr()
4600 return EmitNeonCall(CGM.getIntrinsic(AccumInt, Int32Ty), Ops, "vqdmlXl"); in EmitAArch64BuiltinExpr()
4603 Ops.push_back(EmitScalarExpr(E->getArg(1))); in EmitAArch64BuiltinExpr()
4604 Ops[1] = Builder.CreateZExt(Ops[1], Int64Ty); in EmitAArch64BuiltinExpr()
4606 Ops, "vqshlu_n"); in EmitAArch64BuiltinExpr()
4613 Ops.push_back(EmitScalarExpr(E->getArg(1))); in EmitAArch64BuiltinExpr()
4614 Ops[1] = Builder.CreateZExt(Ops[1], Int64Ty); in EmitAArch64BuiltinExpr()
4615 return EmitNeonCall(CGM.getIntrinsic(Int, Int64Ty), Ops, "vqshl_n"); in EmitAArch64BuiltinExpr()
4622 Ops.push_back(EmitScalarExpr(E->getArg(1))); in EmitAArch64BuiltinExpr()
4623 int SV = cast<ConstantInt>(Ops[1])->getSExtValue(); in EmitAArch64BuiltinExpr()
4624 Ops[1] = ConstantInt::get(Int64Ty, -SV); in EmitAArch64BuiltinExpr()
4625 return EmitNeonCall(CGM.getIntrinsic(Int, Int64Ty), Ops, "vrshr_n"); in EmitAArch64BuiltinExpr()
4632 Ops[1] = Builder.CreateBitCast(Ops[1], Int64Ty); in EmitAArch64BuiltinExpr()
4633 Ops.push_back(Builder.CreateNeg(EmitScalarExpr(E->getArg(2)))); in EmitAArch64BuiltinExpr()
4634 Ops[1] = Builder.CreateCall2(CGM.getIntrinsic(Int, Int64Ty), Ops[1], in EmitAArch64BuiltinExpr()
4635 Builder.CreateSExt(Ops[2], Int64Ty)); in EmitAArch64BuiltinExpr()
4636 return Builder.CreateAdd(Ops[0], Builder.CreateBitCast(Ops[1], Int64Ty)); in EmitAArch64BuiltinExpr()
4642 Ops[0], ConstantInt::get(Int64Ty, Amt->getZExtValue()), "shld_n"); in EmitAArch64BuiltinExpr()
4647 Ops[0], ConstantInt::get(Int64Ty, std::min(static_cast<uint64_t>(63), in EmitAArch64BuiltinExpr()
4657 return Builder.CreateLShr(Ops[0], ConstantInt::get(Int64Ty, ShiftAmt), in EmitAArch64BuiltinExpr()
4662 Ops[1] = Builder.CreateAShr( in EmitAArch64BuiltinExpr()
4663 Ops[1], ConstantInt::get(Int64Ty, std::min(static_cast<uint64_t>(63), in EmitAArch64BuiltinExpr()
4666 return Builder.CreateAdd(Ops[0], Ops[1]); in EmitAArch64BuiltinExpr()
4674 return Ops[0]; in EmitAArch64BuiltinExpr()
4675 Ops[1] = Builder.CreateLShr(Ops[1], ConstantInt::get(Int64Ty, ShiftAmt), in EmitAArch64BuiltinExpr()
4677 return Builder.CreateAdd(Ops[0], Ops[1]); in EmitAArch64BuiltinExpr()
4683 Ops[2] = Builder.CreateExtractElement(Ops[2], EmitScalarExpr(E->getArg(3)), in EmitAArch64BuiltinExpr()
4686 ProductOps.push_back(vectorWrapScalar16(Ops[1])); in EmitAArch64BuiltinExpr()
4687 ProductOps.push_back(vectorWrapScalar16(Ops[2])); in EmitAArch64BuiltinExpr()
4689 Ops[1] = EmitNeonCall(CGM.getIntrinsic(Intrinsic::aarch64_neon_sqdmull, VTy), in EmitAArch64BuiltinExpr()
4692 Ops[1] = Builder.CreateExtractElement(Ops[1], CI, "lane0"); in EmitAArch64BuiltinExpr()
4693 Ops.pop_back(); in EmitAArch64BuiltinExpr()
4699 return EmitNeonCall(CGM.getIntrinsic(AccInt, Int32Ty), Ops, "vqdmlXl"); in EmitAArch64BuiltinExpr()
4704 ProductOps.push_back(Ops[1]); in EmitAArch64BuiltinExpr()
4706 Ops[1] = in EmitAArch64BuiltinExpr()
4713 return EmitNeonCall(CGM.getIntrinsic(AccumInt, Int64Ty), Ops, "vqdmlXl"); in EmitAArch64BuiltinExpr()
4719 Ops[2] = Builder.CreateExtractElement(Ops[2], EmitScalarExpr(E->getArg(3)), in EmitAArch64BuiltinExpr()
4722 ProductOps.push_back(Ops[1]); in EmitAArch64BuiltinExpr()
4723 ProductOps.push_back(Ops[2]); in EmitAArch64BuiltinExpr()
4724 Ops[1] = in EmitAArch64BuiltinExpr()
4727 Ops.pop_back(); in EmitAArch64BuiltinExpr()
4733 return EmitNeonCall(CGM.getIntrinsic(AccInt, Int64Ty), Ops, "vqdmlXl"); in EmitAArch64BuiltinExpr()
4750 Builtin->NameHint, Builtin->TypeModifier, E, Ops, nullptr); in EmitAArch64BuiltinExpr()
4752 if (Value *V = EmitAArch64TblBuiltinExpr(*this, BuiltinID, E, Ops)) in EmitAArch64BuiltinExpr()
4761 Ops[0] = Builder.CreateBitCast(Ops[0], BitTy, "vbsl"); in EmitAArch64BuiltinExpr()
4762 Ops[1] = Builder.CreateBitCast(Ops[1], BitTy, "vbsl"); in EmitAArch64BuiltinExpr()
4763 Ops[2] = Builder.CreateBitCast(Ops[2], BitTy, "vbsl"); in EmitAArch64BuiltinExpr()
4765 Ops[1] = Builder.CreateAnd(Ops[0], Ops[1], "vbsl"); in EmitAArch64BuiltinExpr()
4766 Ops[2] = Builder.CreateAnd(Builder.CreateNot(Ops[0]), Ops[2], "vbsl"); in EmitAArch64BuiltinExpr()
4767 Ops[0] = Builder.CreateOr(Ops[1], Ops[2], "vbsl"); in EmitAArch64BuiltinExpr()
4768 return Builder.CreateBitCast(Ops[0], Ty); in EmitAArch64BuiltinExpr()
4774 Value *Addend = Ops[0]; in EmitAArch64BuiltinExpr()
4775 Value *Multiplicand = Ops[1]; in EmitAArch64BuiltinExpr()
4776 Value *LaneSource = Ops[2]; in EmitAArch64BuiltinExpr()
4777 Ops[0] = Multiplicand; in EmitAArch64BuiltinExpr()
4778 Ops[1] = LaneSource; in EmitAArch64BuiltinExpr()
4779 Ops[2] = Addend; in EmitAArch64BuiltinExpr()
4785 llvm::Constant *cst = cast<Constant>(Ops[3]); in EmitAArch64BuiltinExpr()
4787 Ops[1] = Builder.CreateBitCast(Ops[1], SourceTy); in EmitAArch64BuiltinExpr()
4788 Ops[1] = Builder.CreateShuffleVector(Ops[1], Ops[1], SV, "lane"); in EmitAArch64BuiltinExpr()
4790 Ops.pop_back(); in EmitAArch64BuiltinExpr()
4792 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "fmla"); in EmitAArch64BuiltinExpr()
4798 Ops[0] = Builder.CreateBitCast(Ops[0], DoubleTy); in EmitAArch64BuiltinExpr()
4799 Ops[1] = Builder.CreateBitCast(Ops[1], DoubleTy); in EmitAArch64BuiltinExpr()
4802 Ops[2] = Builder.CreateBitCast(Ops[2], VTy); in EmitAArch64BuiltinExpr()
4803 Ops[2] = Builder.CreateExtractElement(Ops[2], Ops[3], "extract"); in EmitAArch64BuiltinExpr()
4805 Value *Result = Builder.CreateCall3(F, Ops[1], Ops[2], Ops[0]); in EmitAArch64BuiltinExpr()
4809 Ops[0] = Builder.CreateBitCast(Ops[0], Ty); in EmitAArch64BuiltinExpr()
4810 Ops[1] = Builder.CreateBitCast(Ops[1], Ty); in EmitAArch64BuiltinExpr()
4814 Ops[2] = Builder.CreateBitCast(Ops[2], STy); in EmitAArch64BuiltinExpr()
4816 cast<ConstantInt>(Ops[3])); in EmitAArch64BuiltinExpr()
4817 Ops[2] = Builder.CreateShuffleVector(Ops[2], Ops[2], SV, "lane"); in EmitAArch64BuiltinExpr()
4819 return Builder.CreateCall3(F, Ops[2], Ops[1], Ops[0]); in EmitAArch64BuiltinExpr()
4823 Ops[0] = Builder.CreateBitCast(Ops[0], Ty); in EmitAArch64BuiltinExpr()
4824 Ops[1] = Builder.CreateBitCast(Ops[1], Ty); in EmitAArch64BuiltinExpr()
4826 Ops[2] = Builder.CreateBitCast(Ops[2], Ty); in EmitAArch64BuiltinExpr()
4827 Ops[2] = EmitNeonSplat(Ops[2], cast<ConstantInt>(Ops[3])); in EmitAArch64BuiltinExpr()
4828 return Builder.CreateCall3(F, Ops[2], Ops[1], Ops[0]); in EmitAArch64BuiltinExpr()
4834 Ops.push_back(EmitScalarExpr(E->getArg(3))); in EmitAArch64BuiltinExpr()
4837 Ops[2] = Builder.CreateExtractElement(Ops[2], Ops[3], "extract"); in EmitAArch64BuiltinExpr()
4838 return Builder.CreateCall3(F, Ops[1], Ops[2], Ops[0]); in EmitAArch64BuiltinExpr()
4847 Value *Subtrahend = Ops[0]; in EmitAArch64BuiltinExpr()
4848 Value *Multiplicand = Ops[2]; in EmitAArch64BuiltinExpr()
4849 Ops[0] = Multiplicand; in EmitAArch64BuiltinExpr()
4850 Ops[2] = Subtrahend; in EmitAArch64BuiltinExpr()
4851 Ops[1] = Builder.CreateBitCast(Ops[1], VTy); in EmitAArch64BuiltinExpr()
4852 Ops[1] = Builder.CreateFNeg(Ops[1]); in EmitAArch64BuiltinExpr()
4854 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "fmls"); in EmitAArch64BuiltinExpr()
4860 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vmull"); in EmitAArch64BuiltinExpr()
4866 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vmax"); in EmitAArch64BuiltinExpr()
4872 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vmin"); in EmitAArch64BuiltinExpr()
4878 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vabd"); in EmitAArch64BuiltinExpr()
4889 TmpOps.push_back(Ops[1]); in EmitAArch64BuiltinExpr()
4892 llvm::Value *addend = Builder.CreateBitCast(Ops[0], tmp->getType()); in EmitAArch64BuiltinExpr()
4900 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vpmin"); in EmitAArch64BuiltinExpr()
4906 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vpmax"); in EmitAArch64BuiltinExpr()
4910 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vminnm"); in EmitAArch64BuiltinExpr()
4914 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vmaxnm"); in EmitAArch64BuiltinExpr()
4917 Ops.push_back(EmitScalarExpr(E->getArg(1))); in EmitAArch64BuiltinExpr()
4919 Ops, "vrecps"); in EmitAArch64BuiltinExpr()
4923 Ops.push_back(EmitScalarExpr(E->getArg(1))); in EmitAArch64BuiltinExpr()
4925 Ops, "vrecps"); in EmitAArch64BuiltinExpr()
4929 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vqshrun_n"); in EmitAArch64BuiltinExpr()
4932 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vqrshrun_n"); in EmitAArch64BuiltinExpr()
4935 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vqshrn_n"); in EmitAArch64BuiltinExpr()
4938 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vrshrn_n"); in EmitAArch64BuiltinExpr()
4941 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vqrshrn_n"); in EmitAArch64BuiltinExpr()
4945 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vrnda"); in EmitAArch64BuiltinExpr()
4950 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vrndi"); in EmitAArch64BuiltinExpr()
4955 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vrndm"); in EmitAArch64BuiltinExpr()
4960 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vrndn"); in EmitAArch64BuiltinExpr()
4965 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vrndp"); in EmitAArch64BuiltinExpr()
4970 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vrndx"); in EmitAArch64BuiltinExpr()
4975 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vrndz"); in EmitAArch64BuiltinExpr()
4979 return EmitAArch64CompareBuiltinExpr(Ops[0], Ty, ICmpInst::FCMP_OEQ, in EmitAArch64BuiltinExpr()
4983 return EmitAArch64CompareBuiltinExpr(Ops[0], Ty, ICmpInst::FCMP_OGE, in EmitAArch64BuiltinExpr()
4987 return EmitAArch64CompareBuiltinExpr(Ops[0], Ty, ICmpInst::FCMP_OLE, in EmitAArch64BuiltinExpr()
4991 return EmitAArch64CompareBuiltinExpr(Ops[0], Ty, ICmpInst::FCMP_OGT, in EmitAArch64BuiltinExpr()
4995 return EmitAArch64CompareBuiltinExpr(Ops[0], Ty, ICmpInst::FCMP_OLT, in EmitAArch64BuiltinExpr()
4999 Ops[0] = Builder.CreateBitCast(Ops[0], Ty); in EmitAArch64BuiltinExpr()
5001 return usgn ? Builder.CreateUIToFP(Ops[0], Ty, "vcvt") in EmitAArch64BuiltinExpr()
5002 : Builder.CreateSIToFP(Ops[0], Ty, "vcvt"); in EmitAArch64BuiltinExpr()
5007 Ops[0] = Builder.CreateBitCast(Ops[0], GetNeonType(this, SrcFlag)); in EmitAArch64BuiltinExpr()
5009 return Builder.CreateFPExt(Ops[0], Ty, "vcvt"); in EmitAArch64BuiltinExpr()
5015 Ops[0] = Builder.CreateBitCast(Ops[0], GetNeonType(this, SrcFlag)); in EmitAArch64BuiltinExpr()
5017 return Builder.CreateFPTrunc(Ops[0], Ty, "vcvt"); in EmitAArch64BuiltinExpr()
5033 Ops[0] = Builder.CreateBitCast(Ops[0], InTy); in EmitAArch64BuiltinExpr()
5035 return Builder.CreateFPToUI(Ops[0], Ty); in EmitAArch64BuiltinExpr()
5036 return Builder.CreateFPToSI(Ops[0], Ty); in EmitAArch64BuiltinExpr()
5054 return EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vcvta"); in EmitAArch64BuiltinExpr()
5072 return EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vcvtm"); in EmitAArch64BuiltinExpr()
5090 return EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vcvtn"); in EmitAArch64BuiltinExpr()
5108 return EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vcvtp"); in EmitAArch64BuiltinExpr()
5113 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vmulx"); in EmitAArch64BuiltinExpr()
5121 Ops[0] = Builder.CreateBitCast(Ops[0], DoubleTy); in EmitAArch64BuiltinExpr()
5124 Ops[1] = Builder.CreateBitCast(Ops[1], VTy); in EmitAArch64BuiltinExpr()
5125 Ops[1] = Builder.CreateExtractElement(Ops[1], Ops[2], "extract"); in EmitAArch64BuiltinExpr()
5126 Value *Result = Builder.CreateFMul(Ops[0], Ops[1]); in EmitAArch64BuiltinExpr()
5134 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vpmaxnm"); in EmitAArch64BuiltinExpr()
5139 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vpminnm"); in EmitAArch64BuiltinExpr()
5144 Ops[0] = Builder.CreateBitCast(Ops[0], Ty); in EmitAArch64BuiltinExpr()
5145 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vsqrt"); in EmitAArch64BuiltinExpr()
5150 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vrbit"); in EmitAArch64BuiltinExpr()
5162 Ops.push_back(EmitScalarExpr(E->getArg(0))); in EmitAArch64BuiltinExpr()
5163 Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vaddv"); in EmitAArch64BuiltinExpr()
5164 return Builder.CreateTrunc(Ops[0], in EmitAArch64BuiltinExpr()
5176 Ops.push_back(EmitScalarExpr(E->getArg(0))); in EmitAArch64BuiltinExpr()
5177 Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vaddv"); in EmitAArch64BuiltinExpr()
5178 return Builder.CreateTrunc(Ops[0], in EmitAArch64BuiltinExpr()
5190 Ops.push_back(EmitScalarExpr(E->getArg(0))); in EmitAArch64BuiltinExpr()
5191 Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vaddv"); in EmitAArch64BuiltinExpr()
5192 return Builder.CreateTrunc(Ops[0], in EmitAArch64BuiltinExpr()
5204 Ops.push_back(EmitScalarExpr(E->getArg(0))); in EmitAArch64BuiltinExpr()
5205 Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vaddv"); in EmitAArch64BuiltinExpr()
5206 return Builder.CreateTrunc(Ops[0], in EmitAArch64BuiltinExpr()
5215 Ops.push_back(EmitScalarExpr(E->getArg(0))); in EmitAArch64BuiltinExpr()
5216 Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vmaxv"); in EmitAArch64BuiltinExpr()
5217 return Builder.CreateTrunc(Ops[0], in EmitAArch64BuiltinExpr()
5226 Ops.push_back(EmitScalarExpr(E->getArg(0))); in EmitAArch64BuiltinExpr()
5227 Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vmaxv"); in EmitAArch64BuiltinExpr()
5228 return Builder.CreateTrunc(Ops[0], in EmitAArch64BuiltinExpr()
5237 Ops.push_back(EmitScalarExpr(E->getArg(0))); in EmitAArch64BuiltinExpr()
5238 Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vmaxv"); in EmitAArch64BuiltinExpr()
5239 return Builder.CreateTrunc(Ops[0], in EmitAArch64BuiltinExpr()
5248 Ops.push_back(EmitScalarExpr(E->getArg(0))); in EmitAArch64BuiltinExpr()
5249 Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vmaxv"); in EmitAArch64BuiltinExpr()
5250 return Builder.CreateTrunc(Ops[0], in EmitAArch64BuiltinExpr()
5259 Ops.push_back(EmitScalarExpr(E->getArg(0))); in EmitAArch64BuiltinExpr()
5260 Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vmaxv"); in EmitAArch64BuiltinExpr()
5261 return Builder.CreateTrunc(Ops[0], in EmitAArch64BuiltinExpr()
5270 Ops.push_back(EmitScalarExpr(E->getArg(0))); in EmitAArch64BuiltinExpr()
5271 Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vmaxv"); in EmitAArch64BuiltinExpr()
5272 return Builder.CreateTrunc(Ops[0], in EmitAArch64BuiltinExpr()
5281 Ops.push_back(EmitScalarExpr(E->getArg(0))); in EmitAArch64BuiltinExpr()
5282 Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vmaxv"); in EmitAArch64BuiltinExpr()
5283 return Builder.CreateTrunc(Ops[0], in EmitAArch64BuiltinExpr()
5292 Ops.push_back(EmitScalarExpr(E->getArg(0))); in EmitAArch64BuiltinExpr()
5293 Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vmaxv"); in EmitAArch64BuiltinExpr()
5294 return Builder.CreateTrunc(Ops[0], in EmitAArch64BuiltinExpr()
5303 Ops.push_back(EmitScalarExpr(E->getArg(0))); in EmitAArch64BuiltinExpr()
5304 Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vminv"); in EmitAArch64BuiltinExpr()
5305 return Builder.CreateTrunc(Ops[0], in EmitAArch64BuiltinExpr()
5314 Ops.push_back(EmitScalarExpr(E->getArg(0))); in EmitAArch64BuiltinExpr()
5315 Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vminv"); in EmitAArch64BuiltinExpr()
5316 return Builder.CreateTrunc(Ops[0], in EmitAArch64BuiltinExpr()
5325 Ops.push_back(EmitScalarExpr(E->getArg(0))); in EmitAArch64BuiltinExpr()
5326 Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vminv"); in EmitAArch64BuiltinExpr()
5327 return Builder.CreateTrunc(Ops[0], in EmitAArch64BuiltinExpr()
5336 Ops.push_back(EmitScalarExpr(E->getArg(0))); in EmitAArch64BuiltinExpr()
5337 Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vminv"); in EmitAArch64BuiltinExpr()
5338 return Builder.CreateTrunc(Ops[0], in EmitAArch64BuiltinExpr()
5347 Ops.push_back(EmitScalarExpr(E->getArg(0))); in EmitAArch64BuiltinExpr()
5348 Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vminv"); in EmitAArch64BuiltinExpr()
5349 return Builder.CreateTrunc(Ops[0], in EmitAArch64BuiltinExpr()
5358 Ops.push_back(EmitScalarExpr(E->getArg(0))); in EmitAArch64BuiltinExpr()
5359 Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vminv"); in EmitAArch64BuiltinExpr()
5360 return Builder.CreateTrunc(Ops[0], in EmitAArch64BuiltinExpr()
5369 Ops.push_back(EmitScalarExpr(E->getArg(0))); in EmitAArch64BuiltinExpr()
5370 Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vminv"); in EmitAArch64BuiltinExpr()
5371 return Builder.CreateTrunc(Ops[0], in EmitAArch64BuiltinExpr()
5380 Ops.push_back(EmitScalarExpr(E->getArg(0))); in EmitAArch64BuiltinExpr()
5381 Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vminv"); in EmitAArch64BuiltinExpr()
5382 return Builder.CreateTrunc(Ops[0], in EmitAArch64BuiltinExpr()
5386 Ops[0] = Builder.CreateBitCast(Ops[0], DoubleTy); in EmitAArch64BuiltinExpr()
5388 return Builder.CreateFMul(Ops[0], RHS); in EmitAArch64BuiltinExpr()
5396 Ops.push_back(EmitScalarExpr(E->getArg(0))); in EmitAArch64BuiltinExpr()
5397 Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vaddlv"); in EmitAArch64BuiltinExpr()
5398 return Builder.CreateTrunc(Ops[0], in EmitAArch64BuiltinExpr()
5407 Ops.push_back(EmitScalarExpr(E->getArg(0))); in EmitAArch64BuiltinExpr()
5408 return EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vaddlv"); in EmitAArch64BuiltinExpr()
5416 Ops.push_back(EmitScalarExpr(E->getArg(0))); in EmitAArch64BuiltinExpr()
5417 Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vaddlv"); in EmitAArch64BuiltinExpr()
5418 return Builder.CreateTrunc(Ops[0], in EmitAArch64BuiltinExpr()
5427 Ops.push_back(EmitScalarExpr(E->getArg(0))); in EmitAArch64BuiltinExpr()
5428 return EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vaddlv"); in EmitAArch64BuiltinExpr()
5436 Ops.push_back(EmitScalarExpr(E->getArg(0))); in EmitAArch64BuiltinExpr()
5437 Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vaddlv"); in EmitAArch64BuiltinExpr()
5438 return Builder.CreateTrunc(Ops[0], in EmitAArch64BuiltinExpr()
5447 Ops.push_back(EmitScalarExpr(E->getArg(0))); in EmitAArch64BuiltinExpr()
5448 return EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vaddlv"); in EmitAArch64BuiltinExpr()
5456 Ops.push_back(EmitScalarExpr(E->getArg(0))); in EmitAArch64BuiltinExpr()
5457 Ops[0] = EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vaddlv"); in EmitAArch64BuiltinExpr()
5458 return Builder.CreateTrunc(Ops[0], in EmitAArch64BuiltinExpr()
5467 Ops.push_back(EmitScalarExpr(E->getArg(0))); in EmitAArch64BuiltinExpr()
5468 return EmitNeonCall(CGM.getIntrinsic(Int, Tys), Ops, "vaddlv"); in EmitAArch64BuiltinExpr()
5474 return EmitNeonCall(Intrin, Ops, "vsri_n"); in EmitAArch64BuiltinExpr()
5480 return EmitNeonCall(Intrin, Ops, "vsli_n"); in EmitAArch64BuiltinExpr()
5484 Ops[0] = Builder.CreateBitCast(Ops[0], Ty); in EmitAArch64BuiltinExpr()
5485 Ops[1] = EmitNeonRShiftImm(Ops[1], Ops[2], Ty, usgn, "vsra_n"); in EmitAArch64BuiltinExpr()
5486 return Builder.CreateAdd(Ops[0], Ops[1]); in EmitAArch64BuiltinExpr()
5491 TmpOps.push_back(Ops[1]); in EmitAArch64BuiltinExpr()
5492 TmpOps.push_back(Ops[2]); in EmitAArch64BuiltinExpr()
5495 Ops[0] = Builder.CreateBitCast(Ops[0], VTy); in EmitAArch64BuiltinExpr()
5496 return Builder.CreateAdd(Ops[0], tmp); in EmitAArch64BuiltinExpr()
5507 Ops[1] = Builder.CreateBitCast(Ops[1], PTy); in EmitAArch64BuiltinExpr()
5525 Ops[1] = Builder.CreateCall(F, Ops[1], "vld1xN"); in EmitAArch64BuiltinExpr()
5526 Ty = llvm::PointerType::getUnqual(Ops[1]->getType()); in EmitAArch64BuiltinExpr()
5527 Ops[0] = Builder.CreateBitCast(Ops[0], Ty); in EmitAArch64BuiltinExpr()
5528 return Builder.CreateStore(Ops[1], Ops[0]); in EmitAArch64BuiltinExpr()
5553 SmallVector<Value *, 4> IntOps(Ops.begin()+1, Ops.end()); in EmitAArch64BuiltinExpr()
5554 IntOps.push_back(Ops[0]); in EmitAArch64BuiltinExpr()
5559 Ops[0] = Builder.CreateBitCast(Ops[0], llvm::PointerType::getUnqual(VTy)); in EmitAArch64BuiltinExpr()
5560 return Builder.CreateLoad(Ops[0]); in EmitAArch64BuiltinExpr()
5563 Ops[0] = Builder.CreateBitCast(Ops[0], llvm::PointerType::getUnqual(VTy)); in EmitAArch64BuiltinExpr()
5564 Ops[1] = Builder.CreateBitCast(Ops[1], VTy); in EmitAArch64BuiltinExpr()
5565 return Builder.CreateStore(Ops[1], Ops[0]); in EmitAArch64BuiltinExpr()
5568 Ops[1] = Builder.CreateBitCast(Ops[1], Ty); in EmitAArch64BuiltinExpr()
5570 Ops[0] = Builder.CreateBitCast(Ops[0], Ty); in EmitAArch64BuiltinExpr()
5571 Ops[0] = Builder.CreateLoad(Ops[0]); in EmitAArch64BuiltinExpr()
5572 return Builder.CreateInsertElement(Ops[1], Ops[0], Ops[2], "vld1_lane"); in EmitAArch64BuiltinExpr()
5577 Ops[0] = Builder.CreateBitCast(Ops[0], Ty); in EmitAArch64BuiltinExpr()
5578 Ops[0] = Builder.CreateLoad(Ops[0]); in EmitAArch64BuiltinExpr()
5580 Ops[0] = Builder.CreateInsertElement(V, Ops[0], CI); in EmitAArch64BuiltinExpr()
5581 return EmitNeonSplat(Ops[0], CI); in EmitAArch64BuiltinExpr()
5585 Ops[1] = Builder.CreateBitCast(Ops[1], Ty); in EmitAArch64BuiltinExpr()
5586 Ops[1] = Builder.CreateExtractElement(Ops[1], Ops[2]); in EmitAArch64BuiltinExpr()
5587 Ty = llvm::PointerType::getUnqual(Ops[1]->getType()); in EmitAArch64BuiltinExpr()
5588 return Builder.CreateStore(Ops[1], Builder.CreateBitCast(Ops[0], Ty)); in EmitAArch64BuiltinExpr()
5592 Ops[1] = Builder.CreateBitCast(Ops[1], PTy); in EmitAArch64BuiltinExpr()
5595 Ops[1] = Builder.CreateCall(F, Ops[1], "vld2"); in EmitAArch64BuiltinExpr()
5596 Ops[0] = Builder.CreateBitCast(Ops[0], in EmitAArch64BuiltinExpr()
5597 llvm::PointerType::getUnqual(Ops[1]->getType())); in EmitAArch64BuiltinExpr()
5598 return Builder.CreateStore(Ops[1], Ops[0]); in EmitAArch64BuiltinExpr()
5603 Ops[1] = Builder.CreateBitCast(Ops[1], PTy); in EmitAArch64BuiltinExpr()
5606 Ops[1] = Builder.CreateCall(F, Ops[1], "vld3"); in EmitAArch64BuiltinExpr()
5607 Ops[0] = Builder.CreateBitCast(Ops[0], in EmitAArch64BuiltinExpr()
5608 llvm::PointerType::getUnqual(Ops[1]->getType())); in EmitAArch64BuiltinExpr()
5609 return Builder.CreateStore(Ops[1], Ops[0]); in EmitAArch64BuiltinExpr()
5614 Ops[1] = Builder.CreateBitCast(Ops[1], PTy); in EmitAArch64BuiltinExpr()
5617 Ops[1] = Builder.CreateCall(F, Ops[1], "vld4"); in EmitAArch64BuiltinExpr()
5618 Ops[0] = Builder.CreateBitCast(Ops[0], in EmitAArch64BuiltinExpr()
5619 llvm::PointerType::getUnqual(Ops[1]->getType())); in EmitAArch64BuiltinExpr()
5620 return Builder.CreateStore(Ops[1], Ops[0]); in EmitAArch64BuiltinExpr()
5626 Ops[1] = Builder.CreateBitCast(Ops[1], PTy); in EmitAArch64BuiltinExpr()
5629 Ops[1] = Builder.CreateCall(F, Ops[1], "vld2"); in EmitAArch64BuiltinExpr()
5630 Ops[0] = Builder.CreateBitCast(Ops[0], in EmitAArch64BuiltinExpr()
5631 llvm::PointerType::getUnqual(Ops[1]->getType())); in EmitAArch64BuiltinExpr()
5632 return Builder.CreateStore(Ops[1], Ops[0]); in EmitAArch64BuiltinExpr()
5638 Ops[1] = Builder.CreateBitCast(Ops[1], PTy); in EmitAArch64BuiltinExpr()
5641 Ops[1] = Builder.CreateCall(F, Ops[1], "vld3"); in EmitAArch64BuiltinExpr()
5642 Ops[0] = Builder.CreateBitCast(Ops[0], in EmitAArch64BuiltinExpr()
5643 llvm::PointerType::getUnqual(Ops[1]->getType())); in EmitAArch64BuiltinExpr()
5644 return Builder.CreateStore(Ops[1], Ops[0]); in EmitAArch64BuiltinExpr()
5650 Ops[1] = Builder.CreateBitCast(Ops[1], PTy); in EmitAArch64BuiltinExpr()
5653 Ops[1] = Builder.CreateCall(F, Ops[1], "vld4"); in EmitAArch64BuiltinExpr()
5654 Ops[0] = Builder.CreateBitCast(Ops[0], in EmitAArch64BuiltinExpr()
5655 llvm::PointerType::getUnqual(Ops[1]->getType())); in EmitAArch64BuiltinExpr()
5656 return Builder.CreateStore(Ops[1], Ops[0]); in EmitAArch64BuiltinExpr()
5660 llvm::Type *Tys[2] = { VTy, Ops[1]->getType() }; in EmitAArch64BuiltinExpr()
5662 Ops.push_back(Ops[1]); in EmitAArch64BuiltinExpr()
5663 Ops.erase(Ops.begin()+1); in EmitAArch64BuiltinExpr()
5664 Ops[1] = Builder.CreateBitCast(Ops[1], Ty); in EmitAArch64BuiltinExpr()
5665 Ops[2] = Builder.CreateBitCast(Ops[2], Ty); in EmitAArch64BuiltinExpr()
5666 Ops[3] = Builder.CreateZExt(Ops[3], in EmitAArch64BuiltinExpr()
5668 Ops[1] = Builder.CreateCall(F, makeArrayRef(Ops).slice(1), "vld2_lane"); in EmitAArch64BuiltinExpr()
5669 Ty = llvm::PointerType::getUnqual(Ops[1]->getType()); in EmitAArch64BuiltinExpr()
5670 Ops[0] = Builder.CreateBitCast(Ops[0], Ty); in EmitAArch64BuiltinExpr()
5671 return Builder.CreateStore(Ops[1], Ops[0]); in EmitAArch64BuiltinExpr()
5675 llvm::Type *Tys[2] = { VTy, Ops[1]->getType() }; in EmitAArch64BuiltinExpr()
5677 Ops.push_back(Ops[1]); in EmitAArch64BuiltinExpr()
5678 Ops.erase(Ops.begin()+1); in EmitAArch64BuiltinExpr()
5679 Ops[1] = Builder.CreateBitCast(Ops[1], Ty); in EmitAArch64BuiltinExpr()
5680 Ops[2] = Builder.CreateBitCast(Ops[2], Ty); in EmitAArch64BuiltinExpr()
5681 Ops[3] = Builder.CreateBitCast(Ops[3], Ty); in EmitAArch64BuiltinExpr()
5682 Ops[4] = Builder.CreateZExt(Ops[4], in EmitAArch64BuiltinExpr()
5684 Ops[1] = Builder.CreateCall(F, makeArrayRef(Ops).slice(1), "vld3_lane"); in EmitAArch64BuiltinExpr()
5685 Ty = llvm::PointerType::getUnqual(Ops[1]->getType()); in EmitAArch64BuiltinExpr()
5686 Ops[0] = Builder.CreateBitCast(Ops[0], Ty); in EmitAArch64BuiltinExpr()
5687 return Builder.CreateStore(Ops[1], Ops[0]); in EmitAArch64BuiltinExpr()
5691 llvm::Type *Tys[2] = { VTy, Ops[1]->getType() }; in EmitAArch64BuiltinExpr()
5693 Ops.push_back(Ops[1]); in EmitAArch64BuiltinExpr()
5694 Ops.erase(Ops.begin()+1); in EmitAArch64BuiltinExpr()
5695 Ops[1] = Builder.CreateBitCast(Ops[1], Ty); in EmitAArch64BuiltinExpr()
5696 Ops[2] = Builder.CreateBitCast(Ops[2], Ty); in EmitAArch64BuiltinExpr()
5697 Ops[3] = Builder.CreateBitCast(Ops[3], Ty); in EmitAArch64BuiltinExpr()
5698 Ops[4] = Builder.CreateBitCast(Ops[4], Ty); in EmitAArch64BuiltinExpr()
5699 Ops[5] = Builder.CreateZExt(Ops[5], in EmitAArch64BuiltinExpr()
5701 Ops[1] = Builder.CreateCall(F, makeArrayRef(Ops).slice(1), "vld4_lane"); in EmitAArch64BuiltinExpr()
5702 Ty = llvm::PointerType::getUnqual(Ops[1]->getType()); in EmitAArch64BuiltinExpr()
5703 Ops[0] = Builder.CreateBitCast(Ops[0], Ty); in EmitAArch64BuiltinExpr()
5704 return Builder.CreateStore(Ops[1], Ops[0]); in EmitAArch64BuiltinExpr()
5708 Ops.push_back(Ops[0]); in EmitAArch64BuiltinExpr()
5709 Ops.erase(Ops.begin()); in EmitAArch64BuiltinExpr()
5710 llvm::Type *Tys[2] = { VTy, Ops[2]->getType() }; in EmitAArch64BuiltinExpr()
5712 Ops, ""); in EmitAArch64BuiltinExpr()
5716 Ops.push_back(Ops[0]); in EmitAArch64BuiltinExpr()
5717 Ops.erase(Ops.begin()); in EmitAArch64BuiltinExpr()
5718 Ops[2] = Builder.CreateZExt(Ops[2], in EmitAArch64BuiltinExpr()
5720 llvm::Type *Tys[2] = { VTy, Ops[3]->getType() }; in EmitAArch64BuiltinExpr()
5722 Ops, ""); in EmitAArch64BuiltinExpr()
5726 Ops.push_back(Ops[0]); in EmitAArch64BuiltinExpr()
5727 Ops.erase(Ops.begin()); in EmitAArch64BuiltinExpr()
5728 llvm::Type *Tys[2] = { VTy, Ops[3]->getType() }; in EmitAArch64BuiltinExpr()
5730 Ops, ""); in EmitAArch64BuiltinExpr()
5734 Ops.push_back(Ops[0]); in EmitAArch64BuiltinExpr()
5735 Ops.erase(Ops.begin()); in EmitAArch64BuiltinExpr()
5736 Ops[3] = Builder.CreateZExt(Ops[3], in EmitAArch64BuiltinExpr()
5738 llvm::Type *Tys[2] = { VTy, Ops[4]->getType() }; in EmitAArch64BuiltinExpr()
5740 Ops, ""); in EmitAArch64BuiltinExpr()
5744 Ops.push_back(Ops[0]); in EmitAArch64BuiltinExpr()
5745 Ops.erase(Ops.begin()); in EmitAArch64BuiltinExpr()
5746 llvm::Type *Tys[2] = { VTy, Ops[4]->getType() }; in EmitAArch64BuiltinExpr()
5748 Ops, ""); in EmitAArch64BuiltinExpr()
5752 Ops.push_back(Ops[0]); in EmitAArch64BuiltinExpr()
5753 Ops.erase(Ops.begin()); in EmitAArch64BuiltinExpr()
5754 Ops[4] = Builder.CreateZExt(Ops[4], in EmitAArch64BuiltinExpr()
5756 llvm::Type *Tys[2] = { VTy, Ops[5]->getType() }; in EmitAArch64BuiltinExpr()
5758 Ops, ""); in EmitAArch64BuiltinExpr()
5762 Ops[0] = Builder.CreateBitCast(Ops[0], llvm::PointerType::getUnqual(Ty)); in EmitAArch64BuiltinExpr()
5763 Ops[1] = Builder.CreateBitCast(Ops[1], Ty); in EmitAArch64BuiltinExpr()
5764 Ops[2] = Builder.CreateBitCast(Ops[2], Ty); in EmitAArch64BuiltinExpr()
5773 Value *Addr = Builder.CreateConstInBoundsGEP1_32(Ty, Ops[0], vi); in EmitAArch64BuiltinExpr()
5775 SV = Builder.CreateShuffleVector(Ops[1], Ops[2], SV, "vtrn"); in EmitAArch64BuiltinExpr()
5782 Ops[0] = Builder.CreateBitCast(Ops[0], llvm::PointerType::getUnqual(Ty)); in EmitAArch64BuiltinExpr()
5783 Ops[1] = Builder.CreateBitCast(Ops[1], Ty); in EmitAArch64BuiltinExpr()
5784 Ops[2] = Builder.CreateBitCast(Ops[2], Ty); in EmitAArch64BuiltinExpr()
5792 Value *Addr = Builder.CreateConstInBoundsGEP1_32(Ty, Ops[0], vi); in EmitAArch64BuiltinExpr()
5794 SV = Builder.CreateShuffleVector(Ops[1], Ops[2], SV, "vuzp"); in EmitAArch64BuiltinExpr()
5801 Ops[0] = Builder.CreateBitCast(Ops[0], llvm::PointerType::getUnqual(Ty)); in EmitAArch64BuiltinExpr()
5802 Ops[1] = Builder.CreateBitCast(Ops[1], Ty); in EmitAArch64BuiltinExpr()
5803 Ops[2] = Builder.CreateBitCast(Ops[2], Ty); in EmitAArch64BuiltinExpr()
5812 Value *Addr = Builder.CreateConstInBoundsGEP1_32(Ty, Ops[0], vi); in EmitAArch64BuiltinExpr()
5814 SV = Builder.CreateShuffleVector(Ops[1], Ops[2], SV, "vzip"); in EmitAArch64BuiltinExpr()
5821 Ops, "vtbl1"); in EmitAArch64BuiltinExpr()
5825 Ops, "vtbl2"); in EmitAArch64BuiltinExpr()
5829 Ops, "vtbl3"); in EmitAArch64BuiltinExpr()
5833 Ops, "vtbl4"); in EmitAArch64BuiltinExpr()
5837 Ops, "vtbx1"); in EmitAArch64BuiltinExpr()
5841 Ops, "vtbx2"); in EmitAArch64BuiltinExpr()
5845 Ops, "vtbx3"); in EmitAArch64BuiltinExpr()
5849 Ops, "vtbx4"); in EmitAArch64BuiltinExpr()
5854 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vsqadd"); in EmitAArch64BuiltinExpr()
5859 return EmitNeonCall(CGM.getIntrinsic(Int, Ty), Ops, "vuqadd"); in EmitAArch64BuiltinExpr()
5865 BuildVector(ArrayRef<llvm::Value*> Ops) { in BuildVector() argument
5866 assert((Ops.size() & (Ops.size() - 1)) == 0 && in BuildVector()
5869 for (unsigned i = 0, e = Ops.size(); i != e && AllConstants; ++i) in BuildVector()
5870 AllConstants &= isa<Constant>(Ops[i]); in BuildVector()
5875 for (unsigned i = 0, e = Ops.size(); i != e; ++i) in BuildVector()
5876 CstOps.push_back(cast<Constant>(Ops[i])); in BuildVector()
5882 llvm::UndefValue::get(llvm::VectorType::get(Ops[0]->getType(), Ops.size())); in BuildVector()
5884 for (unsigned i = 0, e = Ops.size(); i != e; ++i) in BuildVector()
5885 Result = Builder.CreateInsertElement(Result, Ops[i], Builder.getInt32(i)); in BuildVector()
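The assert at 5866 relies on the classic bit trick: n & (n - 1) clears the lowest set bit, so the expression is zero exactly when n is zero or a power of two, which is how BuildVector enforces a power-of-two operand count. As a standalone check:

    #include <cstddef>

    // True iff n is a (non-zero) power of two.
    bool is_power_of_two(std::size_t n) {
        return n != 0 && (n & (n - 1)) == 0;
    }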
5892 SmallVector<Value*, 4> Ops; in EmitX86BuiltinExpr() local
5903 Ops.push_back(EmitScalarExpr(E->getArg(i))); in EmitX86BuiltinExpr()
5912 Ops.push_back(llvm::ConstantInt::get(getLLVMContext(), Result)); in EmitX86BuiltinExpr()
5928 return Builder.CreateBitCast(BuildVector(Ops), in EmitX86BuiltinExpr()
5931 return Builder.CreateExtractElement(Ops[0], in EmitX86BuiltinExpr()
5932 llvm::ConstantInt::get(Ops[1]->getType(), 0)); in EmitX86BuiltinExpr()
5935 Builder.CreateStore(Ops[0], Tmp); in EmitX86BuiltinExpr()
5951 Ops[1] = Builder.CreateBitCast(Ops[1], VecTy, "cast"); in EmitX86BuiltinExpr()
5956 Ops[1] = Builder.CreateExtractElement(Ops[1], Idx, "extract"); in EmitX86BuiltinExpr()
5959 Ops[0] = Builder.CreateBitCast(Ops[0], PtrTy); in EmitX86BuiltinExpr()
5960 return Builder.CreateStore(Ops[1], Ops[0]); in EmitX86BuiltinExpr()
5964 unsigned ShiftVal = cast<llvm::ConstantInt>(Ops[2])->getZExtValue(); in EmitX86BuiltinExpr()
5967 cast<llvm::VectorType>(Ops[0]->getType())->getNumElements(); in EmitX86BuiltinExpr()
5981 Ops[0] = llvm::Constant::getNullValue(Ops[0]->getType()); in EmitX86BuiltinExpr()
5996 return Builder.CreateShuffleVector(Ops[1], Ops[0], SV, "palignr"); in EmitX86BuiltinExpr()
6000 unsigned shiftVal = cast<llvm::ConstantInt>(Ops[1])->getZExtValue() >> 3; in EmitX86BuiltinExpr()
6017 Ops[0] = Builder.CreateBitCast(Ops[0], VecTy, "cast"); in EmitX86BuiltinExpr()
6021 SV = Builder.CreateShuffleVector(Zero, Ops[0], SV, "pslldq"); in EmitX86BuiltinExpr()
6027 unsigned shiftVal = cast<llvm::ConstantInt>(Ops[1])->getZExtValue() >> 3; in EmitX86BuiltinExpr()
6044 Ops[0] = Builder.CreateBitCast(Ops[0], VecTy, "cast"); in EmitX86BuiltinExpr()
6048 SV = Builder.CreateShuffleVector(Ops[0], Zero, SV, "psrldq"); in EmitX86BuiltinExpr()
6064 Value *BC = Builder.CreateBitCast(Ops[0], in EmitX86BuiltinExpr()
6065 llvm::PointerType::getUnqual(Ops[1]->getType()), in EmitX86BuiltinExpr()
6067 StoreInst *SI = Builder.CreateStore(Ops[1], BC); in EmitX86BuiltinExpr()
6085 Ops[0] = Builder.CreateBitCast(Ops[0], MMXTy, "cast"); in EmitX86BuiltinExpr()
6087 return Builder.CreateCall(F, Ops, "pswapd"); in EmitX86BuiltinExpr()
6119 Builder.CreateStore(Builder.CreateExtractValue(Call, 0), Ops[0]); in EmitX86BuiltinExpr()
6267 Ops.push_back(llvm::ConstantInt::get(Int8Ty, Imm)); in EmitX86BuiltinExpr()
6269 return Builder.CreateCall(F, Ops, name); in EmitX86BuiltinExpr()
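The palignr handling around 5964-5996 either emits a two-input shuffle directly or first replaces the high input with zeros (5981), depending on how far the byte shift reaches. Treating the two 16-byte sources as one 32-byte value makes the intent clear; the model below follows the architectural definition of PALIGNR rather than the exact IR shapes above:

    #include <array>
    #include <cstdint>

    // Result byte i is byte (i + shift) of concat(lo, hi), reading zeros once
    // the index runs past both inputs.
    std::array<uint8_t, 16> palignr_model(const std::array<uint8_t, 16> &hi,
                                          const std::array<uint8_t, 16> &lo,
                                          unsigned shift) {
        std::array<uint8_t, 16> out{};
        for (unsigned i = 0; i != 16; ++i) {
            unsigned src = i + shift;
            if (src < 16)
                out[i] = lo[src];
            else if (src < 32)
                out[i] = hi[src - 16];
            // otherwise the lane stays zero
        }
        return out;
    }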
6276 SmallVector<Value*, 4> Ops; in EmitPPCBuiltinExpr() local
6279 Ops.push_back(EmitScalarExpr(E->getArg(i))); in EmitPPCBuiltinExpr()
6297 Ops[1] = Builder.CreateBitCast(Ops[1], Int8PtrTy); in EmitPPCBuiltinExpr()
6299 Ops[0] = Builder.CreateGEP(Ops[1], Ops[0]); in EmitPPCBuiltinExpr()
6300 Ops.pop_back(); in EmitPPCBuiltinExpr()
6333 return Builder.CreateCall(F, Ops, ""); in EmitPPCBuiltinExpr()
6345 Ops[2] = Builder.CreateBitCast(Ops[2], Int8PtrTy); in EmitPPCBuiltinExpr()
6346 Ops[1] = Builder.CreateGEP(Ops[2], Ops[1]); in EmitPPCBuiltinExpr()
6347 Ops.pop_back(); in EmitPPCBuiltinExpr()
6374 return Builder.CreateCall(F, Ops, ""); in EmitPPCBuiltinExpr()