//===-- X86InstrFragmentsSIMD.td - x86 SIMD ISA ------------*- tablegen -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file provides pattern fragments useful for SIMD instructions.
//
//===----------------------------------------------------------------------===//

//===----------------------------------------------------------------------===//
// MMX specific DAG Nodes.
//===----------------------------------------------------------------------===//

// Low word of MMX to GPR.
def MMX_X86movd2w : SDNode<"X86ISD::MMX_MOVD2W", SDTypeProfile<1, 1,
                           [SDTCisVT<0, i32>, SDTCisVT<1, x86mmx>]>>;
// GPR to low word of MMX.
def MMX_X86movw2d : SDNode<"X86ISD::MMX_MOVW2D", SDTypeProfile<1, 1,
                           [SDTCisVT<0, x86mmx>, SDTCisVT<1, i32>]>>;

//===----------------------------------------------------------------------===//
// MMX Pattern Fragments
//===----------------------------------------------------------------------===//

def load_mmx   : PatFrag<(ops node:$ptr), (x86mmx (load node:$ptr))>;
def load_mvmmx : PatFrag<(ops node:$ptr),
                         (x86mmx (MMX_X86movw2d (load node:$ptr)))>;
def bc_mmx     : PatFrag<(ops node:$in), (x86mmx (bitconvert node:$in))>;

//===----------------------------------------------------------------------===//
// SSE specific DAG Nodes.
//===----------------------------------------------------------------------===//

def SDTX86VFCMP : SDTypeProfile<1, 3, [SDTCisInt<0>, SDTCisSameAs<1, 2>,
                                       SDTCisFP<1>, SDTCisVT<3, i8>,
                                       SDTCisVec<1>]>;
def SDTX86CmpTestSae : SDTypeProfile<1, 3, [SDTCisVT<0, i32>,
                                            SDTCisSameAs<1, 2>, SDTCisInt<3>]>;

def X86fmin    : SDNode<"X86ISD::FMIN",      SDTFPBinOp>;
def X86fmax    : SDNode<"X86ISD::FMAX",      SDTFPBinOp>;
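
// Note: FMIN/FMAX follow x86 MINPS/MAXPS semantics (when either operand is
// NaN, or when both operands are +/-0.0, the second source operand is
// returned), so they are deliberately not marked commutative, unlike the
// FMINC/FMAXC variants below.
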
// Commutative and Associative FMIN and FMAX.
def X86fminc   : SDNode<"X86ISD::FMINC", SDTFPBinOp,
                        [SDNPCommutative, SDNPAssociative]>;
def X86fmaxc   : SDNode<"X86ISD::FMAXC", SDTFPBinOp,
                        [SDNPCommutative, SDNPAssociative]>;

def X86fand    : SDNode<"X86ISD::FAND",  SDTFPBinOp,
                        [SDNPCommutative, SDNPAssociative]>;
def X86for     : SDNode<"X86ISD::FOR",   SDTFPBinOp,
                        [SDNPCommutative, SDNPAssociative]>;
def X86fxor    : SDNode<"X86ISD::FXOR",  SDTFPBinOp,
                        [SDNPCommutative, SDNPAssociative]>;
// FANDN is (~X & Y), which is neither commutative nor associative, so it
// carries no DAG node flags.
def X86fandn   : SDNode<"X86ISD::FANDN", SDTFPBinOp>;
def X86frsqrt  : SDNode<"X86ISD::FRSQRT",    SDTFPUnaryOp>;
def X86frcp    : SDNode<"X86ISD::FRCP",      SDTFPUnaryOp>;
def X86frsqrt14s: SDNode<"X86ISD::FRSQRT",   SDTFPBinOp>;
def X86frcp14s : SDNode<"X86ISD::FRCP",      SDTFPBinOp>;
def X86fgetsign: SDNode<"X86ISD::FGETSIGNx86", SDTFPToIntOp>;
def X86fhadd   : SDNode<"X86ISD::FHADD",     SDTFPBinOp>;
def X86fhsub   : SDNode<"X86ISD::FHSUB",     SDTFPBinOp>;
def X86hadd    : SDNode<"X86ISD::HADD",      SDTIntBinOp>;
def X86hsub    : SDNode<"X86ISD::HSUB",      SDTIntBinOp>;
def X86comi    : SDNode<"X86ISD::COMI",      SDTX86CmpTest>;
def X86comiSae : SDNode<"X86ISD::COMI",      SDTX86CmpTestSae>;
def X86ucomi   : SDNode<"X86ISD::UCOMI",     SDTX86CmpTest>;
def X86ucomiSae: SDNode<"X86ISD::UCOMI",     SDTX86CmpTestSae>;
def X86cmps    : SDNode<"X86ISD::FSETCC",    SDTX86Cmps>;
//def X86cmpsd : SDNode<"X86ISD::FSETCCsd", SDTX86Cmpsd>;
def X86cvtdq2pd: SDNode<"X86ISD::CVTDQ2PD",
                 SDTypeProfile<1, 1, [SDTCisVT<0, v2f64>,
                                      SDTCisVT<1, v4i32>]>>;
def X86cvtudq2pd: SDNode<"X86ISD::CVTUDQ2PD",
                 SDTypeProfile<1, 1, [SDTCisVT<0, v2f64>,
                                      SDTCisVT<1, v4i32>]>>;
def X86pshufb  : SDNode<"X86ISD::PSHUFB",
                 SDTypeProfile<1, 2, [SDTCVecEltisVT<0, i8>, SDTCisSameAs<0,1>,
                                      SDTCisSameAs<0,2>]>>;
def X86psadbw  : SDNode<"X86ISD::PSADBW",
                 SDTypeProfile<1, 2, [SDTCVecEltisVT<0, i64>,
                                      SDTCVecEltisVT<1, i8>,
                                      SDTCisSameSizeAs<0,1>,
                                      SDTCisSameAs<1,2>]>>;
def X86dbpsadbw : SDNode<"X86ISD::DBPSADBW",
                 SDTypeProfile<1, 3, [SDTCVecEltisVT<0, i16>,
                                      SDTCVecEltisVT<1, i8>,
                                      SDTCisSameSizeAs<0,1>,
                                      SDTCisSameAs<1,2>, SDTCisInt<3>]>>;
def X86andnp   : SDNode<"X86ISD::ANDNP",
                 SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisSameAs<0,1>,
                                      SDTCisSameAs<0,2>]>>;
def X86psign   : SDNode<"X86ISD::PSIGN",
                 SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisSameAs<0,1>,
                                      SDTCisSameAs<0,2>]>>;
def X86pextrb  : SDNode<"X86ISD::PEXTRB",
                 SDTypeProfile<1, 2, [SDTCisVT<0, i32>, SDTCisVT<1, v16i8>,
                                      SDTCisPtrTy<2>]>>;
def X86pextrw  : SDNode<"X86ISD::PEXTRW",
                 SDTypeProfile<1, 2, [SDTCisVT<0, i32>, SDTCisVT<1, v8i16>,
                                      SDTCisPtrTy<2>]>>;
def X86pinsrb  : SDNode<"X86ISD::PINSRB",
                 SDTypeProfile<1, 3, [SDTCisVT<0, v16i8>, SDTCisSameAs<0,1>,
                                      SDTCisVT<2, i32>, SDTCisPtrTy<3>]>>;
def X86pinsrw  : SDNode<"X86ISD::PINSRW",
                 SDTypeProfile<1, 3, [SDTCisVT<0, v8i16>, SDTCisSameAs<0,1>,
                                      SDTCisVT<2, i32>, SDTCisPtrTy<3>]>>;
def X86insertps : SDNode<"X86ISD::INSERTPS",
                 SDTypeProfile<1, 3, [SDTCisVT<0, v4f32>, SDTCisSameAs<0,1>,
                                      SDTCisVT<2, v4f32>, SDTCisVT<3, i8>]>>;
def X86vzmovl  : SDNode<"X86ISD::VZEXT_MOVL",
                 SDTypeProfile<1, 1, [SDTCisSameAs<0,1>]>>;

def X86vzload  : SDNode<"X86ISD::VZEXT_LOAD", SDTLoad,
                        [SDNPHasChain, SDNPMayLoad, SDNPMemOperand]>;

def X86vzext   : SDNode<"X86ISD::VZEXT",
                        SDTypeProfile<1, 1, [SDTCisVec<0>, SDTCisVec<1>,
                                             SDTCisInt<0>, SDTCisInt<1>,
                                             SDTCisOpSmallerThanOp<1, 0>]>>;
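
// For illustration: VZEXT/VSEXT correspond to the PMOVZX/PMOVSX families.
// For example, a (v4i32 (X86vzext (v16i8 VR128:$src))) node zero-extends the
// low four i8 elements of $src to i32, as PMOVZXBD does. (Hypothetical dag
// shown for exposition; the real patterns live in the instruction files.)
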
def X86vsext   : SDNode<"X86ISD::VSEXT",
                        SDTypeProfile<1, 1, [SDTCisVec<0>, SDTCisVec<1>,
                                             SDTCisInt<0>, SDTCisInt<1>,
                                             SDTCisOpSmallerThanOp<1, 0>]>>;

def SDTVtrunc    : SDTypeProfile<1, 1, [SDTCisVec<0>, SDTCisVec<1>,
                                        SDTCisInt<0>, SDTCisInt<1>,
                                        SDTCisOpSmallerThanOp<0, 1>]>;

def X86vtrunc    : SDNode<"X86ISD::VTRUNC",   SDTVtrunc>;
def X86vtruncs   : SDNode<"X86ISD::VTRUNCS",  SDTVtrunc>;
def X86vtruncus  : SDNode<"X86ISD::VTRUNCUS", SDTVtrunc>;

def X86trunc    : SDNode<"X86ISD::TRUNC",
                         SDTypeProfile<1, 1, [SDTCisInt<0>, SDTCisInt<1>,
                                              SDTCisOpSmallerThanOp<0, 1>]>>;
def X86vfpext  : SDNode<"X86ISD::VFPEXT",
                        SDTypeProfile<1, 1, [SDTCisVec<0>, SDTCisVec<1>,
                                             SDTCisFP<0>, SDTCisFP<1>,
                                             SDTCisOpSmallerThanOp<1, 0>]>>;
def X86vfpround: SDNode<"X86ISD::VFPROUND",
                        SDTypeProfile<1, 1, [SDTCisVec<0>, SDTCisVec<1>,
                                             SDTCisFP<0>, SDTCisFP<1>,
                                             SDTCisOpSmallerThanOp<0, 1>]>>;

def X86fround: SDNode<"X86ISD::VFPROUND",
                      SDTypeProfile<1, 2, [SDTCisFP<0>, SDTCisFP<1>, SDTCisFP<2>,
                                           SDTCVecEltisVT<0, f32>,
                                           SDTCVecEltisVT<1, f64>,
                                           SDTCVecEltisVT<2, f64>,
                                           SDTCisOpSmallerThanOp<0, 1>]>>;
def X86froundRnd: SDNode<"X86ISD::VFPROUND",
                      SDTypeProfile<1, 3, [SDTCisFP<0>, SDTCisFP<1>, SDTCisFP<2>,
                                           SDTCVecEltisVT<0, f32>,
                                           SDTCVecEltisVT<1, f64>,
                                           SDTCVecEltisVT<2, f64>,
                                           SDTCisOpSmallerThanOp<0, 1>,
                                           SDTCisInt<3>]>>;

def X86fpext : SDNode<"X86ISD::VFPEXT",
                      SDTypeProfile<1, 2, [SDTCisFP<0>, SDTCisFP<1>, SDTCisFP<2>,
                                           SDTCVecEltisVT<0, f64>,
                                           SDTCVecEltisVT<1, f32>,
                                           SDTCVecEltisVT<2, f32>,
                                           SDTCisOpSmallerThanOp<1, 0>]>>;

def X86fpextRnd : SDNode<"X86ISD::VFPEXT",
                      SDTypeProfile<1, 3, [SDTCisFP<0>, SDTCisFP<1>, SDTCisFP<2>,
                                           SDTCVecEltisVT<0, f64>,
                                           SDTCVecEltisVT<1, f32>,
                                           SDTCVecEltisVT<2, f32>,
                                           SDTCisOpSmallerThanOp<1, 0>,
                                           SDTCisInt<3>]>>;

def X86vshldq  : SDNode<"X86ISD::VSHLDQ",    SDTIntShiftOp>;
def X86vshrdq  : SDNode<"X86ISD::VSRLDQ",    SDTIntShiftOp>;
def X86cmpp    : SDNode<"X86ISD::CMPP",      SDTX86VFCMP>;
def X86pcmpeq  : SDNode<"X86ISD::PCMPEQ", SDTIntBinOp, [SDNPCommutative]>;
def X86pcmpgt  : SDNode<"X86ISD::PCMPGT", SDTIntBinOp>;

def X86IntCmpMask : SDTypeProfile<1, 2,
    [SDTCisVec<0>, SDTCisSameAs<1, 2>, SDTCisInt<1>]>;
def X86pcmpeqm  : SDNode<"X86ISD::PCMPEQM", X86IntCmpMask, [SDNPCommutative]>;
def X86pcmpgtm  : SDNode<"X86ISD::PCMPGTM", X86IntCmpMask>;

def X86CmpMaskCC :
      SDTypeProfile<1, 3, [SDTCisVec<0>, SDTCVecEltisVT<0, i1>,
                           SDTCisVec<1>, SDTCisSameAs<2, 1>,
                           SDTCisSameNumEltsAs<0, 1>, SDTCisVT<3, i8>]>;
def X86CmpMaskCCRound :
      SDTypeProfile<1, 4, [SDTCisVec<0>, SDTCVecEltisVT<0, i1>,
                           SDTCisVec<1>, SDTCisSameAs<2, 1>,
                           SDTCisSameNumEltsAs<0, 1>, SDTCisVT<3, i8>,
                           SDTCisInt<4>]>;
def X86CmpMaskCCScalar :
      SDTypeProfile<1, 3, [SDTCisInt<0>, SDTCisSameAs<1, 2>, SDTCisVT<3, i8>]>;

def X86CmpMaskCCScalarRound :
      SDTypeProfile<1, 4, [SDTCisInt<0>, SDTCisSameAs<1, 2>, SDTCisVT<3, i8>,
                           SDTCisInt<4>]>;

def X86cmpm     : SDNode<"X86ISD::CMPM",     X86CmpMaskCC>;
def X86cmpmRnd  : SDNode<"X86ISD::CMPM_RND", X86CmpMaskCCRound>;
def X86cmpmu    : SDNode<"X86ISD::CMPMU",    X86CmpMaskCC>;
def X86cmpms    : SDNode<"X86ISD::FSETCC",   X86CmpMaskCCScalar>;
def X86cmpmsRnd : SDNode<"X86ISD::FSETCC",   X86CmpMaskCCScalarRound>;

def X86vshl    : SDNode<"X86ISD::VSHL",
                        SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisSameAs<0,1>,
                                             SDTCisVec<2>]>>;
def X86vsrl    : SDNode<"X86ISD::VSRL",
                        SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisSameAs<0,1>,
                                             SDTCisVec<2>]>>;
def X86vsra    : SDNode<"X86ISD::VSRA",
                        SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisSameAs<0,1>,
                                             SDTCisVec<2>]>>;
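
// VSHL/VSRL/VSRA above take the shift amount in the low 64 bits of a vector
// operand, matching the xmm-count forms of PSLL/PSRL/PSRA; the VSHLI/VSRLI/
// VSRAI variants below take the shift amount as an immediate instead.
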
def X86vshli   : SDNode<"X86ISD::VSHLI", SDTIntShiftOp>;
def X86vsrli   : SDNode<"X86ISD::VSRLI", SDTIntShiftOp>;
def X86vsrai   : SDNode<"X86ISD::VSRAI", SDTIntShiftOp>;

def X86vprot   : SDNode<"X86ISD::VPROT",
                        SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisSameAs<0,1>,
                                             SDTCisSameAs<0,2>]>>;
def X86vproti  : SDNode<"X86ISD::VPROTI",
                        SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisSameAs<0,1>,
                                             SDTCisVT<2, i8>]>>;

def X86vpshl   : SDNode<"X86ISD::VPSHL",
                        SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisSameAs<0,1>,
                                             SDTCisSameAs<0,2>]>>;
def X86vpsha   : SDNode<"X86ISD::VPSHA",
                        SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisSameAs<0,1>,
                                             SDTCisSameAs<0,2>]>>;

def X86vpcom   : SDNode<"X86ISD::VPCOM",
                        SDTypeProfile<1, 3, [SDTCisVec<0>, SDTCisSameAs<0,1>,
                                             SDTCisSameAs<0,2>,
                                             SDTCisVT<3, i8>]>>;
def X86vpcomu  : SDNode<"X86ISD::VPCOMU",
                        SDTypeProfile<1, 3, [SDTCisVec<0>, SDTCisSameAs<0,1>,
                                             SDTCisSameAs<0,2>,
                                             SDTCisVT<3, i8>]>>;

def SDTX86CmpPTest : SDTypeProfile<1, 2, [SDTCisVT<0, i32>,
                                          SDTCisVec<1>,
                                          SDTCisSameAs<2, 1>]>;
def X86addus   : SDNode<"X86ISD::ADDUS", SDTIntBinOp>;
def X86subus   : SDNode<"X86ISD::SUBUS", SDTIntBinOp>;
def X86adds    : SDNode<"X86ISD::ADDS", SDTIntBinOp>;
def X86subs    : SDNode<"X86ISD::SUBS", SDTIntBinOp>;
def X86mulhrs  : SDNode<"X86ISD::MULHRS", SDTIntBinOp>;
def X86avg     : SDNode<"X86ISD::AVG", SDTIntBinOp>;
def X86ptest   : SDNode<"X86ISD::PTEST", SDTX86CmpPTest>;
def X86testp   : SDNode<"X86ISD::TESTP", SDTX86CmpPTest>;
def X86kortest : SDNode<"X86ISD::KORTEST", SDTX86CmpPTest>;
def X86ktest   : SDNode<"X86ISD::KTEST", SDTX86CmpPTest>;
def X86testm   : SDNode<"X86ISD::TESTM", SDTypeProfile<1, 2, [SDTCisVec<0>,
                                         SDTCisVec<1>, SDTCisSameAs<2, 1>,
                                         SDTCVecEltisVT<0, i1>,
                                         SDTCisSameNumEltsAs<0, 1>]>>;
def X86testnm  : SDNode<"X86ISD::TESTNM", SDTypeProfile<1, 2, [SDTCisVec<0>,
                                          SDTCisVec<1>, SDTCisSameAs<2, 1>,
                                          SDTCVecEltisVT<0, i1>,
                                          SDTCisSameNumEltsAs<0, 1>]>>;
def X86select  : SDNode<"X86ISD::SELECT", SDTSelect>;

def X86pmuludq : SDNode<"X86ISD::PMULUDQ",
                        SDTypeProfile<1, 2, [SDTCVecEltisVT<0, i64>,
                                             SDTCVecEltisVT<1, i32>,
                                             SDTCisSameSizeAs<0,1>,
                                             SDTCisSameAs<1,2>]>>;
def X86pmuldq  : SDNode<"X86ISD::PMULDQ",
                        SDTypeProfile<1, 2, [SDTCVecEltisVT<0, i64>,
                                             SDTCVecEltisVT<1, i32>,
                                             SDTCisSameSizeAs<0,1>,
                                             SDTCisSameAs<1,2>]>>;

def X86extrqi : SDNode<"X86ISD::EXTRQI",
                SDTypeProfile<1, 3, [SDTCisVT<0, v2i64>, SDTCisSameAs<0,1>,
                                     SDTCisVT<2, i8>, SDTCisVT<3, i8>]>>;
def X86insertqi : SDNode<"X86ISD::INSERTQI",
                  SDTypeProfile<1, 4, [SDTCisVT<0, v2i64>, SDTCisSameAs<0,1>,
                                       SDTCisSameAs<1,2>, SDTCisVT<3, i8>,
                                       SDTCisVT<4, i8>]>>;

// Specific shuffle nodes - At some point ISD::VECTOR_SHUFFLE will always get
// translated into one of the target nodes below during lowering.
// Note: this is a work in progress...
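//
// For illustration, a DAG such as
//   (v4i32 (vector_shuffle (v4i32 V), undef, <1,1,3,3>))
// can be lowered to (v4i32 (X86PShufd V, (i8 0xF5))), since PSHUFD packs the
// four source lane indices into one byte (here 1,1,3,3 -> 0b11110101).
// (Mask chosen for exposition only; the lowering may prefer other nodes.)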
def SDTShuff1Op : SDTypeProfile<1, 1, [SDTCisVec<0>, SDTCisSameAs<0,1>]>;
def SDTShuff2Op : SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisSameAs<0,1>,
                                       SDTCisSameAs<0,2>]>;

def SDTShuff2OpM : SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisSameAs<0,1>,
                                        SDTCisSameSizeAs<0,2>,
                                        SDTCisSameNumEltsAs<0,2>]>;
def SDTShuff2OpI : SDTypeProfile<1, 2, [SDTCisVec<0>,
                                        SDTCisSameAs<0,1>, SDTCisVT<2, i8>]>;
def SDTShuff3OpI : SDTypeProfile<1, 3, [SDTCisVec<0>, SDTCisSameAs<0,1>,
                                        SDTCisSameAs<0,2>, SDTCisVT<3, i8>]>;
def SDTFPBinOpImmRound: SDTypeProfile<1, 4, [SDTCisVec<0>, SDTCisSameAs<0,1>,
                              SDTCisSameAs<0,2>, SDTCisInt<3>, SDTCisInt<4>]>;
def SDTFPUnaryOpImmRound: SDTypeProfile<1, 3, [SDTCisVec<0>, SDTCisSameAs<0,1>,
                              SDTCisInt<2>, SDTCisInt<3>]>;

def SDTVBroadcast  : SDTypeProfile<1, 1, [SDTCisVec<0>]>;
def SDTVBroadcastm : SDTypeProfile<1, 1, [SDTCisVec<0>,
                                          SDTCisInt<0>, SDTCisInt<1>]>;

def SDTBlend : SDTypeProfile<1, 3, [SDTCisVec<0>, SDTCisSameAs<0,1>,
                                    SDTCisSameAs<1,2>, SDTCisVT<3, i8>]>;

def SDTTernlog : SDTypeProfile<1, 4, [SDTCisVec<0>, SDTCisSameAs<0,1>,
                                      SDTCisSameAs<0,2>, SDTCisSameAs<0,3>,
                                      SDTCisVT<4, i8>]>;

def SDTFPBinOpRound : SDTypeProfile<1, 3, [  // fadd_round, fmul_round, etc.
  SDTCisSameAs<0, 1>, SDTCisSameAs<0, 2>, SDTCisFP<0>, SDTCisInt<3>]>;

def SDTFPUnaryOpRound : SDTypeProfile<1, 2, [ // fsqrt_round, fgetexp_round, etc.
  SDTCisSameAs<0, 1>, SDTCisFP<0>, SDTCisInt<2>]>;

def SDTFma : SDTypeProfile<1, 3, [SDTCisSameAs<0,1>,
                                  SDTCisSameAs<1,2>, SDTCisSameAs<1,3>]>;
def SDTFmaRound : SDTypeProfile<1, 4, [SDTCisSameAs<0,1>,
                                  SDTCisSameAs<1,2>, SDTCisSameAs<1,3>,
                                  SDTCisInt<4>]>;
def STDFp1SrcRm : SDTypeProfile<1, 2, [SDTCisSameAs<0,1>,
                                  SDTCisVec<0>, SDTCisVT<2, i32>]>;
def STDFp2SrcRm : SDTypeProfile<1, 3, [SDTCisSameAs<0,1>,
                                  SDTCisVec<0>, SDTCisVT<3, i32>]>;
def STDFp3SrcRm : SDTypeProfile<1, 4, [SDTCisSameAs<0,1>,
                                  SDTCisVec<0>, SDTCisVT<3, i32>,
                                  SDTCisVT<4, i32>]>;

def X86PAlignr : SDNode<"X86ISD::PALIGNR", SDTShuff3OpI>;
def X86VAlign  : SDNode<"X86ISD::VALIGN", SDTShuff3OpI>;

def X86Abs      : SDNode<"X86ISD::ABS", SDTIntUnaryOp>;
def X86Conflict : SDNode<"X86ISD::CONFLICT", SDTIntUnaryOp>;

def X86PShufd  : SDNode<"X86ISD::PSHUFD", SDTShuff2OpI>;
def X86PShufhw : SDNode<"X86ISD::PSHUFHW", SDTShuff2OpI>;
def X86PShuflw : SDNode<"X86ISD::PSHUFLW", SDTShuff2OpI>;

def X86Shufp   : SDNode<"X86ISD::SHUFP", SDTShuff3OpI>;
def X86Shuf128 : SDNode<"X86ISD::SHUF128", SDTShuff3OpI>;

def X86Movddup  : SDNode<"X86ISD::MOVDDUP", SDTShuff1Op>;
def X86Movshdup : SDNode<"X86ISD::MOVSHDUP", SDTShuff1Op>;
def X86Movsldup : SDNode<"X86ISD::MOVSLDUP", SDTShuff1Op>;

def X86Movsd : SDNode<"X86ISD::MOVSD", SDTShuff2Op>;
def X86Movss : SDNode<"X86ISD::MOVSS", SDTShuff2Op>;

def X86Movlhps : SDNode<"X86ISD::MOVLHPS", SDTShuff2Op>;
def X86Movlhpd : SDNode<"X86ISD::MOVLHPD", SDTShuff2Op>;
def X86Movhlps : SDNode<"X86ISD::MOVHLPS", SDTShuff2Op>;

def X86Movlps : SDNode<"X86ISD::MOVLPS", SDTShuff2Op>;
def X86Movlpd : SDNode<"X86ISD::MOVLPD", SDTShuff2Op>;

def SDTPack : SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisVec<1>,
                                   SDTCisSameSizeAs<0,1>,
                                   SDTCisSameAs<1,2>]>;
def X86Packss : SDNode<"X86ISD::PACKSS", SDTPack>;
def X86Packus : SDNode<"X86ISD::PACKUS", SDTPack>;

def X86Unpckl : SDNode<"X86ISD::UNPCKL", SDTShuff2Op>;
def X86Unpckh : SDNode<"X86ISD::UNPCKH", SDTShuff2Op>;
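
// As a concrete example of the unpack nodes above: on v4i32 inputs,
// (X86Unpckl A, B) interleaves the low halves to give <A0,B0,A1,B1>, and
// (X86Unpckh A, B) interleaves the high halves to give <A2,B2,A3,B3>.
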
def X86vpmaddubsw : SDNode<"X86ISD::VPMADDUBSW", SDTPack>;
def X86vpmaddwd   : SDNode<"X86ISD::VPMADDWD", SDTPack>;

def X86VPermilpv  : SDNode<"X86ISD::VPERMILPV", SDTShuff2OpM>;
def X86VPermilpi  : SDNode<"X86ISD::VPERMILPI", SDTShuff2OpI>;
def X86VPermv     : SDNode<"X86ISD::VPERMV",
                    SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisInt<1>,
                                         SDTCisSameNumEltsAs<0,1>,
                                         SDTCisSameSizeAs<0,1>,
                                         SDTCisSameAs<0,2>]>>;
def X86VPermi     : SDNode<"X86ISD::VPERMI", SDTShuff2OpI>;
def X86VPermt2    : SDNode<"X86ISD::VPERMV3",
                    SDTypeProfile<1, 3, [SDTCisVec<0>,
                                         SDTCisSameAs<0,1>, SDTCisInt<2>,
                                         SDTCisVec<2>, SDTCisSameNumEltsAs<0, 2>,
                                         SDTCisSameSizeAs<0,2>,
                                         SDTCisSameAs<0,3>]>, []>;

def X86VPermi2X   : SDNode<"X86ISD::VPERMIV3",
                    SDTypeProfile<1, 3, [SDTCisVec<0>, SDTCisInt<1>,
                                         SDTCisVec<1>, SDTCisSameNumEltsAs<0, 1>,
                                         SDTCisSameSizeAs<0,1>,
                                         SDTCisSameAs<0,2>,
                                         SDTCisSameAs<0,3>]>, []>;

def X86vpternlog  : SDNode<"X86ISD::VPTERNLOG", SDTTernlog>;

def X86VPerm2x128 : SDNode<"X86ISD::VPERM2X128", SDTShuff3OpI>;

def X86VFixupimm  : SDNode<"X86ISD::VFIXUPIMM", SDTFPBinOpImmRound>;
def X86VRange     : SDNode<"X86ISD::VRANGE", SDTFPBinOpImmRound>;
def X86VReduce    : SDNode<"X86ISD::VREDUCE", SDTFPUnaryOpImmRound>;
def X86VRndScale  : SDNode<"X86ISD::VRNDSCALE", SDTFPUnaryOpImmRound>;
def X86VGetMant   : SDNode<"X86ISD::VGETMANT", SDTFPUnaryOpImmRound>;
def X86Vfpclass   : SDNode<"X86ISD::VFPCLASS",
                    SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCVecEltisVT<0, i1>,
                                         SDTCisVec<1>, SDTCisFP<1>,
                                         SDTCisSameNumEltsAs<0,1>,
                                         SDTCisVT<2, i32>]>, []>;
def X86Vfpclasss  : SDNode<"X86ISD::VFPCLASSS",
                    SDTypeProfile<1, 2, [SDTCisVT<0, i1>,
                                         SDTCisFP<1>, SDTCisVT<2, i32>]>, []>;

def X86SubVBroadcast : SDNode<"X86ISD::SUBV_BROADCAST",
                    SDTypeProfile<1, 1, [SDTCisVec<0>, SDTCisVec<1>,
                                         SDTCisSubVecOfVec<1, 0>]>, []>;
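
// SUBV_BROADCAST splats a narrower vector across every lane group of the
// result (e.g. v4f32 -> v8f32, as VBROADCASTF128 / VBROADCASTF32X4 do);
// SDTCisSubVecOfVec<1, 0> requires the source to be a strict subvector of
// the result type.
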
// The SDTCisSubVecOfVec restriction cannot be applied to the 128-bit version
// of VBROADCASTI32x2, where the source and result types are the same.
def X86SubV32x2Broadcast : SDNode<"X86ISD::SUBV_BROADCAST",
                    SDTypeProfile<1, 1, [SDTCisVec<0>,
                                         SDTCisSameAs<0,1>]>, []>;

def X86VBroadcast  : SDNode<"X86ISD::VBROADCAST", SDTVBroadcast>;
def X86VBroadcastm : SDNode<"X86ISD::VBROADCASTM", SDTVBroadcastm>;
def X86Vinsert  : SDNode<"X86ISD::VINSERT", SDTypeProfile<1, 3,
                    [SDTCisSameAs<0, 1>, SDTCisEltOfVec<2, 1>,
                     SDTCisPtrTy<3>]>, []>;
def X86Vextract : SDNode<"X86ISD::VEXTRACT", SDTypeProfile<1, 2,
                    [SDTCisEltOfVec<0, 1>, SDTCisVec<1>,
                     SDTCisPtrTy<2>]>, []>;

def X86Blendi : SDNode<"X86ISD::BLENDI", SDTBlend>;

def X86Addsub : SDNode<"X86ISD::ADDSUB", SDTFPBinOp>;

def X86faddRnd     : SDNode<"X86ISD::FADD_RND", SDTFPBinOpRound>;
def X86fsubRnd     : SDNode<"X86ISD::FSUB_RND", SDTFPBinOpRound>;
def X86fmulRnd     : SDNode<"X86ISD::FMUL_RND", SDTFPBinOpRound>;
def X86fdivRnd     : SDNode<"X86ISD::FDIV_RND", SDTFPBinOpRound>;
def X86fmaxRnd     : SDNode<"X86ISD::FMAX_RND", SDTFPBinOpRound>;
def X86scalef      : SDNode<"X86ISD::SCALEF", SDTFPBinOpRound>;
def X86fminRnd     : SDNode<"X86ISD::FMIN_RND", SDTFPBinOpRound>;
def X86fsqrtRnd    : SDNode<"X86ISD::FSQRT_RND", SDTFPUnaryOpRound>;
def X86fsqrtRnds   : SDNode<"X86ISD::FSQRT_RND", STDFp2SrcRm>;
def X86fgetexpRnd  : SDNode<"X86ISD::FGETEXP_RND", SDTFPUnaryOpRound>;
def X86fgetexpRnds : SDNode<"X86ISD::FGETEXP_RND", STDFp2SrcRm>;

def X86Fmadd    : SDNode<"X86ISD::FMADD", SDTFma>;
def X86Fnmadd   : SDNode<"X86ISD::FNMADD", SDTFma>;
def X86Fmsub    : SDNode<"X86ISD::FMSUB", SDTFma>;
def X86Fnmsub   : SDNode<"X86ISD::FNMSUB", SDTFma>;
def X86Fmaddsub : SDNode<"X86ISD::FMADDSUB", SDTFma>;
def X86Fmsubadd : SDNode<"X86ISD::FMSUBADD", SDTFma>;

def X86FmaddRnd    : SDNode<"X86ISD::FMADD_RND", SDTFmaRound>;
def X86FnmaddRnd   : SDNode<"X86ISD::FNMADD_RND", SDTFmaRound>;
def X86FmsubRnd    : SDNode<"X86ISD::FMSUB_RND", SDTFmaRound>;
def X86FnmsubRnd   : SDNode<"X86ISD::FNMSUB_RND", SDTFmaRound>;
def X86FmaddsubRnd : SDNode<"X86ISD::FMADDSUB_RND", SDTFmaRound>;
def X86FmsubaddRnd : SDNode<"X86ISD::FMSUBADD_RND", SDTFmaRound>;

def X86rsqrt28 : SDNode<"X86ISD::RSQRT28", STDFp1SrcRm>;
def X86rcp28   : SDNode<"X86ISD::RCP28", STDFp1SrcRm>;
def X86exp2    : SDNode<"X86ISD::EXP2", STDFp1SrcRm>;

def X86rsqrt28s  : SDNode<"X86ISD::RSQRT28", STDFp2SrcRm>;
def X86rcp28s    : SDNode<"X86ISD::RCP28", STDFp2SrcRm>;
def X86RndScales : SDNode<"X86ISD::VRNDSCALE", STDFp3SrcRm>;
def X86Reduces   : SDNode<"X86ISD::VREDUCE", STDFp3SrcRm>;
def X86GetMants  : SDNode<"X86ISD::VGETMANT", STDFp3SrcRm>;

def SDT_PCMPISTRI : SDTypeProfile<2, 3, [SDTCisVT<0, i32>, SDTCisVT<1, i32>,
                                         SDTCisVT<2, v16i8>, SDTCisVT<3, v16i8>,
                                         SDTCisVT<4, i8>]>;
def SDT_PCMPESTRI : SDTypeProfile<2, 5, [SDTCisVT<0, i32>, SDTCisVT<1, i32>,
                                         SDTCisVT<2, v16i8>, SDTCisVT<3, i32>,
                                         SDTCisVT<4, v16i8>, SDTCisVT<5, i32>,
                                         SDTCisVT<6, i8>]>;

def X86pcmpistri : SDNode<"X86ISD::PCMPISTRI", SDT_PCMPISTRI>;
def X86pcmpestri : SDNode<"X86ISD::PCMPESTRI", SDT_PCMPESTRI>;

def X86compress: SDNode<"X86ISD::COMPRESS", SDTypeProfile<1, 1,
                        [SDTCisSameAs<0, 1>, SDTCisVec<1>]>, []>;
def X86expand  : SDNode<"X86ISD::EXPAND", SDTypeProfile<1, 1,
                        [SDTCisSameAs<0, 1>, SDTCisVec<1>]>, []>;

def SDTintToFPRound: SDTypeProfile<1, 3, [SDTCisVec<0>, SDTCisFP<0>,
                                          SDTCisSameAs<0,1>, SDTCisInt<2>,
                                          SDTCisVT<3, i32>]>;
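
// By convention, the trailing i32 operand in the *Rnd / *SrcRm profiles in
// this file is the AVX-512 embedded rounding-control immediate; the
// FROUND_NO_EXC and FROUND_CURRENT leaves defined later in this file give the
// values it is matched against.
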
def SDTDoubleToInt: SDTypeProfile<1, 1, [SDTCisVec<0>, SDTCisVec<1>,
                                         SDTCisInt<0>,
                                         SDTCVecEltisVT<1, f64>]>;
def SDTFloatToInt: SDTypeProfile<1, 1, [SDTCisVec<0>, SDTCisVec<1>,
                                        SDTCisInt<0>, SDTCVecEltisVT<1, f32>]>;

def SDTDoubleToIntRnd: SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisVec<1>,
                                            SDTCisInt<0>,
                                            SDTCVecEltisVT<1, f64>]>;
def SDTSDoubleToIntRnd: SDTypeProfile<1, 2, [SDTCisInt<0>, SDTCisFP<1>,
                                             SDTCVecEltisVT<1, f64>,
                                             SDTCisInt<2>]>;
def SDTFloatToIntRnd: SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisVec<1>,
                                           SDTCisInt<0>,
                                           SDTCVecEltisVT<1, f32>]>;
def SDTSFloatToIntRnd: SDTypeProfile<1, 2, [SDTCisInt<0>, SDTCisFP<1>,
                                            SDTCVecEltisVT<1, f32>,
                                            SDTCisInt<2>]>;
def SDTVintToFPRound: SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisVec<1>,
                                           SDTCisFP<0>, SDTCVecEltisVT<1, i32>,
                                           SDTCisInt<2>]>;
def SDTVlongToFPRound: SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisVec<1>,
                                            SDTCisFP<0>, SDTCVecEltisVT<1, i64>,
                                            SDTCisInt<2>]>;

def SDTVFPToIntRound: SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisVec<1>,
                                           SDTCisFP<1>, SDTCVecEltisVT<0, i32>,
                                           SDTCisInt<2>]>;
def SDTVFPToLongRound: SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisVec<1>,
                                            SDTCisFP<1>, SDTCVecEltisVT<0, i64>,
                                            SDTCisInt<2>]>;

// Scalar
def X86SintToFpRnd : SDNode<"X86ISD::SINT_TO_FP_RND", SDTintToFPRound>;
def X86UintToFpRnd : SDNode<"X86ISD::UINT_TO_FP_RND", SDTintToFPRound>;

def X86cvttss2IntRnd  : SDNode<"X86ISD::FP_TO_SINT_RND", SDTSFloatToIntRnd>;
def X86cvttss2UIntRnd : SDNode<"X86ISD::FP_TO_UINT_RND", SDTSFloatToIntRnd>;
def X86cvttsd2IntRnd  : SDNode<"X86ISD::FP_TO_SINT_RND", SDTSDoubleToIntRnd>;
def X86cvttsd2UIntRnd : SDNode<"X86ISD::FP_TO_UINT_RND", SDTSDoubleToIntRnd>;

// Vector with rounding mode

// cvtt fp-to-int stuff
def X86VFpToSintRnd  : SDNode<"ISD::FP_TO_SINT", SDTVFPToIntRound>;
def X86VFpToUintRnd  : SDNode<"ISD::FP_TO_UINT", SDTVFPToIntRound>;
def X86VFpToSlongRnd : SDNode<"ISD::FP_TO_SINT", SDTVFPToLongRound>;
def X86VFpToUlongRnd : SDNode<"ISD::FP_TO_UINT", SDTVFPToLongRound>;

def X86VSintToFpRnd  : SDNode<"ISD::SINT_TO_FP", SDTVintToFPRound>;
def X86VUintToFpRnd  : SDNode<"ISD::UINT_TO_FP", SDTVintToFPRound>;
def X86VSlongToFpRnd : SDNode<"ISD::SINT_TO_FP", SDTVlongToFPRound>;
def X86VUlongToFpRnd : SDNode<"ISD::UINT_TO_FP", SDTVlongToFPRound>;

// cvt fp-to-int stuff
def X86cvtps2IntRnd  : SDNode<"X86ISD::FP_TO_SINT_RND", SDTFloatToIntRnd>;
def X86cvtps2UIntRnd : SDNode<"X86ISD::FP_TO_UINT_RND", SDTFloatToIntRnd>;
def X86cvtpd2IntRnd  : SDNode<"X86ISD::FP_TO_SINT_RND", SDTDoubleToIntRnd>;
def X86cvtpd2UIntRnd : SDNode<"X86ISD::FP_TO_UINT_RND", SDTDoubleToIntRnd>;

// Vector without rounding mode
def X86cvtps2Int  : SDNode<"X86ISD::FP_TO_SINT_RND", SDTFloatToInt>;
def X86cvtps2UInt : SDNode<"X86ISD::FP_TO_UINT_RND", SDTFloatToInt>;
def X86cvtpd2Int  : SDNode<"X86ISD::FP_TO_SINT_RND", SDTDoubleToInt>;
def X86cvtpd2UInt : SDNode<"X86ISD::FP_TO_UINT_RND", SDTDoubleToInt>;

def X86cvtph2ps : SDNode<"ISD::FP16_TO_FP",
                    SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisVec<1>,
                                         SDTCVecEltisVT<0, f32>,
                                         SDTCVecEltisVT<1, i16>,
                                         SDTCisFP<0>,
                                         SDTCisVT<2, i32>]> >;

def X86cvtps2ph : SDNode<"ISD::FP_TO_FP16",
                    SDTypeProfile<1, 3, [SDTCisVec<0>, SDTCisVec<1>,
                                         SDTCVecEltisVT<0, i16>,
                                         SDTCVecEltisVT<1, f32>,
                                         SDTCisFP<1>, SDTCisVT<2, i32>,
                                         SDTCisVT<3, i32>]> >;
def X86vfpextRnd : SDNode<"X86ISD::VFPEXT",
                    SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisVec<1>,
                                         SDTCisFP<0>, SDTCisFP<1>,
                                         SDTCVecEltisVT<0, f64>,
                                         SDTCVecEltisVT<1, f32>,
                                         SDTCisOpSmallerThanOp<1, 0>,
                                         SDTCisVT<2, i32>]>>;
def X86vfproundRnd: SDNode<"X86ISD::VFPROUND",
                    SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisVec<1>,
                                         SDTCisFP<0>, SDTCisFP<1>,
                                         SDTCVecEltisVT<0, f32>,
                                         SDTCVecEltisVT<1, f64>,
                                         SDTCisOpSmallerThanOp<0, 1>,
                                         SDTCisVT<2, i32>]>>;

//===----------------------------------------------------------------------===//
// SSE Complex Patterns
//===----------------------------------------------------------------------===//

// These are 'extloads' from a scalar to the low element of a vector, zeroing
// the top elements. These are used for the SSE 'ss' and 'sd' instruction
// forms.
def sse_load_f32 : ComplexPattern<v4f32, 5, "selectScalarSSELoad", [],
                                  [SDNPHasChain, SDNPMayLoad, SDNPMemOperand,
                                   SDNPWantRoot]>;
def sse_load_f64 : ComplexPattern<v2f64, 5, "selectScalarSSELoad", [],
                                  [SDNPHasChain, SDNPMayLoad, SDNPMemOperand,
                                   SDNPWantRoot]>;

def ssmem : Operand<v4f32> {
  let PrintMethod = "printf32mem";
  let MIOperandInfo = (ops ptr_rc, i8imm, ptr_rc_nosp, i32imm, i8imm);
  let ParserMatchClass = X86Mem32AsmOperand;
  let OperandType = "OPERAND_MEMORY";
}
def sdmem : Operand<v2f64> {
  let PrintMethod = "printf64mem";
  let MIOperandInfo = (ops ptr_rc, i8imm, ptr_rc_nosp, i32imm, i8imm);
  let ParserMatchClass = X86Mem64AsmOperand;
  let OperandType = "OPERAND_MEMORY";
}

//===----------------------------------------------------------------------===//
// SSE pattern fragments
//===----------------------------------------------------------------------===//

// 128-bit load pattern fragments
// NOTE: all 128-bit integer vector loads are promoted to v2i64
def loadv4f32 : PatFrag<(ops node:$ptr), (v4f32 (load node:$ptr))>;
def loadv2f64 : PatFrag<(ops node:$ptr), (v2f64 (load node:$ptr))>;
def loadv2i64 : PatFrag<(ops node:$ptr), (v2i64 (load node:$ptr))>;

// 256-bit load pattern fragments
// NOTE: all 256-bit integer vector loads are promoted to v4i64
def loadv8f32 : PatFrag<(ops node:$ptr), (v8f32 (load node:$ptr))>;
def loadv4f64 : PatFrag<(ops node:$ptr), (v4f64 (load node:$ptr))>;
def loadv4i64 : PatFrag<(ops node:$ptr), (v4i64 (load node:$ptr))>;

// 512-bit load pattern fragments
def loadv16f32 : PatFrag<(ops node:$ptr), (v16f32 (load node:$ptr))>;
def loadv8f64  : PatFrag<(ops node:$ptr), (v8f64 (load node:$ptr))>;
def loadv64i8  : PatFrag<(ops node:$ptr), (v64i8 (load node:$ptr))>;
def loadv32i16 : PatFrag<(ops node:$ptr), (v32i16 (load node:$ptr))>;
def loadv16i32 : PatFrag<(ops node:$ptr), (v16i32 (load node:$ptr))>;
def loadv8i64  : PatFrag<(ops node:$ptr), (v8i64 (load node:$ptr))>;

// 128-/256-/512-bit extload pattern fragments
def extloadv2f32 : PatFrag<(ops node:$ptr), (v2f64 (extloadvf32 node:$ptr))>;
def extloadv4f32 : PatFrag<(ops node:$ptr), (v4f64 (extloadvf32 node:$ptr))>;
def extloadv8f32 : PatFrag<(ops node:$ptr), (v8f64 (extloadvf32 node:$ptr))>;
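
// For illustration (a hypothetical pattern, not one defined here), the
// extload fragments let conversion instructions fold a narrow memory operand,
// along the lines of
//   def : Pat<(v2f64 (extloadv2f32 addr:$src)), (CVTPS2PDrm addr:$src)>;
// where CVTPS2PDrm extends two f32 elements from memory to two f64 results.
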
// These are needed to match a scalar load that is used in a vector-only
// math instruction such as the FP logical ops: andps, andnps, orps, xorps.
// The memory operand is required to be a 128-bit load, so it must be converted
// from a vector to a scalar.
def loadf32_128 : PatFrag<(ops node:$ptr),
  (f32 (extractelt (loadv4f32 node:$ptr), (iPTR 0)))>;
def loadf64_128 : PatFrag<(ops node:$ptr),
  (f64 (extractelt (loadv2f64 node:$ptr), (iPTR 0)))>;

// Like 'store', but always requires 128-bit vector alignment.
def alignedstore : PatFrag<(ops node:$val, node:$ptr),
                           (store node:$val, node:$ptr), [{
  return cast<StoreSDNode>(N)->getAlignment() >= 16;
}]>;

// Like 'store', but always requires 256-bit vector alignment.
def alignedstore256 : PatFrag<(ops node:$val, node:$ptr),
                              (store node:$val, node:$ptr), [{
  return cast<StoreSDNode>(N)->getAlignment() >= 32;
}]>;

// Like 'store', but always requires 512-bit vector alignment.
def alignedstore512 : PatFrag<(ops node:$val, node:$ptr),
                              (store node:$val, node:$ptr), [{
  return cast<StoreSDNode>(N)->getAlignment() >= 64;
}]>;

// Like 'load', but always requires 128-bit vector alignment.
def alignedload : PatFrag<(ops node:$ptr), (load node:$ptr), [{
  return cast<LoadSDNode>(N)->getAlignment() >= 16;
}]>;

// Like 'X86vzload', but always requires 128-bit vector alignment.
def alignedX86vzload : PatFrag<(ops node:$ptr), (X86vzload node:$ptr), [{
  return cast<MemSDNode>(N)->getAlignment() >= 16;
}]>;

// Like 'load', but always requires 256-bit vector alignment.
def alignedload256 : PatFrag<(ops node:$ptr), (load node:$ptr), [{
  return cast<LoadSDNode>(N)->getAlignment() >= 32;
}]>;

// Like 'load', but always requires 512-bit vector alignment.
def alignedload512 : PatFrag<(ops node:$ptr), (load node:$ptr), [{
  return cast<LoadSDNode>(N)->getAlignment() >= 64;
}]>;

def alignedloadfsf32 : PatFrag<(ops node:$ptr),
                               (f32 (alignedload node:$ptr))>;
def alignedloadfsf64 : PatFrag<(ops node:$ptr),
                               (f64 (alignedload node:$ptr))>;

// 128-bit aligned load pattern fragments
// NOTE: all 128-bit integer vector loads are promoted to v2i64
def alignedloadv4f32 : PatFrag<(ops node:$ptr),
                               (v4f32 (alignedload node:$ptr))>;
def alignedloadv2f64 : PatFrag<(ops node:$ptr),
                               (v2f64 (alignedload node:$ptr))>;
def alignedloadv2i64 : PatFrag<(ops node:$ptr),
                               (v2i64 (alignedload node:$ptr))>;

// 256-bit aligned load pattern fragments
// NOTE: all 256-bit integer vector loads are promoted to v4i64
def alignedloadv8f32 : PatFrag<(ops node:$ptr),
                               (v8f32 (alignedload256 node:$ptr))>;
def alignedloadv4f64 : PatFrag<(ops node:$ptr),
                               (v4f64 (alignedload256 node:$ptr))>;
def alignedloadv4i64 : PatFrag<(ops node:$ptr),
                               (v4i64 (alignedload256 node:$ptr))>;

// 512-bit aligned load pattern fragments
def alignedloadv16f32 : PatFrag<(ops node:$ptr),
                                (v16f32 (alignedload512 node:$ptr))>;
def alignedloadv16i32 : PatFrag<(ops node:$ptr),
                                (v16i32 (alignedload512 node:$ptr))>;
def alignedloadv8f64  : PatFrag<(ops node:$ptr),
                                (v8f64 (alignedload512 node:$ptr))>;
def alignedloadv8i64  : PatFrag<(ops node:$ptr),
                                (v8i64 (alignedload512 node:$ptr))>;
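
// The aligned fragments above let selection pick the alignment-checking
// instruction forms, e.g. (hypothetical pattern shown for exposition)
//   def : Pat<(v4f32 (alignedloadv4f32 addr:$src)), (MOVAPSrm addr:$src)>;
// while loads that do not satisfy the alignment predicate are left for the
// unaligned (MOVUPS-style) patterns.
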
// Like 'load', but uses special alignment checks suitable for use in
// memory operands in most SSE instructions, which are required to
// be naturally aligned on some targets but not on others. If the subtarget
// allows unaligned accesses, match any load, though this may require
// setting a feature bit in the processor (on startup, for example).
// Opteron 10h and later implement such a feature.
def memop : PatFrag<(ops node:$ptr), (load node:$ptr), [{
  return    Subtarget->hasSSEUnalignedMem()
         || cast<LoadSDNode>(N)->getAlignment() >= 16;
}]>;

def memopfsf32 : PatFrag<(ops node:$ptr), (f32 (memop node:$ptr))>;
def memopfsf64 : PatFrag<(ops node:$ptr), (f64 (memop node:$ptr))>;

// 128-bit memop pattern fragments
// NOTE: all 128-bit integer vector loads are promoted to v2i64
def memopv4f32 : PatFrag<(ops node:$ptr), (v4f32 (memop node:$ptr))>;
def memopv2f64 : PatFrag<(ops node:$ptr), (v2f64 (memop node:$ptr))>;
def memopv2i64 : PatFrag<(ops node:$ptr), (v2i64 (memop node:$ptr))>;

// These are needed to match a scalar memop that is used in a vector-only
// math instruction such as the FP logical ops: andps, andnps, orps, xorps.
// The memory operand is required to be a 128-bit load, so it must be converted
// from a vector to a scalar.
def memopfsf32_128 : PatFrag<(ops node:$ptr),
  (f32 (extractelt (memopv4f32 node:$ptr), (iPTR 0)))>;
def memopfsf64_128 : PatFrag<(ops node:$ptr),
  (f64 (extractelt (memopv2f64 node:$ptr), (iPTR 0)))>;


// SSSE3 uses MMX registers for some instructions. They aren't aligned on a
// 16-byte boundary.
// FIXME: 8 byte alignment for mmx reads is not required
def memop64 : PatFrag<(ops node:$ptr), (load node:$ptr), [{
  return cast<LoadSDNode>(N)->getAlignment() >= 8;
}]>;

def memopmmx : PatFrag<(ops node:$ptr), (x86mmx (memop64 node:$ptr))>;

def mgatherv4i32 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                  (masked_gather node:$src1, node:$src2, node:$src3), [{
  if (MaskedGatherSDNode *Mgt = dyn_cast<MaskedGatherSDNode>(N))
    return (Mgt->getIndex().getValueType() == MVT::v4i32 ||
            Mgt->getBasePtr().getValueType() == MVT::v4i32);
  return false;
}]>;

def mgatherv8i32 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                  (masked_gather node:$src1, node:$src2, node:$src3), [{
  if (MaskedGatherSDNode *Mgt = dyn_cast<MaskedGatherSDNode>(N))
    return (Mgt->getIndex().getValueType() == MVT::v8i32 ||
            Mgt->getBasePtr().getValueType() == MVT::v8i32);
  return false;
}]>;

def mgatherv2i64 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                  (masked_gather node:$src1, node:$src2, node:$src3), [{
  if (MaskedGatherSDNode *Mgt = dyn_cast<MaskedGatherSDNode>(N))
    return (Mgt->getIndex().getValueType() == MVT::v2i64 ||
            Mgt->getBasePtr().getValueType() == MVT::v2i64);
  return false;
}]>;
def mgatherv4i64 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                  (masked_gather node:$src1, node:$src2, node:$src3), [{
  if (MaskedGatherSDNode *Mgt = dyn_cast<MaskedGatherSDNode>(N))
    return (Mgt->getIndex().getValueType() == MVT::v4i64 ||
            Mgt->getBasePtr().getValueType() == MVT::v4i64);
  return false;
}]>;
def mgatherv8i64 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                  (masked_gather node:$src1, node:$src2, node:$src3), [{
  if (MaskedGatherSDNode *Mgt = dyn_cast<MaskedGatherSDNode>(N))
    return (Mgt->getIndex().getValueType() == MVT::v8i64 ||
            Mgt->getBasePtr().getValueType() == MVT::v8i64);
  return false;
}]>;
def mgatherv16i32 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                  (masked_gather node:$src1, node:$src2, node:$src3), [{
  if (MaskedGatherSDNode *Mgt = dyn_cast<MaskedGatherSDNode>(N))
    return (Mgt->getIndex().getValueType() == MVT::v16i32 ||
            Mgt->getBasePtr().getValueType() == MVT::v16i32);
  return false;
}]>;
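
// The gather fragments above (and the scatter fragments below) exist only to
// distinguish the index-vector type of a masked_gather/masked_scatter node,
// so that, for example, a gather indexed by v4i32 can select a
// VGATHERDPS-style "d" form while a v2i64 index selects a "q" form.
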
def mscatterv2i64 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                  (masked_scatter node:$src1, node:$src2, node:$src3), [{
  if (MaskedScatterSDNode *Sc = dyn_cast<MaskedScatterSDNode>(N))
    return (Sc->getIndex().getValueType() == MVT::v2i64 ||
            Sc->getBasePtr().getValueType() == MVT::v2i64);
  return false;
}]>;

def mscatterv4i32 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                  (masked_scatter node:$src1, node:$src2, node:$src3), [{
  if (MaskedScatterSDNode *Sc = dyn_cast<MaskedScatterSDNode>(N))
    return (Sc->getIndex().getValueType() == MVT::v4i32 ||
            Sc->getBasePtr().getValueType() == MVT::v4i32);
  return false;
}]>;

def mscatterv4i64 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                  (masked_scatter node:$src1, node:$src2, node:$src3), [{
  if (MaskedScatterSDNode *Sc = dyn_cast<MaskedScatterSDNode>(N))
    return (Sc->getIndex().getValueType() == MVT::v4i64 ||
            Sc->getBasePtr().getValueType() == MVT::v4i64);
  return false;
}]>;

def mscatterv8i32 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                  (masked_scatter node:$src1, node:$src2, node:$src3), [{
  if (MaskedScatterSDNode *Sc = dyn_cast<MaskedScatterSDNode>(N))
    return (Sc->getIndex().getValueType() == MVT::v8i32 ||
            Sc->getBasePtr().getValueType() == MVT::v8i32);
  return false;
}]>;

def mscatterv8i64 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                  (masked_scatter node:$src1, node:$src2, node:$src3), [{
  if (MaskedScatterSDNode *Sc = dyn_cast<MaskedScatterSDNode>(N))
    return (Sc->getIndex().getValueType() == MVT::v8i64 ||
            Sc->getBasePtr().getValueType() == MVT::v8i64);
  return false;
}]>;
def mscatterv16i32 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                  (masked_scatter node:$src1, node:$src2, node:$src3), [{
  if (MaskedScatterSDNode *Sc = dyn_cast<MaskedScatterSDNode>(N))
    return (Sc->getIndex().getValueType() == MVT::v16i32 ||
            Sc->getBasePtr().getValueType() == MVT::v16i32);
  return false;
}]>;

// 128-bit bitconvert pattern fragments
def bc_v4f32 : PatFrag<(ops node:$in), (v4f32 (bitconvert node:$in))>;
def bc_v2f64 : PatFrag<(ops node:$in), (v2f64 (bitconvert node:$in))>;
def bc_v16i8 : PatFrag<(ops node:$in), (v16i8 (bitconvert node:$in))>;
def bc_v8i16 : PatFrag<(ops node:$in), (v8i16 (bitconvert node:$in))>;
def bc_v4i32 : PatFrag<(ops node:$in), (v4i32 (bitconvert node:$in))>;
def bc_v2i64 : PatFrag<(ops node:$in), (v2i64 (bitconvert node:$in))>;

// 256-bit bitconvert pattern fragments
def bc_v32i8  : PatFrag<(ops node:$in), (v32i8 (bitconvert node:$in))>;
def bc_v16i16 : PatFrag<(ops node:$in), (v16i16 (bitconvert node:$in))>;
def bc_v8i32  : PatFrag<(ops node:$in), (v8i32 (bitconvert node:$in))>;
def bc_v4i64  : PatFrag<(ops node:$in), (v4i64 (bitconvert node:$in))>;
def bc_v8f32  : PatFrag<(ops node:$in), (v8f32 (bitconvert node:$in))>;

// 512-bit bitconvert pattern fragments
def bc_v16i32 : PatFrag<(ops node:$in), (v16i32 (bitconvert node:$in))>;
def bc_v8i64  : PatFrag<(ops node:$in), (v8i64 (bitconvert node:$in))>;
def bc_v8f64  : PatFrag<(ops node:$in), (v8f64 (bitconvert node:$in))>;
def bc_v16f32 : PatFrag<(ops node:$in), (v16f32 (bitconvert node:$in))>;
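
// Typical use of the bitconvert fragments: since 128-bit integer vector loads
// are promoted to v2i64, a pattern that needs the loaded value as v4i32 wraps
// the load, e.g. (bc_v4i32 (loadv2i64 addr:$src)) in the PSHUFD memory
// patterns.
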
def vzmovl_v2i64 : PatFrag<(ops node:$src),
                           (bitconvert (v2i64 (X86vzmovl
                             (v2i64 (scalar_to_vector (loadi64 node:$src))))))>;
def vzmovl_v4i32 : PatFrag<(ops node:$src),
                           (bitconvert (v4i32 (X86vzmovl
                             (v4i32 (scalar_to_vector (loadi32 node:$src))))))>;

def vzload_v2i64 : PatFrag<(ops node:$src),
                           (bitconvert (v2i64 (X86vzload node:$src)))>;


def fp32imm0 : PatLeaf<(f32 fpimm), [{
  return N->isExactlyValue(+0.0);
}]>;

def I8Imm : SDNodeXForm<imm, [{
  // Transformation function: get the low 8 bits.
  return getI8Imm((uint8_t)N->getZExtValue(), SDLoc(N));
}]>;

def FROUND_NO_EXC : ImmLeaf<i32, [{ return Imm == 8; }]>;
def FROUND_CURRENT : ImmLeaf<i32, [{
  return Imm == X86::STATIC_ROUNDING::CUR_DIRECTION;
}]>;

// BYTE_imm - Transform bit immediates into byte immediates.
def BYTE_imm : SDNodeXForm<imm, [{
  // Transformation function: imm >> 3
  return getI32Imm(N->getZExtValue() >> 3, SDLoc(N));
}]>;

// EXTRACT_get_vextract128_imm xform function: convert extract_subvector index
// to VEXTRACTF128/VEXTRACTI128 imm.
def EXTRACT_get_vextract128_imm : SDNodeXForm<extract_subvector, [{
  return getI8Imm(X86::getExtractVEXTRACT128Immediate(N), SDLoc(N));
}]>;

// INSERT_get_vinsert128_imm xform function: convert insert_subvector index to
// VINSERTF128/VINSERTI128 imm.
def INSERT_get_vinsert128_imm : SDNodeXForm<insert_subvector, [{
  return getI8Imm(X86::getInsertVINSERT128Immediate(N), SDLoc(N));
}]>;

// EXTRACT_get_vextract256_imm xform function: convert extract_subvector index
// to VEXTRACTF64x4 imm.
def EXTRACT_get_vextract256_imm : SDNodeXForm<extract_subvector, [{
  return getI8Imm(X86::getExtractVEXTRACT256Immediate(N), SDLoc(N));
}]>;

// INSERT_get_vinsert256_imm xform function: convert insert_subvector index to
// VINSERTF64x4 imm.
def INSERT_get_vinsert256_imm : SDNodeXForm<insert_subvector, [{
  return getI8Imm(X86::getInsertVINSERT256Immediate(N), SDLoc(N));
}]>;

def vextract128_extract : PatFrag<(ops node:$bigvec, node:$index),
                                  (extract_subvector node:$bigvec,
                                                     node:$index), [{
  return X86::isVEXTRACT128Index(N);
}], EXTRACT_get_vextract128_imm>;

def vinsert128_insert : PatFrag<(ops node:$bigvec, node:$smallvec,
                                     node:$index),
                                (insert_subvector node:$bigvec, node:$smallvec,
                                                  node:$index), [{
  return X86::isVINSERT128Index(N);
}], INSERT_get_vinsert128_imm>;


def vextract256_extract : PatFrag<(ops node:$bigvec, node:$index),
                                  (extract_subvector node:$bigvec,
                                                     node:$index), [{
  return X86::isVEXTRACT256Index(N);
}], EXTRACT_get_vextract256_imm>;

def vinsert256_insert : PatFrag<(ops node:$bigvec, node:$smallvec,
                                     node:$index),
                                (insert_subvector node:$bigvec, node:$smallvec,
                                                  node:$index), [{
  return X86::isVINSERT256Index(N);
}], INSERT_get_vinsert256_imm>;

def masked_load_aligned128 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                         (masked_load node:$src1, node:$src2, node:$src3), [{
  if (auto *Load = dyn_cast<MaskedLoadSDNode>(N))
    return Load->getAlignment() >= 16;
  return false;
}]>;

def masked_load_aligned256 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                         (masked_load node:$src1, node:$src2, node:$src3), [{
  if (auto *Load = dyn_cast<MaskedLoadSDNode>(N))
    return Load->getAlignment() >= 32;
  return false;
}]>;

def masked_load_aligned512 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                         (masked_load node:$src1, node:$src2, node:$src3), [{
  if (auto *Load = dyn_cast<MaskedLoadSDNode>(N))
    return Load->getAlignment() >= 64;
  return false;
}]>;
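
// Like the plain aligned/unaligned load fragments earlier in this file, the
// masked_load fragments above steer selection between aligned and unaligned
// masked-move forms (e.g. VMOVAPS vs. VMOVUPS with a write-mask in AVX-512);
// the fragment below accepts any masked load.
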
def masked_load_unaligned : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                         (masked_load node:$src1, node:$src2, node:$src3), [{
  return isa<MaskedLoadSDNode>(N);
}]>;

// Masked store fragments.
// X86mstore can't be implemented in core DAG files because some targets
// don't support vector types (llvm-tblgen will fail).
def X86mstore : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                        (masked_store node:$src1, node:$src2, node:$src3), [{
  return !cast<MaskedStoreSDNode>(N)->isTruncatingStore();
}]>;

def masked_store_aligned128 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                         (X86mstore node:$src1, node:$src2, node:$src3), [{
  if (auto *Store = dyn_cast<MaskedStoreSDNode>(N))
    return Store->getAlignment() >= 16;
  return false;
}]>;

def masked_store_aligned256 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                         (X86mstore node:$src1, node:$src2, node:$src3), [{
  if (auto *Store = dyn_cast<MaskedStoreSDNode>(N))
    return Store->getAlignment() >= 32;
  return false;
}]>;

def masked_store_aligned512 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                         (X86mstore node:$src1, node:$src2, node:$src3), [{
  if (auto *Store = dyn_cast<MaskedStoreSDNode>(N))
    return Store->getAlignment() >= 64;
  return false;
}]>;

def masked_store_unaligned : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                         (X86mstore node:$src1, node:$src2, node:$src3), [{
  return isa<MaskedStoreSDNode>(N);
}]>;

// Masked truncating store fragments.
// X86mtruncstore can't be implemented in core DAG files because some targets
// don't support vector types (llvm-tblgen will fail).
def X86mtruncstore : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                        (masked_store node:$src1, node:$src2, node:$src3), [{
  return cast<MaskedStoreSDNode>(N)->isTruncatingStore();
}]>;
def masked_truncstorevi8 :
  PatFrag<(ops node:$src1, node:$src2, node:$src3),
          (X86mtruncstore node:$src1, node:$src2, node:$src3), [{
  return cast<MaskedStoreSDNode>(N)->getMemoryVT().getScalarType() == MVT::i8;
}]>;
def masked_truncstorevi16 :
  PatFrag<(ops node:$src1, node:$src2, node:$src3),
          (X86mtruncstore node:$src1, node:$src2, node:$src3), [{
  return cast<MaskedStoreSDNode>(N)->getMemoryVT().getScalarType() == MVT::i16;
}]>;
def masked_truncstorevi32 :
  PatFrag<(ops node:$src1, node:$src2, node:$src3),
          (X86mtruncstore node:$src1, node:$src2, node:$src3), [{
  return cast<MaskedStoreSDNode>(N)->getMemoryVT().getScalarType() == MVT::i32;
}]>;
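
// For illustration (a hypothetical pattern, not one defined here), the
// truncating fragments select the down-converting move-to-memory forms,
// along the lines of
//   def : Pat<(masked_truncstorevi8 (v16i32 VR512:$src), addr:$dst,
//                                   VK16WM:$mask),
//             (VPMOVDBmrk addr:$dst, VK16WM:$mask, VR512:$src)>;
// where VPMOVDB truncates each i32 element to i8 on its way to memory.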