1//===-- X86InstrFragmentsSIMD.td - x86 SIMD ISA ------------*- tablegen -*-===// 2// 3// The LLVM Compiler Infrastructure 4// 5// This file is distributed under the University of Illinois Open Source 6// License. See LICENSE.TXT for details. 7// 8//===----------------------------------------------------------------------===// 9// 10// This file provides pattern fragments useful for SIMD instructions. 11// 12//===----------------------------------------------------------------------===// 13 14//===----------------------------------------------------------------------===// 15// MMX specific DAG Nodes. 16//===----------------------------------------------------------------------===// 17 18// Low word of MMX to GPR. 19def MMX_X86movd2w : SDNode<"X86ISD::MMX_MOVD2W", SDTypeProfile<1, 1, 20 [SDTCisVT<0, i32>, SDTCisVT<1, x86mmx>]>>; 21// GPR to low word of MMX. 22def MMX_X86movw2d : SDNode<"X86ISD::MMX_MOVW2D", SDTypeProfile<1, 1, 23 [SDTCisVT<0, x86mmx>, SDTCisVT<1, i32>]>>; 24 25//===----------------------------------------------------------------------===// 26// MMX Pattern Fragments 27//===----------------------------------------------------------------------===// 28 29def load_mmx : PatFrag<(ops node:$ptr), (x86mmx (load node:$ptr))>; 30 31//===----------------------------------------------------------------------===// 32// SSE specific DAG Nodes. 33//===----------------------------------------------------------------------===// 34 35def SDTX86VFCMP : SDTypeProfile<1, 3, [SDTCisFP<0>, SDTCisVec<0>, 36 SDTCisSameAs<0, 1>, SDTCisSameAs<1, 2>, 37 SDTCisVT<3, i8>]>; 38 39def X86fmin : SDNode<"X86ISD::FMIN", SDTFPBinOp>; 40def X86fmax : SDNode<"X86ISD::FMAX", SDTFPBinOp>; 41def X86fmins : SDNode<"X86ISD::FMINS", SDTFPBinOp>; 42def X86fmaxs : SDNode<"X86ISD::FMAXS", SDTFPBinOp>; 43 44// Commutative and Associative FMIN and FMAX. 
45def X86fminc : SDNode<"X86ISD::FMINC", SDTFPBinOp, 46 [SDNPCommutative, SDNPAssociative]>; 47def X86fmaxc : SDNode<"X86ISD::FMAXC", SDTFPBinOp, 48 [SDNPCommutative, SDNPAssociative]>; 49 50def X86fand : SDNode<"X86ISD::FAND", SDTFPBinOp, 51 [SDNPCommutative, SDNPAssociative]>; 52def X86for : SDNode<"X86ISD::FOR", SDTFPBinOp, 53 [SDNPCommutative, SDNPAssociative]>; 54def X86fxor : SDNode<"X86ISD::FXOR", SDTFPBinOp, 55 [SDNPCommutative, SDNPAssociative]>; 56def X86fandn : SDNode<"X86ISD::FANDN", SDTFPBinOp>; 57def X86frsqrt : SDNode<"X86ISD::FRSQRT", SDTFPUnaryOp>; 58def X86frcp : SDNode<"X86ISD::FRCP", SDTFPUnaryOp>; 59def X86fhadd : SDNode<"X86ISD::FHADD", SDTFPBinOp>; 60def X86fhsub : SDNode<"X86ISD::FHSUB", SDTFPBinOp>; 61def X86hadd : SDNode<"X86ISD::HADD", SDTIntBinOp>; 62def X86hsub : SDNode<"X86ISD::HSUB", SDTIntBinOp>; 63def X86comi : SDNode<"X86ISD::COMI", SDTX86CmpTest>; 64def X86ucomi : SDNode<"X86ISD::UCOMI", SDTX86CmpTest>; 65def X86cmps : SDNode<"X86ISD::FSETCC", SDTX86Cmps>; 66def X86pshufb : SDNode<"X86ISD::PSHUFB", 67 SDTypeProfile<1, 2, [SDTCVecEltisVT<0, i8>, SDTCisSameAs<0,1>, 68 SDTCisSameAs<0,2>]>>; 69def X86psadbw : SDNode<"X86ISD::PSADBW", 70 SDTypeProfile<1, 2, [SDTCVecEltisVT<0, i64>, 71 SDTCVecEltisVT<1, i8>, 72 SDTCisSameSizeAs<0,1>, 73 SDTCisSameAs<1,2>]>, [SDNPCommutative]>; 74def X86dbpsadbw : SDNode<"X86ISD::DBPSADBW", 75 SDTypeProfile<1, 3, [SDTCVecEltisVT<0, i16>, 76 SDTCVecEltisVT<1, i8>, 77 SDTCisSameSizeAs<0,1>, 78 SDTCisSameAs<1,2>, SDTCisInt<3>]>>; 79def X86andnp : SDNode<"X86ISD::ANDNP", 80 SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisSameAs<0,1>, 81 SDTCisSameAs<0,2>]>>; 82def X86multishift : SDNode<"X86ISD::MULTISHIFT", 83 SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisVec<1>, 84 SDTCisSameAs<1,2>]>>; 85def X86pextrb : SDNode<"X86ISD::PEXTRB", 86 SDTypeProfile<1, 2, [SDTCisVT<0, i32>, SDTCisVT<1, v16i8>, 87 SDTCisPtrTy<2>]>>; 88def X86pextrw : SDNode<"X86ISD::PEXTRW", 89 SDTypeProfile<1, 2, [SDTCisVT<0, i32>, SDTCisVT<1, v8i16>, 90 
SDTCisPtrTy<2>]>>; 91def X86pinsrb : SDNode<"X86ISD::PINSRB", 92 SDTypeProfile<1, 3, [SDTCisVT<0, v16i8>, SDTCisSameAs<0,1>, 93 SDTCisVT<2, i32>, SDTCisPtrTy<3>]>>; 94def X86pinsrw : SDNode<"X86ISD::PINSRW", 95 SDTypeProfile<1, 3, [SDTCisVT<0, v8i16>, SDTCisSameAs<0,1>, 96 SDTCisVT<2, i32>, SDTCisPtrTy<3>]>>; 97def X86insertps : SDNode<"X86ISD::INSERTPS", 98 SDTypeProfile<1, 3, [SDTCisVT<0, v4f32>, SDTCisSameAs<0,1>, 99 SDTCisVT<2, v4f32>, SDTCisVT<3, i8>]>>; 100def X86vzmovl : SDNode<"X86ISD::VZEXT_MOVL", 101 SDTypeProfile<1, 1, [SDTCisSameAs<0,1>]>>; 102 103def X86vzload : SDNode<"X86ISD::VZEXT_LOAD", SDTLoad, 104 [SDNPHasChain, SDNPMayLoad, SDNPMemOperand]>; 105 106def X86vzext : SDNode<"X86ISD::VZEXT", 107 SDTypeProfile<1, 1, [SDTCisVec<0>, SDTCisVec<1>, 108 SDTCisInt<0>, SDTCisInt<1>, 109 SDTCisOpSmallerThanOp<1, 0>]>>; 110 111def X86vsext : SDNode<"X86ISD::VSEXT", 112 SDTypeProfile<1, 1, [SDTCisVec<0>, SDTCisVec<1>, 113 SDTCisInt<0>, SDTCisInt<1>, 114 SDTCisOpSmallerThanOp<1, 0>]>>; 115 116def SDTVtrunc : SDTypeProfile<1, 1, [SDTCisVec<0>, SDTCisVec<1>, 117 SDTCisInt<0>, SDTCisInt<1>, 118 SDTCisOpSmallerThanOp<0, 1>]>; 119 120def X86vtrunc : SDNode<"X86ISD::VTRUNC", SDTVtrunc>; 121def X86vtruncs : SDNode<"X86ISD::VTRUNCS", SDTVtrunc>; 122def X86vtruncus : SDNode<"X86ISD::VTRUNCUS", SDTVtrunc>; 123 124def X86vfpext : SDNode<"X86ISD::VFPEXT", 125 SDTypeProfile<1, 1, [SDTCVecEltisVT<0, f64>, 126 SDTCVecEltisVT<1, f32>, 127 SDTCisSameSizeAs<0, 1>]>>; 128def X86vfpround: SDNode<"X86ISD::VFPROUND", 129 SDTypeProfile<1, 1, [SDTCVecEltisVT<0, f32>, 130 SDTCVecEltisVT<1, f64>, 131 SDTCisSameSizeAs<0, 1>]>>; 132 133def X86froundRnd: SDNode<"X86ISD::VFPROUNDS_RND", 134 SDTypeProfile<1, 3, [SDTCVecEltisVT<0, f32>, 135 SDTCisSameAs<0, 1>, 136 SDTCVecEltisVT<2, f64>, 137 SDTCisSameSizeAs<0, 2>, 138 SDTCisVT<3, i32>]>>; 139 140def X86fpextRnd : SDNode<"X86ISD::VFPEXTS_RND", 141 SDTypeProfile<1, 3, [SDTCVecEltisVT<0, f64>, 142 SDTCisSameAs<0, 1>, 143 SDTCVecEltisVT<2, f32>, 
144 SDTCisSameSizeAs<0, 2>, 145 SDTCisVT<3, i32>]>>; 146 147def X86vshiftimm : SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisSameAs<0,1>, 148 SDTCisVT<2, i8>, SDTCisInt<0>]>; 149 150def X86vshldq : SDNode<"X86ISD::VSHLDQ", X86vshiftimm>; 151def X86vshrdq : SDNode<"X86ISD::VSRLDQ", X86vshiftimm>; 152def X86cmpp : SDNode<"X86ISD::CMPP", SDTX86VFCMP>; 153def X86pcmpeq : SDNode<"X86ISD::PCMPEQ", SDTIntBinOp, [SDNPCommutative]>; 154def X86pcmpgt : SDNode<"X86ISD::PCMPGT", SDTIntBinOp>; 155 156def X86CmpMaskCC : 157 SDTypeProfile<1, 3, [SDTCisVec<0>, SDTCVecEltisVT<0, i1>, 158 SDTCisVec<1>, SDTCisSameAs<2, 1>, 159 SDTCisSameNumEltsAs<0, 1>, SDTCisVT<3, i8>]>; 160def X86CmpMaskCCRound : 161 SDTypeProfile<1, 4, [SDTCisVec<0>,SDTCVecEltisVT<0, i1>, 162 SDTCisVec<1>, SDTCisFP<1>, SDTCisSameAs<2, 1>, 163 SDTCisSameNumEltsAs<0, 1>, SDTCisVT<3, i8>, 164 SDTCisVT<4, i32>]>; 165def X86CmpMaskCCScalar : 166 SDTypeProfile<1, 3, [SDTCisInt<0>, SDTCisFP<1>, SDTCisSameAs<1, 2>, 167 SDTCisVT<3, i8>]>; 168 169def X86CmpMaskCCScalarRound : 170 SDTypeProfile<1, 4, [SDTCisInt<0>, SDTCisFP<1>, SDTCisSameAs<1, 2>, 171 SDTCisVT<3, i8>, SDTCisVT<4, i32>]>; 172 173def X86cmpm : SDNode<"X86ISD::CMPM", X86CmpMaskCC>; 174// Hack to make CMPM commutable in tablegen patterns for load folding. 
175def X86cmpm_c : SDNode<"X86ISD::CMPM", X86CmpMaskCC, [SDNPCommutative]>; 176def X86cmpmRnd : SDNode<"X86ISD::CMPM_RND", X86CmpMaskCCRound>; 177def X86cmpms : SDNode<"X86ISD::FSETCCM", X86CmpMaskCCScalar>; 178def X86cmpmsRnd : SDNode<"X86ISD::FSETCCM_RND", X86CmpMaskCCScalarRound>; 179 180def X86phminpos: SDNode<"X86ISD::PHMINPOS", 181 SDTypeProfile<1, 1, [SDTCisVT<0, v8i16>, SDTCisVT<1, v8i16>]>>; 182 183def X86vshiftuniform : SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisSameAs<0,1>, 184 SDTCisVec<2>, SDTCisInt<0>, 185 SDTCisInt<1>]>; 186 187def X86vshl : SDNode<"X86ISD::VSHL", X86vshiftuniform>; 188def X86vsrl : SDNode<"X86ISD::VSRL", X86vshiftuniform>; 189def X86vsra : SDNode<"X86ISD::VSRA", X86vshiftuniform>; 190 191def X86vshiftvariable : SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisSameAs<0,1>, 192 SDTCisSameAs<0,2>, SDTCisInt<0>]>; 193 194def X86vsrav : SDNode<"X86ISD::VSRAV", X86vshiftvariable>; 195 196def X86vshli : SDNode<"X86ISD::VSHLI", X86vshiftimm>; 197def X86vsrli : SDNode<"X86ISD::VSRLI", X86vshiftimm>; 198def X86vsrai : SDNode<"X86ISD::VSRAI", X86vshiftimm>; 199 200def X86kshiftl : SDNode<"X86ISD::KSHIFTL", 201 SDTypeProfile<1, 2, [SDTCVecEltisVT<0, i1>, 202 SDTCisSameAs<0, 1>, 203 SDTCisVT<2, i8>]>>; 204def X86kshiftr : SDNode<"X86ISD::KSHIFTR", 205 SDTypeProfile<1, 2, [SDTCVecEltisVT<0, i1>, 206 SDTCisSameAs<0, 1>, 207 SDTCisVT<2, i8>]>>; 208 209def X86kadd : SDNode<"X86ISD::KADD", SDTIntBinOp, [SDNPCommutative]>; 210 211def X86vrotli : SDNode<"X86ISD::VROTLI", X86vshiftimm>; 212def X86vrotri : SDNode<"X86ISD::VROTRI", X86vshiftimm>; 213 214def X86vpshl : SDNode<"X86ISD::VPSHL", X86vshiftvariable>; 215def X86vpsha : SDNode<"X86ISD::VPSHA", X86vshiftvariable>; 216 217def X86vpcom : SDNode<"X86ISD::VPCOM", 218 SDTypeProfile<1, 3, [SDTCisVec<0>, SDTCisSameAs<0,1>, 219 SDTCisSameAs<0,2>, 220 SDTCisVT<3, i8>, SDTCisInt<0>]>>; 221def X86vpcomu : SDNode<"X86ISD::VPCOMU", 222 SDTypeProfile<1, 3, [SDTCisVec<0>, SDTCisSameAs<0,1>, 223 SDTCisSameAs<0,2>, 224 
SDTCisVT<3, i8>, SDTCisInt<0>]>>; 225def X86vpermil2 : SDNode<"X86ISD::VPERMIL2", 226 SDTypeProfile<1, 4, [SDTCisVec<0>, SDTCisSameAs<0,1>, 227 SDTCisSameAs<0,2>, 228 SDTCisFP<0>, SDTCisInt<3>, 229 SDTCisSameNumEltsAs<0, 3>, 230 SDTCisSameSizeAs<0,3>, 231 SDTCisVT<4, i8>]>>; 232def X86vpperm : SDNode<"X86ISD::VPPERM", 233 SDTypeProfile<1, 3, [SDTCisVT<0, v16i8>, SDTCisSameAs<0,1>, 234 SDTCisSameAs<0,2>, SDTCisSameAs<0, 3>]>>; 235 236def SDTX86CmpPTest : SDTypeProfile<1, 2, [SDTCisVT<0, i32>, 237 SDTCisVec<1>, 238 SDTCisSameAs<2, 1>]>; 239 240def X86addus : SDNode<"X86ISD::ADDUS", SDTIntBinOp, [SDNPCommutative]>; 241def X86subus : SDNode<"X86ISD::SUBUS", SDTIntBinOp>; 242def X86adds : SDNode<"X86ISD::ADDS", SDTIntBinOp, [SDNPCommutative]>; 243def X86subs : SDNode<"X86ISD::SUBS", SDTIntBinOp>; 244def X86mulhrs : SDNode<"X86ISD::MULHRS", SDTIntBinOp, [SDNPCommutative]>; 245def X86avg : SDNode<"X86ISD::AVG" , SDTIntBinOp, [SDNPCommutative]>; 246def X86ptest : SDNode<"X86ISD::PTEST", SDTX86CmpPTest>; 247def X86testp : SDNode<"X86ISD::TESTP", SDTX86CmpPTest>; 248def X86kortest : SDNode<"X86ISD::KORTEST", SDTX86CmpPTest>; 249def X86ktest : SDNode<"X86ISD::KTEST", SDTX86CmpPTest>; 250 251def X86movmsk : SDNode<"X86ISD::MOVMSK", 252 SDTypeProfile<1, 1, [SDTCisVT<0, i32>, SDTCisVec<1>]>>; 253 254def X86selects : SDNode<"X86ISD::SELECTS", 255 SDTypeProfile<1, 3, [SDTCisVT<1, v1i1>, 256 SDTCisSameAs<0, 2>, 257 SDTCisSameAs<2, 3>]>>; 258 259def X86pmuludq : SDNode<"X86ISD::PMULUDQ", 260 SDTypeProfile<1, 2, [SDTCVecEltisVT<0, i64>, 261 SDTCisSameAs<0,1>, 262 SDTCisSameAs<1,2>]>, 263 [SDNPCommutative]>; 264def X86pmuldq : SDNode<"X86ISD::PMULDQ", 265 SDTypeProfile<1, 2, [SDTCVecEltisVT<0, i64>, 266 SDTCisSameAs<0,1>, 267 SDTCisSameAs<1,2>]>, 268 [SDNPCommutative]>; 269 270def X86extrqi : SDNode<"X86ISD::EXTRQI", 271 SDTypeProfile<1, 3, [SDTCisVT<0, v2i64>, SDTCisSameAs<0,1>, 272 SDTCisVT<2, i8>, SDTCisVT<3, i8>]>>; 273def X86insertqi : SDNode<"X86ISD::INSERTQI", 274 
SDTypeProfile<1, 4, [SDTCisVT<0, v2i64>, SDTCisSameAs<0,1>, 275 SDTCisSameAs<1,2>, SDTCisVT<3, i8>, 276 SDTCisVT<4, i8>]>>; 277 278// Specific shuffle nodes - At some point ISD::VECTOR_SHUFFLE will always get 279// translated into one of the target nodes below during lowering. 280// Note: this is a work in progress... 281def SDTShuff1Op : SDTypeProfile<1, 1, [SDTCisVec<0>, SDTCisSameAs<0,1>]>; 282def SDTShuff2Op : SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisSameAs<0,1>, 283 SDTCisSameAs<0,2>]>; 284def SDTShuff2OpFP : SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisFP<0>, 285 SDTCisSameAs<0,1>, SDTCisSameAs<0,2>]>; 286 287def SDTShuff2OpM : SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisSameAs<0,1>, 288 SDTCisFP<0>, SDTCisInt<2>, 289 SDTCisSameNumEltsAs<0,2>, 290 SDTCisSameSizeAs<0,2>]>; 291def SDTShuff2OpI : SDTypeProfile<1, 2, [SDTCisVec<0>, 292 SDTCisSameAs<0,1>, SDTCisVT<2, i8>]>; 293def SDTShuff3OpI : SDTypeProfile<1, 3, [SDTCisVec<0>, SDTCisSameAs<0,1>, 294 SDTCisSameAs<0,2>, SDTCisVT<3, i8>]>; 295def SDTFPBinOpImm: SDTypeProfile<1, 3, [SDTCisFP<0>, SDTCisVec<0>, 296 SDTCisSameAs<0,1>, 297 SDTCisSameAs<0,2>, 298 SDTCisVT<3, i32>]>; 299def SDTFPBinOpImmRound: SDTypeProfile<1, 4, [SDTCisFP<0>, SDTCisVec<0>, 300 SDTCisSameAs<0,1>, 301 SDTCisSameAs<0,2>, 302 SDTCisVT<3, i32>, 303 SDTCisVT<4, i32>]>; 304def SDTFPTernaryOpImmRound: SDTypeProfile<1, 5, [SDTCisFP<0>, SDTCisSameAs<0,1>, 305 SDTCisSameAs<0,2>, 306 SDTCisInt<3>, 307 SDTCisSameSizeAs<0, 3>, 308 SDTCisSameNumEltsAs<0, 3>, 309 SDTCisVT<4, i32>, 310 SDTCisVT<5, i32>]>; 311def SDTFPUnaryOpImm: SDTypeProfile<1, 2, [SDTCisFP<0>, SDTCisVec<0>, 312 SDTCisSameAs<0,1>, 313 SDTCisVT<2, i32>]>; 314def SDTFPUnaryOpImmRound: SDTypeProfile<1, 3, [SDTCisFP<0>, SDTCisVec<0>, 315 SDTCisSameAs<0,1>, 316 SDTCisVT<2, i32>, 317 SDTCisVT<3, i32>]>; 318 319def SDTVBroadcast : SDTypeProfile<1, 1, [SDTCisVec<0>]>; 320def SDTVBroadcastm : SDTypeProfile<1, 1, [SDTCisVec<0>, 321 SDTCisInt<0>, SDTCisInt<1>]>; 322 323def SDTBlend : SDTypeProfile<1, 3, 
[SDTCisVec<0>, SDTCisSameAs<0,1>, 324 SDTCisSameAs<1,2>, SDTCisVT<3, i8>]>; 325 326def SDTTernlog : SDTypeProfile<1, 4, [SDTCisInt<0>, SDTCisVec<0>, 327 SDTCisSameAs<0,1>, SDTCisSameAs<0,2>, 328 SDTCisSameAs<0,3>, SDTCisVT<4, i8>]>; 329 330def SDTFPBinOpRound : SDTypeProfile<1, 3, [ // fadd_round, fmul_round, etc. 331 SDTCisSameAs<0, 1>, SDTCisSameAs<0, 2>, SDTCisFP<0>, SDTCisVT<3, i32>]>; 332 333def SDTFPUnaryOpRound : SDTypeProfile<1, 2, [ // fsqrt_round, fgetexp_round, etc. 334 SDTCisSameAs<0, 1>, SDTCisFP<0>, SDTCisVT<2, i32>]>; 335 336def SDTFmaRound : SDTypeProfile<1, 4, [SDTCisSameAs<0,1>, 337 SDTCisSameAs<1,2>, SDTCisSameAs<1,3>, 338 SDTCisFP<0>, SDTCisVT<4, i32>]>; 339 340def X86PAlignr : SDNode<"X86ISD::PALIGNR", 341 SDTypeProfile<1, 3, [SDTCVecEltisVT<0, i8>, 342 SDTCisSameAs<0,1>, 343 SDTCisSameAs<0,2>, 344 SDTCisVT<3, i8>]>>; 345def X86VAlign : SDNode<"X86ISD::VALIGN", SDTShuff3OpI>; 346 347def X86VShld : SDNode<"X86ISD::VSHLD", SDTShuff3OpI>; 348def X86VShrd : SDNode<"X86ISD::VSHRD", SDTShuff3OpI>; 349def X86VShldv : SDNode<"X86ISD::VSHLDV", 350 SDTypeProfile<1, 3, [SDTCisVec<0>, 351 SDTCisSameAs<0,1>, 352 SDTCisSameAs<0,2>, 353 SDTCisSameAs<0,3>]>>; 354def X86VShrdv : SDNode<"X86ISD::VSHRDV", 355 SDTypeProfile<1, 3, [SDTCisVec<0>, 356 SDTCisSameAs<0,1>, 357 SDTCisSameAs<0,2>, 358 SDTCisSameAs<0,3>]>>; 359 360def X86Conflict : SDNode<"X86ISD::CONFLICT", SDTIntUnaryOp>; 361 362def X86PShufd : SDNode<"X86ISD::PSHUFD", SDTShuff2OpI>; 363def X86PShufhw : SDNode<"X86ISD::PSHUFHW", SDTShuff2OpI>; 364def X86PShuflw : SDNode<"X86ISD::PSHUFLW", SDTShuff2OpI>; 365 366def X86Shufp : SDNode<"X86ISD::SHUFP", SDTShuff3OpI>; 367def X86Shuf128 : SDNode<"X86ISD::SHUF128", SDTShuff3OpI>; 368 369def X86Movddup : SDNode<"X86ISD::MOVDDUP", SDTShuff1Op>; 370def X86Movshdup : SDNode<"X86ISD::MOVSHDUP", SDTShuff1Op>; 371def X86Movsldup : SDNode<"X86ISD::MOVSLDUP", SDTShuff1Op>; 372 373def X86Movsd : SDNode<"X86ISD::MOVSD", SDTShuff2OpFP>; 374def X86Movss : 
SDNode<"X86ISD::MOVSS", SDTShuff2OpFP>; 375 376def X86Movlhps : SDNode<"X86ISD::MOVLHPS", SDTShuff2OpFP>; 377def X86Movhlps : SDNode<"X86ISD::MOVHLPS", SDTShuff2OpFP>; 378 379def SDTPack : SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisInt<0>, 380 SDTCisVec<1>, SDTCisInt<1>, 381 SDTCisSameSizeAs<0,1>, 382 SDTCisSameAs<1,2>, 383 SDTCisOpSmallerThanOp<0, 1>]>; 384def X86Packss : SDNode<"X86ISD::PACKSS", SDTPack>; 385def X86Packus : SDNode<"X86ISD::PACKUS", SDTPack>; 386 387def X86Unpckl : SDNode<"X86ISD::UNPCKL", SDTShuff2Op>; 388def X86Unpckh : SDNode<"X86ISD::UNPCKH", SDTShuff2Op>; 389 390def X86vpmaddubsw : SDNode<"X86ISD::VPMADDUBSW", 391 SDTypeProfile<1, 2, [SDTCVecEltisVT<0, i16>, 392 SDTCVecEltisVT<1, i8>, 393 SDTCisSameSizeAs<0,1>, 394 SDTCisSameAs<1,2>]>>; 395def X86vpmaddwd : SDNode<"X86ISD::VPMADDWD", 396 SDTypeProfile<1, 2, [SDTCVecEltisVT<0, i32>, 397 SDTCVecEltisVT<1, i16>, 398 SDTCisSameSizeAs<0,1>, 399 SDTCisSameAs<1,2>]>, 400 [SDNPCommutative]>; 401 402def X86VPermilpv : SDNode<"X86ISD::VPERMILPV", SDTShuff2OpM>; 403def X86VPermilpi : SDNode<"X86ISD::VPERMILPI", SDTShuff2OpI>; 404def X86VPermv : SDNode<"X86ISD::VPERMV", 405 SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisInt<1>, 406 SDTCisSameNumEltsAs<0,1>, 407 SDTCisSameSizeAs<0,1>, 408 SDTCisSameAs<0,2>]>>; 409def X86VPermi : SDNode<"X86ISD::VPERMI", SDTShuff2OpI>; 410def X86VPermt2 : SDNode<"X86ISD::VPERMV3", 411 SDTypeProfile<1, 3, [SDTCisVec<0>, 412 SDTCisSameAs<0,1>, SDTCisInt<2>, 413 SDTCisVec<2>, SDTCisSameNumEltsAs<0, 2>, 414 SDTCisSameSizeAs<0,2>, 415 SDTCisSameAs<0,3>]>, []>; 416 417def X86vpternlog : SDNode<"X86ISD::VPTERNLOG", SDTTernlog>; 418 419def X86VPerm2x128 : SDNode<"X86ISD::VPERM2X128", SDTShuff3OpI>; 420 421def X86VFixupimm : SDNode<"X86ISD::VFIXUPIMM", SDTFPTernaryOpImmRound>; 422def X86VFixupimmScalar : SDNode<"X86ISD::VFIXUPIMMS", SDTFPTernaryOpImmRound>; 423def X86VRange : SDNode<"X86ISD::VRANGE", SDTFPBinOpImm>; 424def X86VRangeRnd : SDNode<"X86ISD::VRANGE_RND", SDTFPBinOpImmRound>; 
def X86VReduce      : SDNode<"X86ISD::VREDUCE",       SDTFPUnaryOpImm>;
def X86VReduceRnd   : SDNode<"X86ISD::VREDUCE_RND",   SDTFPUnaryOpImmRound>;
def X86VRndScale    : SDNode<"X86ISD::VRNDSCALE",     SDTFPUnaryOpImm>;
def X86VRndScaleRnd : SDNode<"X86ISD::VRNDSCALE_RND", SDTFPUnaryOpImmRound>;
def X86VGetMant     : SDNode<"X86ISD::VGETMANT",      SDTFPUnaryOpImm>;
def X86VGetMantRnd  : SDNode<"X86ISD::VGETMANT_RND",  SDTFPUnaryOpImmRound>;
// VFPCLASS: classify FP elements, producing one i1 per input element.
def X86Vfpclass : SDNode<"X86ISD::VFPCLASS",
                    SDTypeProfile<1, 2, [SDTCVecEltisVT<0, i1>,
                                         SDTCisFP<1>,
                                         SDTCisSameNumEltsAs<0,1>,
                                         SDTCisVT<2, i32>]>, []>;
// Scalar form: a single v1i1 result.
def X86Vfpclasss : SDNode<"X86ISD::VFPCLASSS",
                    SDTypeProfile<1, 2, [SDTCisVT<0, v1i1>,
                                         SDTCisFP<1>, SDTCisVT<2, i32>]>, []>;

def X86SubVBroadcast : SDNode<"X86ISD::SUBV_BROADCAST",
                    SDTypeProfile<1, 1, [SDTCisVec<0>, SDTCisVec<1>,
                                         SDTCisSubVecOfVec<1, 0>]>, []>;

def X86VBroadcast  : SDNode<"X86ISD::VBROADCAST",  SDTVBroadcast>;
def X86VBroadcastm : SDNode<"X86ISD::VBROADCASTM", SDTVBroadcastm>;

def X86Blendi : SDNode<"X86ISD::BLENDI", SDTBlend>;

def X86Addsub : SDNode<"X86ISD::ADDSUB", SDTFPBinOp>;

// FP arithmetic nodes carrying an explicit rounding-mode operand
// (the trailing i32 in the *Round profiles).
def X86faddRnd    : SDNode<"X86ISD::FADD_RND",     SDTFPBinOpRound>;
def X86faddRnds   : SDNode<"X86ISD::FADDS_RND",    SDTFPBinOpRound>;
def X86fsubRnd    : SDNode<"X86ISD::FSUB_RND",     SDTFPBinOpRound>;
def X86fsubRnds   : SDNode<"X86ISD::FSUBS_RND",    SDTFPBinOpRound>;
def X86fmulRnd    : SDNode<"X86ISD::FMUL_RND",     SDTFPBinOpRound>;
def X86fmulRnds   : SDNode<"X86ISD::FMULS_RND",    SDTFPBinOpRound>;
def X86fdivRnd    : SDNode<"X86ISD::FDIV_RND",     SDTFPBinOpRound>;
def X86fdivRnds   : SDNode<"X86ISD::FDIVS_RND",    SDTFPBinOpRound>;
def X86fmaxRnd    : SDNode<"X86ISD::FMAX_RND",     SDTFPBinOpRound>;
def X86fmaxRnds   : SDNode<"X86ISD::FMAXS_RND",    SDTFPBinOpRound>;
def X86fminRnd    : SDNode<"X86ISD::FMIN_RND",     SDTFPBinOpRound>;
def X86fminRnds   : SDNode<"X86ISD::FMINS_RND",    SDTFPBinOpRound>;
def X86scalef     : SDNode<"X86ISD::SCALEF",       SDTFPBinOpRound>;
def X86scalefs    : SDNode<"X86ISD::SCALEFS",      SDTFPBinOpRound>;
def X86fsqrtRnd   : SDNode<"X86ISD::FSQRT_RND",    SDTFPUnaryOpRound>;
def X86fsqrtRnds  : SDNode<"X86ISD::FSQRTS_RND",   SDTFPBinOpRound>;
def X86fgetexpRnd : SDNode<"X86ISD::FGETEXP_RND",  SDTFPUnaryOpRound>;
def X86fgetexpRnds: SDNode<"X86ISD::FGETEXPS_RND", SDTFPBinOpRound>;

// FMA family. X86Fmadd is the generic ISD::FMA; the rest are X86 variants.
def X86Fmadd    : SDNode<"ISD::FMA",          SDTFPTernaryOp, [SDNPCommutative]>;
def X86Fnmadd   : SDNode<"X86ISD::FNMADD",    SDTFPTernaryOp, [SDNPCommutative]>;
def X86Fmsub    : SDNode<"X86ISD::FMSUB",     SDTFPTernaryOp, [SDNPCommutative]>;
def X86Fnmsub   : SDNode<"X86ISD::FNMSUB",    SDTFPTernaryOp, [SDNPCommutative]>;
def X86Fmaddsub : SDNode<"X86ISD::FMADDSUB",  SDTFPTernaryOp, [SDNPCommutative]>;
def X86Fmsubadd : SDNode<"X86ISD::FMSUBADD",  SDTFPTernaryOp, [SDNPCommutative]>;

// FMA with an explicit rounding-mode operand (SDTFmaRound's trailing i32).
def X86FmaddRnd    : SDNode<"X86ISD::FMADD_RND",    SDTFmaRound, [SDNPCommutative]>;
def X86FnmaddRnd   : SDNode<"X86ISD::FNMADD_RND",   SDTFmaRound, [SDNPCommutative]>;
def X86FmsubRnd    : SDNode<"X86ISD::FMSUB_RND",    SDTFmaRound, [SDNPCommutative]>;
def X86FnmsubRnd   : SDNode<"X86ISD::FNMSUB_RND",   SDTFmaRound, [SDNPCommutative]>;
def X86FmaddsubRnd : SDNode<"X86ISD::FMADDSUB_RND", SDTFmaRound, [SDNPCommutative]>;
def X86FmsubaddRnd : SDNode<"X86ISD::FMSUBADD_RND", SDTFmaRound, [SDNPCommutative]>;

// Integer multiply-accumulate (IFMA52).
def SDTIFma : SDTypeProfile<1, 3, [SDTCisInt<0>, SDTCisSameAs<0,1>,
                                   SDTCisSameAs<1,2>, SDTCisSameAs<1,3>]>;
def x86vpmadd52l : SDNode<"X86ISD::VPMADD52L", SDTIFma, [SDNPCommutative]>;
def x86vpmadd52h : SDNode<"X86ISD::VPMADD52H", SDTIFma, [SDNPCommutative]>;

def X86rsqrt14 : SDNode<"X86ISD::RSQRT14", SDTFPUnaryOp>;
def X86rcp14   : SDNode<"X86ISD::RCP14",   SDTFPUnaryOp>;

// VNNI
def SDTVnni : SDTypeProfile<1, 3, [SDTCisVec<0>, SDTCisSameAs<0,1>,
                                   SDTCisSameAs<1,2>, SDTCisSameAs<1,3>]>;
def X86Vpdpbusd  : SDNode<"X86ISD::VPDPBUSD",  SDTVnni>;
def X86Vpdpbusds : SDNode<"X86ISD::VPDPBUSDS", SDTVnni>;
def X86Vpdpwssd  : SDNode<"X86ISD::VPDPWSSD",  SDTVnni>;
def X86Vpdpwssds : SDNode<"X86ISD::VPDPWSSDS", SDTVnni>;

def X86rsqrt28 : SDNode<"X86ISD::RSQRT28", SDTFPUnaryOpRound>;
def X86rcp28   : SDNode<"X86ISD::RCP28",   SDTFPUnaryOpRound>;
def X86exp2    : SDNode<"X86ISD::EXP2",    SDTFPUnaryOpRound>;

def X86rsqrt14s : SDNode<"X86ISD::RSQRT14S", SDTFPBinOp>;
def X86rcp14s   : SDNode<"X86ISD::RCP14S",   SDTFPBinOp>;
def X86rsqrt28s : SDNode<"X86ISD::RSQRT28S", SDTFPBinOpRound>;
def X86rcp28s   : SDNode<"X86ISD::RCP28S",   SDTFPBinOpRound>;
def X86Ranges       : SDNode<"X86ISD::VRANGES",        SDTFPBinOpImm>;
def X86RndScales    : SDNode<"X86ISD::VRNDSCALES",     SDTFPBinOpImm>;
def X86Reduces      : SDNode<"X86ISD::VREDUCES",       SDTFPBinOpImm>;
def X86GetMants     : SDNode<"X86ISD::VGETMANTS",      SDTFPBinOpImm>;
def X86RangesRnd    : SDNode<"X86ISD::VRANGES_RND",    SDTFPBinOpImmRound>;
def X86RndScalesRnd : SDNode<"X86ISD::VRNDSCALES_RND", SDTFPBinOpImmRound>;
def X86ReducesRnd   : SDNode<"X86ISD::VREDUCES_RND",   SDTFPBinOpImmRound>;
def X86GetMantsRnd  : SDNode<"X86ISD::VGETMANTS_RND",  SDTFPBinOpImmRound>;

def X86compress: SDNode<"X86ISD::COMPRESS", SDTypeProfile<1, 1,
                              [SDTCisSameAs<0, 1>, SDTCisVec<1>]>, []>;
def X86expand  : SDNode<"X86ISD::EXPAND",   SDTypeProfile<1, 1,
                              [SDTCisSameAs<0, 1>, SDTCisVec<1>]>, []>;

// vpshufbitqmb
def X86Vpshufbitqmb : SDNode<"X86ISD::VPSHUFBITQMB",
                       SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisVec<1>,
                                            SDTCisSameAs<1,2>,
                                            SDTCVecEltisVT<0,i1>,
                                            SDTCisSameNumEltsAs<0,1>]>>;

// Conversion type profiles. The trailing i32 in the *Rnd variants is the
// rounding-mode operand.
def SDTintToFPRound: SDTypeProfile<1, 3, [SDTCisVec<0>, SDTCisFP<0>,
                                          SDTCisSameAs<0,1>, SDTCisInt<2>,
                                          SDTCisVT<3, i32>]>;

def SDTFloatToInt: SDTypeProfile<1, 1, [SDTCisVec<0>, SDTCisVec<1>,
                                        SDTCisInt<0>, SDTCisFP<1>]>;
def SDTFloatToIntRnd: SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisVec<1>,
                                           SDTCisInt<0>, SDTCisFP<1>,
                                           SDTCisVT<2, i32>]>;
def SDTSFloatToIntRnd: SDTypeProfile<1, 2, [SDTCisInt<0>, SDTCisFP<1>,
                                            SDTCisVec<1>, SDTCisVT<2, i32>]>;

def SDTVintToFP: SDTypeProfile<1, 1, [SDTCisVec<0>, SDTCisVec<1>,
                                      SDTCisFP<0>, SDTCisInt<1>]>;
def SDTVintToFPRound: SDTypeProfile<1, 2, [SDTCisVec<0>, SDTCisVec<1>,
                                           SDTCisFP<0>, SDTCisInt<1>,
                                           SDTCisVT<2, i32>]>;

// Scalar
def X86SintToFpRnd : SDNode<"X86ISD::SCALAR_SINT_TO_FP_RND", SDTintToFPRound>;
def X86UintToFpRnd : SDNode<"X86ISD::SCALAR_UINT_TO_FP_RND", SDTintToFPRound>;

def X86cvtts2IntRnd  : SDNode<"X86ISD::CVTTS2SI_RND", SDTSFloatToIntRnd>;
def X86cvtts2UIntRnd : SDNode<"X86ISD::CVTTS2UI_RND", SDTSFloatToIntRnd>;

def X86cvts2si  : SDNode<"X86ISD::CVTS2SI_RND", SDTSFloatToIntRnd>;
def X86cvts2usi : SDNode<"X86ISD::CVTS2UI_RND", SDTSFloatToIntRnd>;

// Vector with rounding mode

// cvtt fp-to-int stuff
def X86cvttp2siRnd : SDNode<"X86ISD::CVTTP2SI_RND", SDTFloatToIntRnd>;
def X86cvttp2uiRnd : SDNode<"X86ISD::CVTTP2UI_RND", SDTFloatToIntRnd>;

// cvt int-to-fp stuff
def X86VSintToFpRnd : SDNode<"X86ISD::SINT_TO_FP_RND", SDTVintToFPRound>;
def X86VUintToFpRnd : SDNode<"X86ISD::UINT_TO_FP_RND", SDTVintToFPRound>;

// cvt fp-to-int stuff
def X86cvtp2IntRnd  : SDNode<"X86ISD::CVTP2SI_RND", SDTFloatToIntRnd>;
def X86cvtp2UIntRnd : SDNode<"X86ISD::CVTP2UI_RND", SDTFloatToIntRnd>;

// Vector without rounding mode

// cvtt fp-to-int stuff
def X86cvttp2si : SDNode<"X86ISD::CVTTP2SI", SDTFloatToInt>;
def X86cvttp2ui : SDNode<"X86ISD::CVTTP2UI", SDTFloatToInt>;

// cvt int-to-fp stuff
def X86VSintToFP : SDNode<"X86ISD::CVTSI2P", SDTVintToFP>;
def X86VUintToFP : SDNode<"X86ISD::CVTUI2P", SDTVintToFP>;

// cvt fp-to-int stuff
def X86cvtp2Int  : SDNode<"X86ISD::CVTP2SI", SDTFloatToInt>;
def X86cvtp2UInt : SDNode<"X86ISD::CVTP2UI", SDTFloatToInt>;


// Half <-> single precision conversions (F16C / AVX-512).
def X86cvtph2ps : SDNode<"X86ISD::CVTPH2PS",
                    SDTypeProfile<1, 1, [SDTCVecEltisVT<0, f32>,
                                         SDTCVecEltisVT<1, i16>]> >;

def X86cvtph2psRnd : SDNode<"X86ISD::CVTPH2PS_RND",
                       SDTypeProfile<1, 2, [SDTCVecEltisVT<0, f32>,
                                            SDTCVecEltisVT<1, i16>,
                                            SDTCisVT<2, i32>]> >;

def X86cvtps2ph : SDNode<"X86ISD::CVTPS2PH",
                    SDTypeProfile<1, 2, [SDTCVecEltisVT<0, i16>,
                                         SDTCVecEltisVT<1, f32>,
                                         SDTCisVT<2, i32>]> >;
def X86vfpextRnd : SDNode<"X86ISD::VFPEXT_RND",
                     SDTypeProfile<1, 2, [SDTCVecEltisVT<0, f64>,
                                          SDTCVecEltisVT<1, f32>,
                                          SDTCisOpSmallerThanOp<1, 0>,
                                          SDTCisVT<2, i32>]>>;
def X86vfproundRnd: SDNode<"X86ISD::VFPROUND_RND",
                      SDTypeProfile<1, 2, [SDTCVecEltisVT<0, f32>,
                                           SDTCVecEltisVT<1, f64>,
                                           SDTCisOpSmallerThanOp<0, 1>,
                                           SDTCisVT<2, i32>]>>;

// galois field arithmetic
def X86GF2P8affineinvqb : SDNode<"X86ISD::GF2P8AFFINEINVQB", SDTBlend>;
def X86GF2P8affineqb    : SDNode<"X86ISD::GF2P8AFFINEQB", SDTBlend>;
def X86GF2P8mulb        : SDNode<"X86ISD::GF2P8MULB", SDTIntBinOp>;

//===----------------------------------------------------------------------===//
// SSE Complex Patterns
//===----------------------------------------------------------------------===//

// These are 'extloads' from a scalar to the low element of a vector, zeroing
// the top elements.  These are used for the SSE 'ss' and 'sd' instruction
// forms.
// Scalar SSE load complex patterns; selection is done in C++ by
// selectScalarSSELoad.
def sse_load_f32 : ComplexPattern<v4f32, 5, "selectScalarSSELoad", [],
                                  [SDNPHasChain, SDNPMayLoad, SDNPMemOperand,
                                   SDNPWantRoot, SDNPWantParent]>;
def sse_load_f64 : ComplexPattern<v2f64, 5, "selectScalarSSELoad", [],
                                  [SDNPHasChain, SDNPMayLoad, SDNPMemOperand,
                                   SDNPWantRoot, SDNPWantParent]>;

// Memory operands for the scalar 'ss'/'sd' forms: standard x86 memory
// 5-tuple (base, scale, index, disp, segment) printed as f32/f64 memory.
def ssmem : Operand<v4f32> {
  let PrintMethod = "printf32mem";
  let MIOperandInfo = (ops ptr_rc, i8imm, ptr_rc_nosp, i32imm, SEGMENT_REG);
  let ParserMatchClass = X86Mem32AsmOperand;
  let OperandType = "OPERAND_MEMORY";
}
def sdmem : Operand<v2f64> {
  let PrintMethod = "printf64mem";
  let MIOperandInfo = (ops ptr_rc, i8imm, ptr_rc_nosp, i32imm, SEGMENT_REG);
  let ParserMatchClass = X86Mem64AsmOperand;
  let OperandType = "OPERAND_MEMORY";
}

//===----------------------------------------------------------------------===//
// SSE pattern fragments
//===----------------------------------------------------------------------===//

// Vector load wrappers to prevent folding of non-temporal aligned loads on
// supporting targets.
// A plain vector load, excluding non-temporal loads the target wants to keep
// as explicit NT instructions.
def vecload : PatFrag<(ops node:$ptr), (load node:$ptr), [{
  return !useNonTemporalLoad(cast<LoadSDNode>(N));
}]>;

// 128-bit load pattern fragments
// NOTE: all 128-bit integer vector loads are promoted to v2i64
def loadv4f32 : PatFrag<(ops node:$ptr), (v4f32 (vecload node:$ptr))>;
def loadv2f64 : PatFrag<(ops node:$ptr), (v2f64 (vecload node:$ptr))>;
def loadv2i64 : PatFrag<(ops node:$ptr), (v2i64 (vecload node:$ptr))>;

// 256-bit load pattern fragments
// NOTE: all 256-bit integer vector loads are promoted to v4i64
def loadv8f32 : PatFrag<(ops node:$ptr), (v8f32 (vecload node:$ptr))>;
def loadv4f64 : PatFrag<(ops node:$ptr), (v4f64 (vecload node:$ptr))>;
def loadv4i64 : PatFrag<(ops node:$ptr), (v4i64 (vecload node:$ptr))>;

// 512-bit load pattern fragments
def loadv16f32 : PatFrag<(ops node:$ptr), (v16f32 (vecload node:$ptr))>;
def loadv8f64  : PatFrag<(ops node:$ptr), (v8f64 (vecload node:$ptr))>;
def loadv8i64  : PatFrag<(ops node:$ptr), (v8i64 (vecload node:$ptr))>;

// 128-/256-/512-bit extload pattern fragments (f32 elements extended to f64)
def extloadv2f32 : PatFrag<(ops node:$ptr), (v2f64 (extloadvf32 node:$ptr))>;
def extloadv4f32 : PatFrag<(ops node:$ptr), (v4f64 (extloadvf32 node:$ptr))>;
def extloadv8f32 : PatFrag<(ops node:$ptr), (v8f64 (extloadvf32 node:$ptr))>;

// Like 'store', but always requires vector size alignment.
def alignedstore : PatFrag<(ops node:$val, node:$ptr),
                           (store node:$val, node:$ptr), [{
  auto *St = cast<StoreSDNode>(N);
  return St->getAlignment() >= St->getMemoryVT().getStoreSize();
}]>;

// Like 'vecload', but always requires the load's vector-size alignment.
680def alignedvecload : PatFrag<(ops node:$ptr), (load node:$ptr), [{ 681 auto *Ld = cast<LoadSDNode>(N); 682 return Ld->getAlignment() >= Ld->getMemoryVT().getStoreSize() && 683 !useNonTemporalLoad(cast<LoadSDNode>(N)); 684}]>; 685 686// 128-bit aligned load pattern fragments 687// NOTE: all 128-bit integer vector loads are promoted to v2i64 688def alignedloadv4f32 : PatFrag<(ops node:$ptr), 689 (v4f32 (alignedvecload node:$ptr))>; 690def alignedloadv2f64 : PatFrag<(ops node:$ptr), 691 (v2f64 (alignedvecload node:$ptr))>; 692def alignedloadv2i64 : PatFrag<(ops node:$ptr), 693 (v2i64 (alignedvecload node:$ptr))>; 694 695// 256-bit aligned load pattern fragments 696// NOTE: all 256-bit integer vector loads are promoted to v4i64 697def alignedloadv8f32 : PatFrag<(ops node:$ptr), 698 (v8f32 (alignedvecload node:$ptr))>; 699def alignedloadv4f64 : PatFrag<(ops node:$ptr), 700 (v4f64 (alignedvecload node:$ptr))>; 701def alignedloadv4i64 : PatFrag<(ops node:$ptr), 702 (v4i64 (alignedvecload node:$ptr))>; 703 704// 512-bit aligned load pattern fragments 705def alignedloadv16f32 : PatFrag<(ops node:$ptr), 706 (v16f32 (alignedvecload node:$ptr))>; 707def alignedloadv8f64 : PatFrag<(ops node:$ptr), 708 (v8f64 (alignedvecload node:$ptr))>; 709def alignedloadv8i64 : PatFrag<(ops node:$ptr), 710 (v8i64 (alignedvecload node:$ptr))>; 711 712// Like 'vecload', but uses special alignment checks suitable for use in 713// memory operands in most SSE instructions, which are required to 714// be naturally aligned on some targets but not on others. If the subtarget 715// allows unaligned accesses, match any load, though this may require 716// setting a feature bit in the processor (on startup, for example). 717// Opteron 10h and later implement such a feature. 
// A vector load counts as a "memop" when the subtarget tolerates unaligned
// SSE memory operands, or the load is aligned to at least the store size of
// its memory type.
def memop : PatFrag<(ops node:$ptr), (vecload node:$ptr), [{
  auto *Ld = cast<LoadSDNode>(N);
  return Subtarget->hasSSEUnalignedMem() ||
         Ld->getAlignment() >= Ld->getMemoryVT().getStoreSize();
}]>;

// 128-bit memop pattern fragments
// NOTE: all 128-bit integer vector loads are promoted to v2i64
def memopv4f32 : PatFrag<(ops node:$ptr), (v4f32 (memop node:$ptr))>;
def memopv2f64 : PatFrag<(ops node:$ptr), (v2f64 (memop node:$ptr))>;
def memopv2i64 : PatFrag<(ops node:$ptr), (v2i64 (memop node:$ptr))>;

def X86masked_gather : SDNode<"X86ISD::MGATHER",
                              SDTypeProfile<2, 3, [SDTCisVec<0>,
                                                   SDTCisVec<1>, SDTCisInt<1>,
                                                   SDTCisSameAs<0, 2>,
                                                   SDTCisSameAs<1, 3>,
                                                   SDTCisPtrTy<4>]>,
                              [SDNPHasChain, SDNPMayLoad, SDNPMemOperand]>;

def X86masked_scatter : SDNode<"X86ISD::MSCATTER",
                               SDTypeProfile<1, 3, [SDTCisVec<0>, SDTCisVec<1>,
                                                    SDTCisSameAs<0, 2>,
                                                    SDTCVecEltisVT<0, i1>,
                                                    SDTCisPtrTy<3>]>,
                               [SDNPHasChain, SDNPMayStore, SDNPMemOperand]>;

// Gather fragments, distinguished by the value type of the index vector.
def mgatherv4i32 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                           (X86masked_gather node:$src1, node:$src2, node:$src3), [{
  X86MaskedGatherSDNode *Mgt = cast<X86MaskedGatherSDNode>(N);
  return Mgt->getIndex().getValueType() == MVT::v4i32;
}]>;

def mgatherv8i32 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                           (X86masked_gather node:$src1, node:$src2, node:$src3), [{
  X86MaskedGatherSDNode *Mgt = cast<X86MaskedGatherSDNode>(N);
  return Mgt->getIndex().getValueType() == MVT::v8i32;
}]>;

def mgatherv2i64 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                           (X86masked_gather node:$src1, node:$src2, node:$src3), [{
  X86MaskedGatherSDNode *Mgt = cast<X86MaskedGatherSDNode>(N);
  return Mgt->getIndex().getValueType() == MVT::v2i64;
}]>;

def mgatherv4i64 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                           (X86masked_gather node:$src1, node:$src2, node:$src3), [{
  X86MaskedGatherSDNode *Mgt = cast<X86MaskedGatherSDNode>(N);
  return Mgt->getIndex().getValueType() == MVT::v4i64;
}]>;

def mgatherv8i64 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                           (X86masked_gather node:$src1, node:$src2, node:$src3), [{
  X86MaskedGatherSDNode *Mgt = cast<X86MaskedGatherSDNode>(N);
  return Mgt->getIndex().getValueType() == MVT::v8i64;
}]>;

def mgatherv16i32 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                            (X86masked_gather node:$src1, node:$src2, node:$src3), [{
  X86MaskedGatherSDNode *Mgt = cast<X86MaskedGatherSDNode>(N);
  return Mgt->getIndex().getValueType() == MVT::v16i32;
}]>;

// Scatter fragments, distinguished by the value type of the index vector.
def mscatterv2i64 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                            (X86masked_scatter node:$src1, node:$src2, node:$src3), [{
  X86MaskedScatterSDNode *Sc = cast<X86MaskedScatterSDNode>(N);
  return Sc->getIndex().getValueType() == MVT::v2i64;
}]>;

def mscatterv4i32 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                            (X86masked_scatter node:$src1, node:$src2, node:$src3), [{
  X86MaskedScatterSDNode *Sc = cast<X86MaskedScatterSDNode>(N);
  return Sc->getIndex().getValueType() == MVT::v4i32;
}]>;

def mscatterv4i64 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                            (X86masked_scatter node:$src1, node:$src2, node:$src3), [{
  X86MaskedScatterSDNode *Sc = cast<X86MaskedScatterSDNode>(N);
  return Sc->getIndex().getValueType() == MVT::v4i64;
}]>;

def mscatterv8i32 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                            (X86masked_scatter node:$src1, node:$src2, node:$src3), [{
  X86MaskedScatterSDNode *Sc = cast<X86MaskedScatterSDNode>(N);
  return Sc->getIndex().getValueType() == MVT::v8i32;
}]>;

def mscatterv8i64 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                            (X86masked_scatter node:$src1, node:$src2, node:$src3), [{
  X86MaskedScatterSDNode *Sc = cast<X86MaskedScatterSDNode>(N);
  return Sc->getIndex().getValueType() == MVT::v8i64;
}]>;

def mscatterv16i32 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                             (X86masked_scatter node:$src1, node:$src2, node:$src3), [{
  X86MaskedScatterSDNode *Sc = cast<X86MaskedScatterSDNode>(N);
  return Sc->getIndex().getValueType() == MVT::v16i32;
}]>;

// 128-bit bitconvert pattern fragments
def bc_v4f32 : PatFrag<(ops node:$in), (v4f32 (bitconvert node:$in))>;
def bc_v2f64 : PatFrag<(ops node:$in), (v2f64 (bitconvert node:$in))>;
def bc_v16i8 : PatFrag<(ops node:$in), (v16i8 (bitconvert node:$in))>;
def bc_v8i16 : PatFrag<(ops node:$in), (v8i16 (bitconvert node:$in))>;
def bc_v4i32 : PatFrag<(ops node:$in), (v4i32 (bitconvert node:$in))>;
def bc_v2i64 : PatFrag<(ops node:$in), (v2i64 (bitconvert node:$in))>;

// 256-bit bitconvert pattern fragments
def bc_v32i8  : PatFrag<(ops node:$in), (v32i8  (bitconvert node:$in))>;
def bc_v16i16 : PatFrag<(ops node:$in), (v16i16 (bitconvert node:$in))>;
def bc_v8i32  : PatFrag<(ops node:$in), (v8i32  (bitconvert node:$in))>;
def bc_v4i64  : PatFrag<(ops node:$in), (v4i64  (bitconvert node:$in))>;
def bc_v8f32  : PatFrag<(ops node:$in), (v8f32  (bitconvert node:$in))>;
def bc_v4f64  : PatFrag<(ops node:$in), (v4f64  (bitconvert node:$in))>;

// 512-bit bitconvert pattern fragments
def bc_v64i8  : PatFrag<(ops node:$in), (v64i8  (bitconvert node:$in))>;
def bc_v16i32 : PatFrag<(ops node:$in), (v16i32 (bitconvert node:$in))>;
def bc_v8i64  : PatFrag<(ops node:$in), (v8i64  (bitconvert node:$in))>;
def bc_v8f64  : PatFrag<(ops node:$in), (v8f64  (bitconvert node:$in))>;
def bc_v16f32 : PatFrag<(ops node:$in), (v16f32 (bitconvert node:$in))>;

def vzmovl_v2i64 : PatFrag<(ops node:$src),
                           (bitconvert (v2i64 (X86vzmovl
                             (v2i64 (scalar_to_vector (loadi64 node:$src))))))>;
def vzmovl_v4i32 : PatFrag<(ops node:$src),
                           (bitconvert (v4i32 (X86vzmovl
                             (v4i32 (scalar_to_vector (loadi32 node:$src))))))>;

def vzload_v2i64 : PatFrag<(ops node:$src),
                           (bitconvert (v2i64 (X86vzload node:$src)))>;

// Floating-point immediates that are exactly +0.0.
def fp32imm0 : PatLeaf<(f32 fpimm), [{
  return N->isExactlyValue(+0.0);
}]>;

def fp64imm0 : PatLeaf<(f64 fpimm), [{
  return N->isExactlyValue(+0.0);
}]>;

def I8Imm : SDNodeXForm<imm, [{
  // Transformation function: get the low 8 bits.
  return getI8Imm((uint8_t)N->getZExtValue(), SDLoc(N));
}]>;

def FROUND_NO_EXC : PatLeaf<(i32 8)>;
def FROUND_CURRENT : PatLeaf<(i32 4)>;

// BYTE_imm - Transform bit immediates into byte immediates.
def BYTE_imm : SDNodeXForm<imm, [{
  // Transformation function: imm >> 3
  return getI32Imm(N->getZExtValue() >> 3, SDLoc(N));
}]>;

// EXTRACT_get_vextract128_imm xform function: convert extract_subvector index
// to VEXTRACTF128/VEXTRACTI128 imm.
def EXTRACT_get_vextract128_imm : SDNodeXForm<extract_subvector, [{
  return getExtractVEXTRACTImmediate(N, 128, SDLoc(N));
}]>;

// INSERT_get_vinsert128_imm xform function: convert insert_subvector index to
// VINSERTF128/VINSERTI128 imm.
def INSERT_get_vinsert128_imm : SDNodeXForm<insert_subvector, [{
  return getInsertVINSERTImmediate(N, 128, SDLoc(N));
}]>;

// EXTRACT_get_vextract256_imm xform function: convert extract_subvector index
// to VEXTRACTF64x4 imm.
def EXTRACT_get_vextract256_imm : SDNodeXForm<extract_subvector, [{
  return getExtractVEXTRACTImmediate(N, 256, SDLoc(N));
}]>;

// INSERT_get_vinsert256_imm xform function: convert insert_subvector index to
// VINSERTF64x4 imm.
def INSERT_get_vinsert256_imm : SDNodeXForm<insert_subvector, [{
  return getInsertVINSERTImmediate(N, 256, SDLoc(N));
}]>;

// Subvector extract/insert fragments that carry the xform producing the
// instruction immediate from the subvector index.
def vextract128_extract : PatFrag<(ops node:$bigvec, node:$index),
                                  (extract_subvector node:$bigvec,
                                                     node:$index), [{}],
                                  EXTRACT_get_vextract128_imm>;

def vinsert128_insert : PatFrag<(ops node:$bigvec, node:$smallvec,
                                     node:$index),
                                (insert_subvector node:$bigvec, node:$smallvec,
                                                  node:$index), [{}],
                                INSERT_get_vinsert128_imm>;

def vextract256_extract : PatFrag<(ops node:$bigvec, node:$index),
                                  (extract_subvector node:$bigvec,
                                                     node:$index), [{}],
                                  EXTRACT_get_vextract256_imm>;

def vinsert256_insert : PatFrag<(ops node:$bigvec, node:$smallvec,
                                     node:$index),
                                (insert_subvector node:$bigvec, node:$smallvec,
                                                  node:$index), [{}],
                                INSERT_get_vinsert256_imm>;

// A plain masked load: neither expanding nor extending.
def X86mload : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                       (masked_load node:$src1, node:$src2, node:$src3), [{
  return !cast<MaskedLoadSDNode>(N)->isExpandingLoad() &&
         cast<MaskedLoadSDNode>(N)->getExtensionType() == ISD::NON_EXTLOAD;
}]>;

def masked_load_aligned128 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                                     (X86mload node:$src1, node:$src2, node:$src3), [{
  return cast<MaskedLoadSDNode>(N)->getAlignment() >= 16;
}]>;

def masked_load_aligned256 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                                     (X86mload node:$src1, node:$src2, node:$src3), [{
  return cast<MaskedLoadSDNode>(N)->getAlignment() >= 32;
}]>;

def masked_load_aligned512 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                                     (X86mload node:$src1, node:$src2, node:$src3), [{
  return cast<MaskedLoadSDNode>(N)->getAlignment() >= 64;
}]>;

// Any plain masked load, regardless of alignment. Composed from X86mload
// (instead of duplicating its predicate code) so the two fragments cannot
// drift out of sync.
def masked_load_unaligned : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                                    (X86mload node:$src1, node:$src2, node:$src3)>;

def X86mExpandingLoad : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                                (masked_load node:$src1, node:$src2, node:$src3), [{
  return cast<MaskedLoadSDNode>(N)->isExpandingLoad();
}]>;

// Masked store fragments.
// X86mstore can't be implemented in core DAG files because some targets
// do not support vector types (llvm-tblgen will fail).
def X86mstore : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                        (masked_store node:$src1, node:$src2, node:$src3), [{
  return (!cast<MaskedStoreSDNode>(N)->isTruncatingStore()) &&
         (!cast<MaskedStoreSDNode>(N)->isCompressingStore());
}]>;

def masked_store_aligned128 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                                      (X86mstore node:$src1, node:$src2, node:$src3), [{
  return cast<MaskedStoreSDNode>(N)->getAlignment() >= 16;
}]>;

def masked_store_aligned256 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                                      (X86mstore node:$src1, node:$src2, node:$src3), [{
  return cast<MaskedStoreSDNode>(N)->getAlignment() >= 32;
}]>;

def masked_store_aligned512 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                                      (X86mstore node:$src1, node:$src2, node:$src3), [{
  return cast<MaskedStoreSDNode>(N)->getAlignment() >= 64;
}]>;

// Any plain masked store, regardless of alignment. Composed from X86mstore
// for the same reason as masked_load_unaligned above.
def masked_store_unaligned : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                                     (X86mstore node:$src1, node:$src2, node:$src3)>;

def X86mCompressingStore : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                                   (masked_store node:$src1, node:$src2, node:$src3), [{
  return cast<MaskedStoreSDNode>(N)->isCompressingStore();
}]>;

// Masked truncstore fragments.
// X86mtruncstore can't be implemented in core DAG files because some targets
// do not support vector types (llvm-tblgen will fail).
// A masked store that truncates the stored value to the memory type.
def X86mtruncstore : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                             (masked_store node:$src1, node:$src2, node:$src3), [{
  return cast<MaskedStoreSDNode>(N)->isTruncatingStore();
}]>;

// Masked truncating stores, keyed on the scalar element type in memory.
def masked_truncstorevi8 :
  PatFrag<(ops node:$src1, node:$src2, node:$src3),
          (X86mtruncstore node:$src1, node:$src2, node:$src3), [{
  return cast<MaskedStoreSDNode>(N)->getMemoryVT().getScalarType() == MVT::i8;
}]>;

def masked_truncstorevi16 :
  PatFrag<(ops node:$src1, node:$src2, node:$src3),
          (X86mtruncstore node:$src1, node:$src2, node:$src3), [{
  return cast<MaskedStoreSDNode>(N)->getMemoryVT().getScalarType() == MVT::i16;
}]>;

def masked_truncstorevi32 :
  PatFrag<(ops node:$src1, node:$src2, node:$src3),
          (X86mtruncstore node:$src1, node:$src2, node:$src3), [{
  return cast<MaskedStoreSDNode>(N)->getMemoryVT().getScalarType() == MVT::i32;
}]>;

// Truncating-store nodes: "S" = signed saturation, "US" = unsigned
// saturation (per the X86ISD opcode names); "M" variants are masked.
def X86TruncSStore : SDNode<"X86ISD::VTRUNCSTORES", SDTStore,
                            [SDNPHasChain, SDNPMayStore, SDNPMemOperand]>;

def X86TruncUSStore : SDNode<"X86ISD::VTRUNCSTOREUS", SDTStore,
                             [SDNPHasChain, SDNPMayStore, SDNPMemOperand]>;

def X86MTruncSStore : SDNode<"X86ISD::VMTRUNCSTORES", SDTMaskedStore,
                             [SDNPHasChain, SDNPMayStore, SDNPMemOperand]>;

def X86MTruncUSStore : SDNode<"X86ISD::VMTRUNCSTOREUS", SDTMaskedStore,
                              [SDNPHasChain, SDNPMayStore, SDNPMemOperand]>;

// Unmasked saturating truncstore fragments, keyed on the scalar memory type.
def truncstore_s_vi8 : PatFrag<(ops node:$val, node:$ptr),
                               (X86TruncSStore node:$val, node:$ptr), [{
  return cast<TruncSStoreSDNode>(N)->getMemoryVT().getScalarType() == MVT::i8;
}]>;

def truncstore_us_vi8 : PatFrag<(ops node:$val, node:$ptr),
                                (X86TruncUSStore node:$val, node:$ptr), [{
  return cast<TruncUSStoreSDNode>(N)->getMemoryVT().getScalarType() == MVT::i8;
}]>;

def truncstore_s_vi16 : PatFrag<(ops node:$val, node:$ptr),
                                (X86TruncSStore node:$val, node:$ptr), [{
  return cast<TruncSStoreSDNode>(N)->getMemoryVT().getScalarType() == MVT::i16;
}]>;

def truncstore_us_vi16 : PatFrag<(ops node:$val, node:$ptr),
                                 (X86TruncUSStore node:$val, node:$ptr), [{
  return cast<TruncUSStoreSDNode>(N)->getMemoryVT().getScalarType() == MVT::i16;
}]>;

def truncstore_s_vi32 : PatFrag<(ops node:$val, node:$ptr),
                                (X86TruncSStore node:$val, node:$ptr), [{
  return cast<TruncSStoreSDNode>(N)->getMemoryVT().getScalarType() == MVT::i32;
}]>;

def truncstore_us_vi32 : PatFrag<(ops node:$val, node:$ptr),
                                 (X86TruncUSStore node:$val, node:$ptr), [{
  return cast<TruncUSStoreSDNode>(N)->getMemoryVT().getScalarType() == MVT::i32;
}]>;

// Masked saturating truncstore fragments, keyed on the scalar memory type.
def masked_truncstore_s_vi8 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                                      (X86MTruncSStore node:$src1, node:$src2, node:$src3), [{
  return cast<MaskedTruncSStoreSDNode>(N)->getMemoryVT().getScalarType() == MVT::i8;
}]>;

def masked_truncstore_us_vi8 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                                       (X86MTruncUSStore node:$src1, node:$src2, node:$src3), [{
  return cast<MaskedTruncUSStoreSDNode>(N)->getMemoryVT().getScalarType() == MVT::i8;
}]>;

def masked_truncstore_s_vi16 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                                       (X86MTruncSStore node:$src1, node:$src2, node:$src3), [{
  return cast<MaskedTruncSStoreSDNode>(N)->getMemoryVT().getScalarType() == MVT::i16;
}]>;

def masked_truncstore_us_vi16 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                                        (X86MTruncUSStore node:$src1, node:$src2, node:$src3), [{
  return cast<MaskedTruncUSStoreSDNode>(N)->getMemoryVT().getScalarType() == MVT::i16;
}]>;

def masked_truncstore_s_vi32 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                                       (X86MTruncSStore node:$src1, node:$src2, node:$src3), [{
  return cast<MaskedTruncSStoreSDNode>(N)->getMemoryVT().getScalarType() == MVT::i32;
}]>;

def masked_truncstore_us_vi32 : PatFrag<(ops node:$src1, node:$src2, node:$src3),
                                        (X86MTruncUSStore node:$src1, node:$src2, node:$src3), [{
  return cast<MaskedTruncUSStoreSDNode>(N)->getMemoryVT().getScalarType() == MVT::i32;
}]>;