Lines Matching refs:pfx
Cross-reference hits for the decoded prefix word pfx in the VEX amd64 instruction decoder (guest_amd64_toIR.c). Each entry gives the source line number, the matching line, and the enclosing function; "argument" marks hits where pfx is a parameter declaration.
676 static Bool IS_VALID_PFX ( Prefix pfx ) { in IS_VALID_PFX() argument
677 return toBool((pfx & 0xFF000000) == PFX_EMPTY); in IS_VALID_PFX()
680 static Bool haveREX ( Prefix pfx ) { in haveREX() argument
681 return toBool(pfx & PFX_REX); in haveREX()
684 static Int getRexW ( Prefix pfx ) { in getRexW() argument
685 return (pfx & PFX_REXW) ? 1 : 0; in getRexW()
687 static Int getRexR ( Prefix pfx ) { in getRexR() argument
688 return (pfx & PFX_REXR) ? 1 : 0; in getRexR()
690 static Int getRexX ( Prefix pfx ) { in getRexX() argument
691 return (pfx & PFX_REXX) ? 1 : 0; in getRexX()
693 static Int getRexB ( Prefix pfx ) { in getRexB() argument
694 return (pfx & PFX_REXB) ? 1 : 0; in getRexB()
699 static Bool haveF2orF3 ( Prefix pfx ) { in haveF2orF3() argument
700 return toBool((pfx & (PFX_F2|PFX_F3)) > 0); in haveF2orF3()
702 static Bool haveF2andF3 ( Prefix pfx ) { in haveF2andF3() argument
703 return toBool((pfx & (PFX_F2|PFX_F3)) == (PFX_F2|PFX_F3)); in haveF2andF3()
705 static Bool haveF2 ( Prefix pfx ) { in haveF2() argument
706 return toBool((pfx & PFX_F2) > 0); in haveF2()
708 static Bool haveF3 ( Prefix pfx ) { in haveF3() argument
709 return toBool((pfx & PFX_F3) > 0); in haveF3()
712 static Bool have66 ( Prefix pfx ) { in have66() argument
713 return toBool((pfx & PFX_66) > 0); in have66()
715 static Bool haveASO ( Prefix pfx ) { in haveASO() argument
716 return toBool((pfx & PFX_ASO) > 0); in haveASO()
718 static Bool haveLOCK ( Prefix pfx ) { in haveLOCK() argument
719 return toBool((pfx & PFX_LOCK) > 0); in haveLOCK()
723 static Bool have66noF2noF3 ( Prefix pfx ) in have66noF2noF3() argument
726 toBool((pfx & (PFX_66|PFX_F2|PFX_F3)) == PFX_66); in have66noF2noF3()
730 static Bool haveF2no66noF3 ( Prefix pfx ) in haveF2no66noF3() argument
733 toBool((pfx & (PFX_66|PFX_F2|PFX_F3)) == PFX_F2); in haveF2no66noF3()
737 static Bool haveF3no66noF2 ( Prefix pfx ) in haveF3no66noF2() argument
740 toBool((pfx & (PFX_66|PFX_F2|PFX_F3)) == PFX_F3); in haveF3no66noF2()
744 static Bool haveF3noF2 ( Prefix pfx ) in haveF3noF2() argument
747 toBool((pfx & (PFX_F2|PFX_F3)) == PFX_F3); in haveF3noF2()
751 static Bool haveF2noF3 ( Prefix pfx ) in haveF2noF3() argument
754 toBool((pfx & (PFX_F2|PFX_F3)) == PFX_F2); in haveF2noF3()
758 static Bool haveNo66noF2noF3 ( Prefix pfx ) in haveNo66noF2noF3() argument
761 toBool((pfx & (PFX_66|PFX_F2|PFX_F3)) == 0); in haveNo66noF2noF3()
765 static Bool have66orF2orF3 ( Prefix pfx ) in have66orF2orF3() argument
767 return toBool( ! haveNo66noF2noF3(pfx) ); in have66orF2orF3()
771 static Bool have66orF3 ( Prefix pfx ) in have66orF3() argument
773 return toBool((pfx & (PFX_66|PFX_F3)) > 0); in have66orF3()
784 static UInt getVexNvvvv ( Prefix pfx ) { in getVexNvvvv() argument
785 UInt r = (UInt)pfx; in getVexNvvvv()
790 static Bool haveVEX ( Prefix pfx ) { in haveVEX() argument
791 return toBool(pfx & PFX_VEX); in haveVEX()
794 static Int getVexL ( Prefix pfx ) { in getVexL() argument
795 return (pfx & PFX_VEXL) ? 1 : 0; in getVexL()
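The block above, IS_VALID_PFX through getVexL, is the whole prefix-query API: the prefixes seen while decoding are packed into a single Prefix word and every query is a mask-and-compare on that word. The standalone sketch below shows the pattern; the PFX_* bit positions here are made up for illustration and are not the values the decoder actually uses.

    #include <stdio.h>

    typedef unsigned int Prefix;            /* prefix state packed into one word */

    /* Illustrative bit assignments only; the real decoder defines its own. */
    #define PFX_66   (1u << 0)              /* 0x66 operand-size override */
    #define PFX_F2   (1u << 1)              /* 0xF2 (REPNE) */
    #define PFX_F3   (1u << 2)              /* 0xF3 (REP/REPE) */
    #define PFX_REXW (1u << 3)              /* REX.W */

    /* Single-bit queries: test one flag. */
    static int haveF2  ( Prefix pfx ) { return (pfx & PFX_F2) != 0; }
    static int getRexW ( Prefix pfx ) { return (pfx & PFX_REXW) ? 1 : 0; }

    /* Combined queries: mask several flags and compare against the one
       acceptable combination, as have66noF2noF3 and friends do. */
    static int haveF2no66noF3 ( Prefix pfx ) {
       return (pfx & (PFX_66|PFX_F2|PFX_F3)) == PFX_F2;
    }

    int main ( void ) {
       Prefix pfx = PFX_F2 | PFX_REXW;      /* as if the insn carried F2 and REX.W */
       printf("haveF2=%d rexW=%d F2-only=%d\n",
              haveF2(pfx), getRexW(pfx), haveF2no66noF3(pfx));
       return 0;
    }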
1157 static IRExpr* getIReg64rexX ( Prefix pfx, UInt lo3bits ) in getIReg64rexX() argument
1160 vassert(IS_VALID_PFX(pfx)); in getIReg64rexX()
1161 return getIReg64( lo3bits | (getRexX(pfx) << 3) ); in getIReg64rexX()
1164 static const HChar* nameIReg64rexX ( Prefix pfx, UInt lo3bits ) in nameIReg64rexX() argument
1167 vassert(IS_VALID_PFX(pfx)); in nameIReg64rexX()
1168 return nameIReg( 8, lo3bits | (getRexX(pfx) << 3), False ); in nameIReg64rexX()
1171 static const HChar* nameIRegRexB ( Int sz, Prefix pfx, UInt lo3bits ) in nameIRegRexB() argument
1174 vassert(IS_VALID_PFX(pfx)); in nameIRegRexB()
1176 return nameIReg( sz, lo3bits | (getRexB(pfx) << 3), in nameIRegRexB()
1177 toBool(sz==1 && !haveREX(pfx)) ); in nameIRegRexB()
1180 static IRExpr* getIRegRexB ( Int sz, Prefix pfx, UInt lo3bits ) in getIRegRexB() argument
1183 vassert(IS_VALID_PFX(pfx)); in getIRegRexB()
1189 offsetIReg( sz, lo3bits | (getRexB(pfx) << 3), in getIRegRexB()
1196 offsetIReg( sz, lo3bits | (getRexB(pfx) << 3), in getIRegRexB()
1197 toBool(sz==1 && !haveREX(pfx)) ), in getIRegRexB()
1203 static void putIRegRexB ( Int sz, Prefix pfx, UInt lo3bits, IRExpr* e ) in putIRegRexB() argument
1206 vassert(IS_VALID_PFX(pfx)); in putIRegRexB()
1210 offsetIReg( sz, lo3bits | (getRexB(pfx) << 3), in putIRegRexB()
1211 toBool(sz==1 && !haveREX(pfx)) ), in putIRegRexB()
1225 static UInt gregOfRexRM ( Prefix pfx, UChar mod_reg_rm ) in gregOfRexRM() argument
1228 reg += (pfx & PFX_REXR) ? 8 : 0; in gregOfRexRM()
1237 static UInt eregOfRexRM ( Prefix pfx, UChar mod_reg_rm ) in eregOfRexRM() argument
1242 rm += (pfx & PFX_REXB) ? 8 : 0; in eregOfRexRM()
1253 static UInt offsetIRegG ( Int sz, Prefix pfx, UChar mod_reg_rm ) in offsetIRegG() argument
1257 vassert(IS_VALID_PFX(pfx)); in offsetIRegG()
1259 reg = gregOfRexRM( pfx, mod_reg_rm ); in offsetIRegG()
1260 return offsetIReg( sz, reg, toBool(sz == 1 && !haveREX(pfx)) ); in offsetIRegG()
1264 IRExpr* getIRegG ( Int sz, Prefix pfx, UChar mod_reg_rm ) in getIRegG() argument
1269 IRExpr_Get( offsetIRegG( sz, pfx, mod_reg_rm ), in getIRegG()
1272 return IRExpr_Get( offsetIRegG( sz, pfx, mod_reg_rm ), in getIRegG()
1278 void putIRegG ( Int sz, Prefix pfx, UChar mod_reg_rm, IRExpr* e ) in putIRegG() argument
1284 stmt( IRStmt_Put( offsetIRegG( sz, pfx, mod_reg_rm ), e ) ); in putIRegG()
1288 const HChar* nameIRegG ( Int sz, Prefix pfx, UChar mod_reg_rm ) in nameIRegG() argument
1290 return nameIReg( sz, gregOfRexRM(pfx,mod_reg_rm), in nameIRegG()
1291 toBool(sz==1 && !haveREX(pfx)) ); in nameIRegG()
1296 IRExpr* getIRegV ( Int sz, Prefix pfx ) in getIRegV() argument
1301 IRExpr_Get( offsetIReg( sz, getVexNvvvv(pfx), False ), in getIRegV()
1304 return IRExpr_Get( offsetIReg( sz, getVexNvvvv(pfx), False ), in getIRegV()
1310 void putIRegV ( Int sz, Prefix pfx, IRExpr* e ) in putIRegV() argument
1316 stmt( IRStmt_Put( offsetIReg( sz, getVexNvvvv(pfx), False ), e ) ); in putIRegV()
1320 const HChar* nameIRegV ( Int sz, Prefix pfx ) in nameIRegV() argument
1322 return nameIReg( sz, getVexNvvvv(pfx), False ); in nameIRegV()
1332 static UInt offsetIRegE ( Int sz, Prefix pfx, UChar mod_reg_rm ) in offsetIRegE() argument
1336 vassert(IS_VALID_PFX(pfx)); in offsetIRegE()
1338 reg = eregOfRexRM( pfx, mod_reg_rm ); in offsetIRegE()
1339 return offsetIReg( sz, reg, toBool(sz == 1 && !haveREX(pfx)) ); in offsetIRegE()
1343 IRExpr* getIRegE ( Int sz, Prefix pfx, UChar mod_reg_rm ) in getIRegE() argument
1348 IRExpr_Get( offsetIRegE( sz, pfx, mod_reg_rm ), in getIRegE()
1351 return IRExpr_Get( offsetIRegE( sz, pfx, mod_reg_rm ), in getIRegE()
1357 void putIRegE ( Int sz, Prefix pfx, UChar mod_reg_rm, IRExpr* e ) in putIRegE() argument
1363 stmt( IRStmt_Put( offsetIRegE( sz, pfx, mod_reg_rm ), e ) ); in putIRegE()
1367 const HChar* nameIRegE ( Int sz, Prefix pfx, UChar mod_reg_rm ) in nameIRegE() argument
1369 return nameIReg( sz, eregOfRexRM(pfx,mod_reg_rm), in nameIRegE()
1370 toBool(sz==1 && !haveREX(pfx)) ); in nameIRegE()
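gregOfRexRM and eregOfRexRM above show how the G (reg) and E (rm) register numbers are formed: the 3-bit ModRM fields are widened to 4 bits with REX.R and REX.B, and the getIRegG/getIRegE family simply wraps those numbers. A minimal sketch of that extraction, again with illustrative flag values:

    #include <assert.h>
    #include <stdio.h>

    typedef unsigned int Prefix;
    #define PFX_REXR (1u << 0)    /* illustrative bit positions only */
    #define PFX_REXB (1u << 1)

    /* ModRM is mod(2) | reg(3) | rm(3).  REX.R extends reg, REX.B extends rm,
       giving the 4-bit register numbers used by the IRegG/IRegE accessors. */
    static unsigned gregOfRexRM ( Prefix pfx, unsigned char modrm ) {
       unsigned reg = (modrm >> 3) & 7;
       reg += (pfx & PFX_REXR) ? 8 : 0;
       return reg;
    }
    static unsigned eregOfRexRM ( Prefix pfx, unsigned char modrm ) {
       unsigned rm = modrm & 7;
       rm += (pfx & PFX_REXB) ? 8 : 0;
       return rm;
    }

    int main ( void ) {
       unsigned char modrm = 0xCA;                   /* mod=3, reg=1, rm=2 */
       assert(gregOfRexRM(0, modrm) == 1);
       assert(eregOfRexRM(PFX_REXB, modrm) == 10);   /* rm extended to r10 */
       printf("ok\n");
       return 0;
    }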
2306 const HChar* segRegTxt ( Prefix pfx ) in segRegTxt() argument
2308 if (pfx & PFX_CS) return "%cs:"; in segRegTxt()
2309 if (pfx & PFX_DS) return "%ds:"; in segRegTxt()
2310 if (pfx & PFX_ES) return "%es:"; in segRegTxt()
2311 if (pfx & PFX_FS) return "%fs:"; in segRegTxt()
2312 if (pfx & PFX_GS) return "%gs:"; in segRegTxt()
2313 if (pfx & PFX_SS) return "%ss:"; in segRegTxt()
2324 Prefix pfx, IRExpr* virtual ) in handleAddrOverrides() argument
2331 if (pfx & PFX_FS) { in handleAddrOverrides()
2341 if (pfx & PFX_GS) { in handleAddrOverrides()
2354 if (haveASO(pfx)) in handleAddrOverrides()
2448 const VexAbiInfo* vbi, Prefix pfx, Long delta, in disAMode() argument
2472 DIS(buf, "%s(%s)", segRegTxt(pfx), nameIRegRexB(8,pfx,rm)); in disAMode()
2475 handleAddrOverrides(vbi, pfx, getIRegRexB(8,pfx,rm))); in disAMode()
2486 DIS(buf, "%s(%s)", segRegTxt(pfx), nameIRegRexB(8,pfx,rm)); in disAMode()
2488 DIS(buf, "%s%lld(%s)", segRegTxt(pfx), d, nameIRegRexB(8,pfx,rm)); in disAMode()
2492 handleAddrOverrides(vbi, pfx, in disAMode()
2493 binop(Iop_Add64,getIRegRexB(8,pfx,rm),mkU64(d)))); in disAMode()
2503 DIS(buf, "%s%lld(%s)", segRegTxt(pfx), d, nameIRegRexB(8,pfx,rm)); in disAMode()
2506 handleAddrOverrides(vbi, pfx, in disAMode()
2507 binop(Iop_Add64,getIRegRexB(8,pfx,rm),mkU64(d)))); in disAMode()
2521 DIS(buf, "%s%lld(%%rip)", segRegTxt(pfx), d); in disAMode()
2531 handleAddrOverrides(vbi, pfx, in disAMode()
2559 Bool index_is_SP = toBool(index_r == R_RSP && 0==getRexX(pfx)); in disAMode()
2564 DIS(buf, "%s(%s,%s)", segRegTxt(pfx), in disAMode()
2565 nameIRegRexB(8,pfx,base_r), in disAMode()
2566 nameIReg64rexX(pfx,index_r)); in disAMode()
2568 DIS(buf, "%s(%s,%s,%d)", segRegTxt(pfx), in disAMode()
2569 nameIRegRexB(8,pfx,base_r), in disAMode()
2570 nameIReg64rexX(pfx,index_r), 1<<scale); in disAMode()
2575 handleAddrOverrides(vbi, pfx, in disAMode()
2577 getIRegRexB(8,pfx,base_r), in disAMode()
2578 binop(Iop_Shl64, getIReg64rexX(pfx,index_r), in disAMode()
2584 DIS(buf, "%s%lld(,%s,%d)", segRegTxt(pfx), d, in disAMode()
2585 nameIReg64rexX(pfx,index_r), 1<<scale); in disAMode()
2589 handleAddrOverrides(vbi, pfx, in disAMode()
2591 binop(Iop_Shl64, getIReg64rexX(pfx,index_r), in disAMode()
2597 DIS(buf, "%s(%s)", segRegTxt(pfx), nameIRegRexB(8,pfx,base_r)); in disAMode()
2600 handleAddrOverrides(vbi, pfx, getIRegRexB(8,pfx,base_r))); in disAMode()
2605 DIS(buf, "%s%lld", segRegTxt(pfx), d); in disAMode()
2608 handleAddrOverrides(vbi, pfx, mkU64(d))); in disAMode()
2630 if (index_r == R_RSP && 0==getRexX(pfx)) { in disAMode()
2631 DIS(buf, "%s%lld(%s)", segRegTxt(pfx), in disAMode()
2632 d, nameIRegRexB(8,pfx,base_r)); in disAMode()
2635 handleAddrOverrides(vbi, pfx, in disAMode()
2636 binop(Iop_Add64, getIRegRexB(8,pfx,base_r), mkU64(d)) )); in disAMode()
2639 DIS(buf, "%s%lld(%s,%s)", segRegTxt(pfx), d, in disAMode()
2640 nameIRegRexB(8,pfx,base_r), in disAMode()
2641 nameIReg64rexX(pfx,index_r)); in disAMode()
2643 DIS(buf, "%s%lld(%s,%s,%d)", segRegTxt(pfx), d, in disAMode()
2644 nameIRegRexB(8,pfx,base_r), in disAMode()
2645 nameIReg64rexX(pfx,index_r), 1<<scale); in disAMode()
2650 handleAddrOverrides(vbi, pfx, in disAMode()
2653 getIRegRexB(8,pfx,base_r), in disAMode()
2655 getIReg64rexX(pfx,index_r), mkU8(scale))), in disAMode()
2677 if (index_r == R_RSP && 0==getRexX(pfx)) { in disAMode()
2678 DIS(buf, "%s%lld(%s)", segRegTxt(pfx), in disAMode()
2679 d, nameIRegRexB(8,pfx,base_r)); in disAMode()
2682 handleAddrOverrides(vbi, pfx, in disAMode()
2683 binop(Iop_Add64, getIRegRexB(8,pfx,base_r), mkU64(d)) )); in disAMode()
2686 DIS(buf, "%s%lld(%s,%s)", segRegTxt(pfx), d, in disAMode()
2687 nameIRegRexB(8,pfx,base_r), in disAMode()
2688 nameIReg64rexX(pfx,index_r)); in disAMode()
2690 DIS(buf, "%s%lld(%s,%s,%d)", segRegTxt(pfx), d, in disAMode()
2691 nameIRegRexB(8,pfx,base_r), in disAMode()
2692 nameIReg64rexX(pfx,index_r), 1<<scale); in disAMode()
2697 handleAddrOverrides(vbi, pfx, in disAMode()
2700 getIRegRexB(8,pfx,base_r), in disAMode()
2702 getIReg64rexX(pfx,index_r), mkU8(scale))), in disAMode()
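All of the amodes disAMode builds above come down to the usual x86-64 effective-address sum, base + (index << scale) + disp, wrapped by handleAddrOverrides for segment and address-size overrides, with REX.B and REX.X supplying the high bits of the base and index register numbers. A plain-integer sketch of that arithmetic, with the overrides and the IR machinery omitted:

    #include <stdint.h>
    #include <stdio.h>

    /* Integer model of the address disAMode assembles as IR:
       base + (index << scale) + displacement. */
    static uint64_t effective_addr ( uint64_t base, uint64_t index,
                                     unsigned scale /* 0..3 */, int64_t disp ) {
       return base + (index << scale) + (uint64_t)disp;
    }

    int main ( void ) {
       /* e.g. -8(%rax,%rcx,4) with rax=0x1000, rcx=3 */
       uint64_t ea = effective_addr(0x1000, 3, 2, -8);
       printf("ea = 0x%llx\n", (unsigned long long)ea);   /* 0x1004 */
       return 0;
    }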
2720 const VexAbiInfo* vbi, Prefix pfx, Long delta, in disAVSIBMode() argument
2744 *rI = index_r | (getRexX(pfx) << 3); in disAVSIBMode()
2757 DIS(buf, "%s%lld(,%s)", segRegTxt(pfx), d, vindex); in disAVSIBMode()
2759 DIS(buf, "%s%lld(,%s,%d)", segRegTxt(pfx), d, vindex, 1<<scale); in disAVSIBMode()
2764 DIS(buf, "%s(%s,%s)", segRegTxt(pfx), in disAVSIBMode()
2765 nameIRegRexB(8,pfx,base_r), vindex); in disAVSIBMode()
2767 DIS(buf, "%s(%s,%s,%d)", segRegTxt(pfx), in disAVSIBMode()
2768 nameIRegRexB(8,pfx,base_r), vindex, 1<<scale); in disAVSIBMode()
2781 DIS(buf, "%s%lld(%s,%s)", segRegTxt(pfx), d, in disAVSIBMode()
2782 nameIRegRexB(8,pfx,base_r), vindex); in disAVSIBMode()
2784 DIS(buf, "%s%lld(%s,%s,%d)", segRegTxt(pfx), d, in disAVSIBMode()
2785 nameIRegRexB(8,pfx,base_r), vindex, 1<<scale); in disAVSIBMode()
2791 return disAMode_copy2tmp( getIRegRexB(8,pfx,base_r) ); in disAVSIBMode()
2792 return disAMode_copy2tmp( binop(Iop_Add64, getIRegRexB(8,pfx,base_r), in disAVSIBMode()
2801 static UInt lengthAMode ( Prefix pfx, Long delta ) in lengthAMode() argument
2909 Prefix pfx, in dis_op2_E_G() argument
2939 && offsetIRegG(size,pfx,rm) == offsetIRegE(size,pfx,rm)) { in dis_op2_E_G()
2942 putIRegG(size,pfx,rm, mkU(ty,0)); in dis_op2_E_G()
2945 assign( dst0, getIRegG(size,pfx,rm) ); in dis_op2_E_G()
2946 assign( src, getIRegE(size,pfx,rm) ); in dis_op2_E_G()
2951 putIRegG(size, pfx, rm, mkexpr(dst1)); in dis_op2_E_G()
2956 putIRegG(size, pfx, rm, mkexpr(dst1)); in dis_op2_E_G()
2964 putIRegG(size, pfx, rm, mkexpr(dst1)); in dis_op2_E_G()
2968 nameIRegE(size,pfx,rm), in dis_op2_E_G()
2969 nameIRegG(size,pfx,rm)); in dis_op2_E_G()
2973 addr = disAMode ( &len, vbi, pfx, delta0, dis_buf, 0 ); in dis_op2_E_G()
2974 assign( dst0, getIRegG(size,pfx,rm) ); in dis_op2_E_G()
2980 putIRegG(size, pfx, rm, mkexpr(dst1)); in dis_op2_E_G()
2985 putIRegG(size, pfx, rm, mkexpr(dst1)); in dis_op2_E_G()
2993 putIRegG(size, pfx, rm, mkexpr(dst1)); in dis_op2_E_G()
2997 dis_buf, nameIRegG(size, pfx, rm)); in dis_op2_E_G()
3025 Prefix pfx, in dis_op2_G_E() argument
3055 && offsetIRegG(size,pfx,rm) == offsetIRegE(size,pfx,rm)) { in dis_op2_G_E()
3056 putIRegE(size,pfx,rm, mkU(ty,0)); in dis_op2_G_E()
3059 assign(dst0, getIRegE(size,pfx,rm)); in dis_op2_G_E()
3060 assign(src, getIRegG(size,pfx,rm)); in dis_op2_G_E()
3065 putIRegE(size, pfx, rm, mkexpr(dst1)); in dis_op2_G_E()
3070 putIRegE(size, pfx, rm, mkexpr(dst1)); in dis_op2_G_E()
3078 putIRegE(size, pfx, rm, mkexpr(dst1)); in dis_op2_G_E()
3082 nameIRegG(size,pfx,rm), in dis_op2_G_E()
3083 nameIRegE(size,pfx,rm)); in dis_op2_G_E()
3089 addr = disAMode ( &len, vbi, pfx, delta0, dis_buf, 0 ); in dis_op2_G_E()
3091 assign(src, getIRegG(size,pfx,rm)); in dis_op2_G_E()
3094 if (haveLOCK(pfx)) { in dis_op2_G_E()
3105 if (haveLOCK(pfx)) { in dis_op2_G_E()
3117 if (haveLOCK(pfx)) { in dis_op2_G_E()
3134 nameIRegG(size,pfx,rm), dis_buf); in dis_op2_G_E()
3158 Prefix pfx, in dis_mov_E_G() argument
3167 putIRegG(size, pfx, rm, getIRegE(size, pfx, rm)); in dis_mov_E_G()
3169 nameIRegE(size,pfx,rm), in dis_mov_E_G()
3170 nameIRegG(size,pfx,rm)); in dis_mov_E_G()
3176 IRTemp addr = disAMode ( &len, vbi, pfx, delta0, dis_buf, 0 ); in dis_mov_E_G()
3177 putIRegG(size, pfx, rm, loadLE(szToITy(size), mkexpr(addr))); in dis_mov_E_G()
3180 nameIRegG(size,pfx,rm)); in dis_mov_E_G()
3205 Prefix pfx, in dis_mov_G_E() argument
3217 if (haveF2orF3(pfx)) { *ok = False; return delta0; } in dis_mov_G_E()
3218 putIRegE(size, pfx, rm, getIRegG(size, pfx, rm)); in dis_mov_G_E()
3220 nameIRegG(size,pfx,rm), in dis_mov_G_E()
3221 nameIRegE(size,pfx,rm)); in dis_mov_G_E()
3227 if (haveF2(pfx)) { *ok = False; return delta0; } in dis_mov_G_E()
3229 IRTemp addr = disAMode ( &len, vbi, pfx, delta0, dis_buf, 0 ); in dis_mov_G_E()
3230 storeLE( mkexpr(addr), getIRegG(size, pfx, rm) ); in dis_mov_G_E()
3232 nameIRegG(size,pfx,rm), in dis_mov_G_E()
3292 Prefix pfx, in dis_movx_E_G() argument
3297 putIRegG(szd, pfx, rm, in dis_movx_E_G()
3300 getIRegE(szs,pfx,rm))); in dis_movx_E_G()
3304 nameIRegE(szs,pfx,rm), in dis_movx_E_G()
3305 nameIRegG(szd,pfx,rm)); in dis_movx_E_G()
3313 IRTemp addr = disAMode ( &len, vbi, pfx, delta, dis_buf, 0 ); in dis_movx_E_G()
3314 putIRegG(szd, pfx, rm, in dis_movx_E_G()
3322 nameIRegG(szd,pfx,rm)); in dis_movx_E_G()
3396 Prefix pfx, in dis_Grp1() argument
3423 assign(dst0, getIRegE(sz,pfx,modrm)); in dis_Grp1()
3442 putIRegE(sz, pfx, modrm, mkexpr(dst1)); in dis_Grp1()
3447 nameIRegE(sz,pfx,modrm)); in dis_Grp1()
3449 addr = disAMode ( &len, vbi, pfx, delta, dis_buf, /*xtra*/d_sz ); in dis_Grp1()
3455 if (haveLOCK(pfx)) { in dis_Grp1()
3466 if (haveLOCK(pfx)) { in dis_Grp1()
3478 if (haveLOCK(pfx)) { in dis_Grp1()
3506 Prefix pfx, in dis_Grp2() argument
3526 assign(dst0, getIRegE(sz, pfx, modrm)); in dis_Grp2()
3529 addr = disAMode ( &len, vbi, pfx, delta, dis_buf, /*xtra*/d_sz ); in dis_Grp2()
3751 putIRegE(sz, pfx, modrm, mkexpr(dst1)); in dis_Grp2()
3759 vex_printf(", %s\n", nameIRegE(sz,pfx,modrm)); in dis_Grp2()
3780 Prefix pfx, in dis_Grp8_Imm() argument
3801 if (haveF2orF3(pfx)) { in dis_Grp8_Imm()
3808 if (haveF2orF3(pfx)) { in dis_Grp8_Imm()
3809 if (haveF2andF3(pfx) || !haveLOCK(pfx)) { in dis_Grp8_Imm()
3843 assign( t2, widenUto64(getIRegE(sz, pfx, modrm)) ); in dis_Grp8_Imm()
3847 src_val, nameIRegE(sz,pfx,modrm)); in dis_Grp8_Imm()
3850 t_addr = disAMode ( &len, vbi, pfx, delta, dis_buf, 1 ); in dis_Grp8_Imm()
3879 putIRegE(sz, pfx, modrm, narrowTo(ty, mkexpr(t2m))); in dis_Grp8_Imm()
3881 if (haveLOCK(pfx)) { in dis_Grp8_Imm()
3990 Prefix pfx, Int sz, Long delta, Bool* decode_OK ) in dis_Grp3() argument
4004 if (haveF2orF3(pfx)) goto unhandled; in dis_Grp3()
4012 getIRegE(sz,pfx,modrm), in dis_Grp3()
4017 nameIRegE(sz, pfx, modrm)); in dis_Grp3()
4025 putIRegE(sz, pfx, modrm, in dis_Grp3()
4027 getIRegE(sz, pfx, modrm))); in dis_Grp3()
4029 nameIRegE(sz, pfx, modrm)); in dis_Grp3()
4037 assign(src, getIRegE(sz, pfx, modrm)); in dis_Grp3()
4041 putIRegE(sz, pfx, modrm, mkexpr(dst1)); in dis_Grp3()
4042 DIP("neg%c %s\n", nameISize(sz), nameIRegE(sz, pfx, modrm)); in dis_Grp3()
4047 assign(src, getIRegE(sz,pfx,modrm)); in dis_Grp3()
4049 nameIRegE(sz,pfx,modrm) ); in dis_Grp3()
4054 assign(src, getIRegE(sz,pfx,modrm)); in dis_Grp3()
4056 nameIRegE(sz,pfx,modrm) ); in dis_Grp3()
4060 assign( t1, getIRegE(sz, pfx, modrm) ); in dis_Grp3()
4063 nameIRegE(sz, pfx, modrm)); in dis_Grp3()
4067 assign( t1, getIRegE(sz, pfx, modrm) ); in dis_Grp3()
4070 nameIRegE(sz, pfx, modrm)); in dis_Grp3()
4078 Bool validF2orF3 = haveF2orF3(pfx) ? False : True; in dis_Grp3()
4080 && haveF2orF3(pfx) && !haveF2andF3(pfx) && haveLOCK(pfx)) { in dis_Grp3()
4085 addr = disAMode ( &len, vbi, pfx, delta, dis_buf, in dis_Grp3()
4113 if (haveLOCK(pfx)) { in dis_Grp3()
4129 if (haveLOCK(pfx)) { in dis_Grp3()
4168 Prefix pfx, Long delta, Bool* decode_OK ) in dis_Grp4() argument
4182 if (haveF2orF3(pfx)) goto unhandled; in dis_Grp4()
4183 assign(t1, getIRegE(1, pfx, modrm)); in dis_Grp4()
4187 putIRegE(1, pfx, modrm, mkexpr(t2)); in dis_Grp4()
4192 putIRegE(1, pfx, modrm, mkexpr(t2)); in dis_Grp4()
4201 nameIRegE(1, pfx, modrm)); in dis_Grp4()
4204 Bool validF2orF3 = haveF2orF3(pfx) ? False : True; in dis_Grp4()
4206 && haveF2orF3(pfx) && !haveF2andF3(pfx) && haveLOCK(pfx)) { in dis_Grp4()
4211 IRTemp addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_Grp4()
4216 if (haveLOCK(pfx)) { in dis_Grp4()
4226 if (haveLOCK(pfx)) { in dis_Grp4()
4252 Prefix pfx, Int sz, Long delta, in dis_Grp5() argument
4271 if (haveF2orF3(pfx) in dis_Grp5()
4272 && ! (haveF2(pfx) in dis_Grp5()
4275 assign(t1, getIRegE(sz,pfx,modrm)); in dis_Grp5()
4282 putIRegE(sz,pfx,modrm, mkexpr(t2)); in dis_Grp5()
4289 putIRegE(sz,pfx,modrm, mkexpr(t2)); in dis_Grp5()
4294 if (haveF2(pfx)) DIP("bnd ; "); /* MPX bnd prefix. */ in dis_Grp5()
4297 assign(t3, getIRegE(sz,pfx,modrm)); in dis_Grp5()
4310 if (haveF2(pfx)) DIP("bnd ; "); /* MPX bnd prefix. */ in dis_Grp5()
4313 assign(t3, getIRegE(sz,pfx,modrm)); in dis_Grp5()
4324 assign(t3, getIRegE(sz,pfx,modrm)); in dis_Grp5()
4341 nameIRegE(sz, pfx, modrm)); in dis_Grp5()
4344 Bool validF2orF3 = haveF2orF3(pfx) ? False : True; in dis_Grp5()
4346 && haveF2orF3(pfx) && !haveF2andF3(pfx) && haveLOCK(pfx)) { in dis_Grp5()
4349 && (haveF2(pfx) && !haveF3(pfx))) { in dis_Grp5()
4354 addr = disAMode ( &len, vbi, pfx, delta, dis_buf, 0 ); in dis_Grp5()
4364 if (haveLOCK(pfx)) { in dis_Grp5()
4376 if (haveLOCK(pfx)) { in dis_Grp5()
4387 if (haveF2(pfx)) DIP("bnd ; "); /* MPX bnd prefix. */ in dis_Grp5()
4403 if (haveF2(pfx)) DIP("bnd ; "); /* MPX bnd prefix. */ in dis_Grp5()
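dis_Grp3, dis_Grp4 and dis_Grp5 above repeat one prefix-validity idiom for their memory forms: F2/F3 are normally refused, but a lone F2 or F3 together with LOCK is let through, presumably for the XACQUIRE/XRELEASE encodings. A boiled-down sketch of that test:

    #include <stdbool.h>
    #include <stdio.h>

    /* Sketch of the validity test used by the Grp3/Grp4/Grp5 memory cases:
       F2/F3 are rejected unless exactly one of them appears together with LOCK. */
    static bool f2f3_ok_for_locked_mem ( bool haveF2, bool haveF3, bool haveLOCK ) {
       bool valid = !(haveF2 || haveF3);              /* default: no F2/F3 at all */
       if ((haveF2 || haveF3) && !(haveF2 && haveF3) && haveLOCK)
          valid = true;                               /* lone F2 or F3 plus LOCK */
       return valid;
    }

    int main ( void ) {
       printf("%d %d %d\n",
              f2f3_ok_for_locked_mem(false, false, false),   /* 1 */
              f2f3_ok_for_locked_mem(true,  false, true ),   /* 1 */
              f2f3_ok_for_locked_mem(true,  true,  true ));  /* 0 */
       return 0;
    }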
4463 void dis_string_op( void (*dis_OP)( Int, IRTemp, Prefix pfx ), in dis_string_op() argument
4464 Int sz, const HChar* name, Prefix pfx ) in dis_string_op()
4469 vassert(pfx == clearSegBits(pfx)); in dis_string_op()
4471 dis_OP( sz, t_inc, pfx ); in dis_string_op()
4476 void dis_MOVS ( Int sz, IRTemp t_inc, Prefix pfx ) in dis_MOVS() argument
4483 if (haveASO(pfx)) { in dis_MOVS()
4495 if (haveASO(pfx)) { in dis_MOVS()
4504 void dis_LODS ( Int sz, IRTemp t_inc, Prefix pfx ) in dis_LODS() argument
4510 if (haveASO(pfx)) in dis_LODS()
4518 if (haveASO(pfx)) in dis_LODS()
4524 void dis_STOS ( Int sz, IRTemp t_inc, Prefix pfx ) in dis_STOS() argument
4533 if (haveASO(pfx)) in dis_STOS()
4541 if (haveASO(pfx)) in dis_STOS()
4547 void dis_CMPS ( Int sz, IRTemp t_inc, Prefix pfx ) in dis_CMPS() argument
4556 if (haveASO(pfx)) { in dis_CMPS()
4572 if (haveASO(pfx)) { in dis_CMPS()
4581 void dis_SCAS ( Int sz, IRTemp t_inc, Prefix pfx ) in dis_SCAS() argument
4591 if (haveASO(pfx)) in dis_SCAS()
4601 if (haveASO(pfx)) in dis_SCAS()
4615 Prefix pfx ) in dis_REP_op() argument
4623 vassert(pfx == clearSegBits(pfx)); in dis_REP_op()
4625 if (haveASO(pfx)) { in dis_REP_op()
4638 if (haveASO(pfx)) in dis_REP_op()
4644 dis_OP (sz, t_inc, pfx); in dis_REP_op()
4668 Prefix pfx, in dis_mul_E_G() argument
4680 assign( tg, getIRegG(size, pfx, rm) ); in dis_mul_E_G()
4682 assign( te, getIRegE(size, pfx, rm) ); in dis_mul_E_G()
4684 IRTemp addr = disAMode( &alen, vbi, pfx, delta0, dis_buf, 0 ); in dis_mul_E_G()
4692 putIRegG(size, pfx, rm, mkexpr(resLo) ); in dis_mul_E_G()
4696 nameIRegE(size,pfx,rm), in dis_mul_E_G()
4697 nameIRegG(size,pfx,rm)); in dis_mul_E_G()
4702 nameIRegG(size,pfx,rm)); in dis_mul_E_G()
4711 Prefix pfx, in dis_imul_I_E_G() argument
4728 assign(te, getIRegE(size, pfx, rm)); in dis_imul_I_E_G()
4731 IRTemp addr = disAMode( &alen, vbi, pfx, delta, dis_buf, in dis_imul_I_E_G()
4746 putIRegG(size, pfx, rm, mkexpr(resLo)); in dis_imul_I_E_G()
4750 ( epartIsReg(rm) ? nameIRegE(size,pfx,rm) : dis_buf ), in dis_imul_I_E_G()
4751 nameIRegG(size,pfx,rm) ); in dis_imul_I_E_G()
5337 const VexAbiInfo* vbi, Prefix pfx, Long delta ) in dis_FPU() argument
5356 IRTemp addr = disAMode( &len, vbi, pfx, delta, dis_buf, 0 ); in dis_FPU()
5499 IRTemp addr = disAMode( &len, vbi, pfx, delta, dis_buf, 0 ); in dis_FPU()
6028 IRTemp addr = disAMode( &len, vbi, pfx, delta, dis_buf, 0 ); in dis_FPU()
6156 IRTemp addr = disAMode( &len, vbi, pfx, delta, dis_buf, 0 ); in dis_FPU()
6368 IRTemp addr = disAMode( &len, vbi, pfx, delta, dis_buf, 0 ); in dis_FPU()
6477 IRTemp addr = disAMode( &len, vbi, pfx, delta, dis_buf, 0 ); in dis_FPU()
6510 if ( have66(pfx) ) { in dis_FPU()
6581 if ( have66(pfx) ) { in dis_FPU()
6591 if ( have66(pfx) ) { in dis_FPU()
6646 if ( have66(pfx) ) { in dis_FPU()
6740 IRTemp addr = disAMode( &len, vbi, pfx, delta, dis_buf, 0 ); in dis_FPU()
6860 IRTemp addr = disAMode( &len, vbi, pfx, delta, dis_buf, 0 ); in dis_FPU()
7037 Prefix pfx, in dis_MMXop_regmem_to_reg() argument
7143 IRTemp addr = disAMode( &len, vbi, pfx, delta, dis_buf, 0 ); in dis_MMXop_regmem_to_reg()
7187 Prefix pfx, Long delta, in dis_MMX_shiftG_byE() argument
7207 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_MMX_shiftG_byE()
7319 const VexAbiInfo* vbi, Prefix pfx, Int sz, Long delta ) in dis_MMX() argument
7342 getIReg32(eregOfRexRM(pfx,modrm)) ) ); in dis_MMX()
7344 nameIReg32(eregOfRexRM(pfx,modrm)), in dis_MMX()
7347 IRTemp addr = disAMode( &len, vbi, pfx, delta, dis_buf, 0 ); in dis_MMX()
7364 getIReg64(eregOfRexRM(pfx,modrm)) ); in dis_MMX()
7366 nameIReg64(eregOfRexRM(pfx,modrm)), in dis_MMX()
7369 IRTemp addr = disAMode( &len, vbi, pfx, delta, dis_buf, 0 ); in dis_MMX()
7387 putIReg32( eregOfRexRM(pfx,modrm), in dis_MMX()
7391 nameIReg32(eregOfRexRM(pfx,modrm))); in dis_MMX()
7393 IRTemp addr = disAMode( &len, vbi, pfx, delta, dis_buf, 0 ); in dis_MMX()
7406 putIReg64( eregOfRexRM(pfx,modrm), in dis_MMX()
7410 nameIReg64(eregOfRexRM(pfx,modrm))); in dis_MMX()
7412 IRTemp addr = disAMode( &len, vbi, pfx, delta, dis_buf, 0 ); in dis_MMX()
7426 && /*ignore redundant REX.W*/!(sz==8 && haveNo66noF2noF3(pfx))) in dis_MMX()
7436 IRTemp addr = disAMode( &len, vbi, pfx, delta, dis_buf, 0 ); in dis_MMX()
7447 && /*ignore redundant REX.W*/!(sz==8 && haveNo66noF2noF3(pfx))) in dis_MMX()
7457 IRTemp addr = disAMode( &len, vbi, pfx, delta, dis_buf, 0 ); in dis_MMX()
7470 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "padd", True ); in dis_MMX()
7476 && /*ignore redundant REX.W*/!(sz==8 && haveNo66noF2noF3(pfx))) in dis_MMX()
7478 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "padds", True ); in dis_MMX()
7485 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "paddus", True ); in dis_MMX()
7493 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "psub", True ); in dis_MMX()
7500 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "psubs", True ); in dis_MMX()
7507 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "psubus", True ); in dis_MMX()
7513 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "pmulhw", False ); in dis_MMX()
7519 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "pmullw", False ); in dis_MMX()
7524 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "pmaddwd", False ); in dis_MMX()
7532 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "pcmpeq", True ); in dis_MMX()
7540 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "pcmpgt", True ); in dis_MMX()
7546 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "packssdw", False ); in dis_MMX()
7552 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "packsswb", False ); in dis_MMX()
7558 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "packuswb", False ); in dis_MMX()
7565 && /*ignore redundant REX.W*/!(sz==8 && haveNo66noF2noF3(pfx))) in dis_MMX()
7567 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "punpckh", True ); in dis_MMX()
7574 && /*ignore redundant REX.W*/!(sz==8 && haveNo66noF2noF3(pfx))) in dis_MMX()
7576 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "punpckl", True ); in dis_MMX()
7582 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "pand", False ); in dis_MMX()
7588 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "pandn", False ); in dis_MMX()
7594 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "por", False ); in dis_MMX()
7600 delta = dis_MMXop_regmem_to_reg ( vbi, pfx, delta, opc, "pxor", False ); in dis_MMX()
7604 delta = dis_MMX_shiftG_byE(vbi, pfx, delta, _name, _op); \ in dis_MMX()
7675 assign( addr, handleAddrOverrides( vbi, pfx, getIReg64(R_RDI) )); in dis_MMX()
7757 Prefix pfx, in dis_SHLRD_Gv_Ev() argument
7799 assign( gsrc, getIRegG(sz, pfx, modrm) ); in dis_SHLRD_Gv_Ev()
7803 assign( esrc, getIRegE(sz, pfx, modrm) ); in dis_SHLRD_Gv_Ev()
7807 nameIRegG(sz, pfx, modrm), nameIRegE(sz, pfx, modrm)); in dis_SHLRD_Gv_Ev()
7809 addr = disAMode ( &len, vbi, pfx, delta, dis_buf, in dis_SHLRD_Gv_Ev()
7817 nameIRegG(sz, pfx, modrm), dis_buf); in dis_SHLRD_Gv_Ev()
7913 putIRegE(sz, pfx, modrm, mkexpr(resTy)); in dis_SHLRD_Gv_Ev()
7942 Prefix pfx, Int sz, Long delta, BtOp op, in dis_bt_G_E() argument
7968 if (haveF2orF3(pfx)) { in dis_bt_G_E()
7975 if (haveF2orF3(pfx)) { in dis_bt_G_E()
7976 if (haveF2andF3(pfx) || !haveLOCK(pfx) || op == BtOpNone) { in dis_bt_G_E()
7983 assign( t_bitno0, widenSto64(getIRegG(sz, pfx, modrm)) ); in dis_bt_G_E()
8008 storeLE( mkexpr(t_rsp), getIRegE(sz, pfx, modrm) ); in dis_bt_G_E()
8020 t_addr0 = disAMode ( &len, vbi, pfx, delta, dis_buf, 0 ); in dis_bt_G_E()
8071 if ((haveLOCK(pfx)) && !epartIsReg(modrm)) { in dis_bt_G_E()
8103 putIRegE(sz, pfx, modrm, loadLE(szToITy(sz), mkexpr(t_rsp)) ); in dis_bt_G_E()
8108 nameBtOp(op), nameISize(sz), nameIRegG(sz, pfx, modrm), in dis_bt_G_E()
8109 ( epartIsReg(modrm) ? nameIRegE(sz, pfx, modrm) : dis_buf ) ); in dis_bt_G_E()
8119 Prefix pfx, Int sz, Long delta, Bool fwds ) in dis_bs_E_G() argument
8138 assign( src, getIRegE(sz, pfx, modrm) ); in dis_bs_E_G()
8141 IRTemp addr = disAMode( &len, vbi, pfx, delta, dis_buf, 0 ); in dis_bs_E_G()
8148 ( isReg ? nameIRegE(sz, pfx, modrm) : dis_buf ), in dis_bs_E_G()
8149 nameIRegG(sz, pfx, modrm)); in dis_bs_E_G()
8215 widenUto64( getIRegG( sz, pfx, modrm ) ) in dis_bs_E_G()
8228 putIRegG( sz, pfx, modrm, mkexpr(dst) ); in dis_bs_E_G()
8236 void codegen_xchg_rAX_Reg ( Prefix pfx, Int sz, UInt regLo3 ) in codegen_xchg_rAX_Reg() argument
8245 assign( t2, getIRegRexB(8, pfx, regLo3) ); in codegen_xchg_rAX_Reg()
8247 putIRegRexB(8, pfx, regLo3, mkexpr(t1) ); in codegen_xchg_rAX_Reg()
8250 assign( t2, getIRegRexB(4, pfx, regLo3) ); in codegen_xchg_rAX_Reg()
8252 putIRegRexB(4, pfx, regLo3, mkexpr(t1) ); in codegen_xchg_rAX_Reg()
8255 assign( t2, getIRegRexB(2, pfx, regLo3) ); in codegen_xchg_rAX_Reg()
8257 putIRegRexB(2, pfx, regLo3, mkexpr(t1) ); in codegen_xchg_rAX_Reg()
8261 nameIRegRexB(sz,pfx, regLo3)); in codegen_xchg_rAX_Reg()
8320 Prefix pfx, in dis_cmpxchg_G_E() argument
8352 if (haveF2orF3(pfx)) { in dis_cmpxchg_G_E()
8357 if (haveF2orF3(pfx)) { in dis_cmpxchg_G_E()
8358 if (haveF2andF3(pfx) || !haveLOCK(pfx)) { in dis_cmpxchg_G_E()
8367 assign( dest, getIRegE(size, pfx, rm) ); in dis_cmpxchg_G_E()
8369 assign( src, getIRegG(size, pfx, rm) ); in dis_cmpxchg_G_E()
8376 putIRegE(size, pfx, rm, mkexpr(dest2)); in dis_cmpxchg_G_E()
8378 nameIRegG(size,pfx,rm), in dis_cmpxchg_G_E()
8379 nameIRegE(size,pfx,rm) ); in dis_cmpxchg_G_E()
8381 else if (!epartIsReg(rm) && !haveLOCK(pfx)) { in dis_cmpxchg_G_E()
8383 addr = disAMode ( &len, vbi, pfx, delta0, dis_buf, 0 ); in dis_cmpxchg_G_E()
8386 assign( src, getIRegG(size, pfx, rm) ); in dis_cmpxchg_G_E()
8395 nameIRegG(size,pfx,rm), dis_buf); in dis_cmpxchg_G_E()
8397 else if (!epartIsReg(rm) && haveLOCK(pfx)) { in dis_cmpxchg_G_E()
8403 addr = disAMode ( &len, vbi, pfx, delta0, dis_buf, 0 ); in dis_cmpxchg_G_E()
8405 assign( src, getIRegG(size, pfx, rm) ); in dis_cmpxchg_G_E()
8416 nameIRegG(size,pfx,rm), dis_buf); in dis_cmpxchg_G_E()
8444 Prefix pfx, in dis_cmov_E_G() argument
8458 assign( tmps, getIRegE(sz, pfx, rm) ); in dis_cmov_E_G()
8459 assign( tmpd, getIRegG(sz, pfx, rm) ); in dis_cmov_E_G()
8461 putIRegG( sz, pfx, rm, in dis_cmov_E_G()
8467 nameIRegE(sz,pfx,rm), in dis_cmov_E_G()
8468 nameIRegG(sz,pfx,rm)); in dis_cmov_E_G()
8474 IRTemp addr = disAMode ( &len, vbi, pfx, delta0, dis_buf, 0 ); in dis_cmov_E_G()
8476 assign( tmpd, getIRegG(sz, pfx, rm) ); in dis_cmov_E_G()
8478 putIRegG( sz, pfx, rm, in dis_cmov_E_G()
8486 nameIRegG(sz,pfx,rm)); in dis_cmov_E_G()
8495 Prefix pfx, Int sz, Long delta0 ) in dis_xadd_G_E() argument
8519 assign( tmpd, getIRegE(sz, pfx, rm) ); in dis_xadd_G_E()
8520 assign( tmpt0, getIRegG(sz, pfx, rm) ); in dis_xadd_G_E()
8524 putIRegG(sz, pfx, rm, mkexpr(tmpd)); in dis_xadd_G_E()
8525 putIRegE(sz, pfx, rm, mkexpr(tmpt1)); in dis_xadd_G_E()
8527 nameISize(sz), nameIRegG(sz,pfx,rm), nameIRegE(sz,pfx,rm)); in dis_xadd_G_E()
8531 else if (!epartIsReg(rm) && !haveLOCK(pfx)) { in dis_xadd_G_E()
8533 IRTemp addr = disAMode ( &len, vbi, pfx, delta0, dis_buf, 0 ); in dis_xadd_G_E()
8535 assign( tmpt0, getIRegG(sz, pfx, rm) ); in dis_xadd_G_E()
8540 putIRegG(sz, pfx, rm, mkexpr(tmpd)); in dis_xadd_G_E()
8542 nameISize(sz), nameIRegG(sz,pfx,rm), dis_buf); in dis_xadd_G_E()
8546 else if (!epartIsReg(rm) && haveLOCK(pfx)) { in dis_xadd_G_E()
8548 IRTemp addr = disAMode ( &len, vbi, pfx, delta0, dis_buf, 0 ); in dis_xadd_G_E()
8550 assign( tmpt0, getIRegG(sz, pfx, rm) ); in dis_xadd_G_E()
8556 putIRegG(sz, pfx, rm, mkexpr(tmpd)); in dis_xadd_G_E()
8558 nameISize(sz), nameIRegG(sz,pfx,rm), dis_buf); in dis_xadd_G_E()
8702 Prefix pfx, Long delta, in dis_SSE_E_to_G_all_wrk() argument
8713 = invertG ? unop(Iop_NotV128, getXMMReg(gregOfRexRM(pfx,rm))) in dis_SSE_E_to_G_all_wrk()
8714 : getXMMReg(gregOfRexRM(pfx,rm)); in dis_SSE_E_to_G_all_wrk()
8717 gregOfRexRM(pfx,rm), in dis_SSE_E_to_G_all_wrk()
8721 getXMMReg(eregOfRexRM(pfx,rm))) in dis_SSE_E_to_G_all_wrk()
8723 getXMMReg(eregOfRexRM(pfx,rm))) in dis_SSE_E_to_G_all_wrk()
8726 nameXMMReg(eregOfRexRM(pfx,rm)), in dis_SSE_E_to_G_all_wrk()
8727 nameXMMReg(gregOfRexRM(pfx,rm)) ); in dis_SSE_E_to_G_all_wrk()
8730 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_SSE_E_to_G_all_wrk()
8732 gregOfRexRM(pfx,rm), in dis_SSE_E_to_G_all_wrk()
8742 nameXMMReg(gregOfRexRM(pfx,rm)) ); in dis_SSE_E_to_G_all_wrk()
8752 Prefix pfx, Long delta, in dis_SSE_E_to_G_all() argument
8755 return dis_SSE_E_to_G_all_wrk( vbi, pfx, delta, opname, op, False ); in dis_SSE_E_to_G_all()
8762 Prefix pfx, Long delta, in dis_SSE_E_to_G_all_invG() argument
8765 return dis_SSE_E_to_G_all_wrk( vbi, pfx, delta, opname, op, True ); in dis_SSE_E_to_G_all_invG()
8772 Prefix pfx, Long delta, in dis_SSE_E_to_G_lo32() argument
8779 IRExpr* gpart = getXMMReg(gregOfRexRM(pfx,rm)); in dis_SSE_E_to_G_lo32()
8781 putXMMReg( gregOfRexRM(pfx,rm), in dis_SSE_E_to_G_lo32()
8783 getXMMReg(eregOfRexRM(pfx,rm))) ); in dis_SSE_E_to_G_lo32()
8785 nameXMMReg(eregOfRexRM(pfx,rm)), in dis_SSE_E_to_G_lo32()
8786 nameXMMReg(gregOfRexRM(pfx,rm)) ); in dis_SSE_E_to_G_lo32()
8792 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_SSE_E_to_G_lo32()
8795 putXMMReg( gregOfRexRM(pfx,rm), in dis_SSE_E_to_G_lo32()
8799 nameXMMReg(gregOfRexRM(pfx,rm)) ); in dis_SSE_E_to_G_lo32()
8808 Prefix pfx, Long delta, in dis_SSE_E_to_G_lo64() argument
8815 IRExpr* gpart = getXMMReg(gregOfRexRM(pfx,rm)); in dis_SSE_E_to_G_lo64()
8817 putXMMReg( gregOfRexRM(pfx,rm), in dis_SSE_E_to_G_lo64()
8819 getXMMReg(eregOfRexRM(pfx,rm))) ); in dis_SSE_E_to_G_lo64()
8821 nameXMMReg(eregOfRexRM(pfx,rm)), in dis_SSE_E_to_G_lo64()
8822 nameXMMReg(gregOfRexRM(pfx,rm)) ); in dis_SSE_E_to_G_lo64()
8828 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_SSE_E_to_G_lo64()
8831 putXMMReg( gregOfRexRM(pfx,rm), in dis_SSE_E_to_G_lo64()
8835 nameXMMReg(gregOfRexRM(pfx,rm)) ); in dis_SSE_E_to_G_lo64()
8845 Prefix pfx, Long delta, in dis_SSE_E_to_G_unary_all() argument
8857 IRExpr* src = getXMMReg(eregOfRexRM(pfx,rm)); in dis_SSE_E_to_G_unary_all()
8861 putXMMReg( gregOfRexRM(pfx,rm), res ); in dis_SSE_E_to_G_unary_all()
8863 nameXMMReg(eregOfRexRM(pfx,rm)), in dis_SSE_E_to_G_unary_all()
8864 nameXMMReg(gregOfRexRM(pfx,rm)) ); in dis_SSE_E_to_G_unary_all()
8867 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_SSE_E_to_G_unary_all()
8872 putXMMReg( gregOfRexRM(pfx,rm), res ); in dis_SSE_E_to_G_unary_all()
8875 nameXMMReg(gregOfRexRM(pfx,rm)) ); in dis_SSE_E_to_G_unary_all()
8885 Prefix pfx, Long delta, in dis_SSE_E_to_G_unary_lo32() argument
8898 assign( oldG0, getXMMReg(gregOfRexRM(pfx,rm)) ); in dis_SSE_E_to_G_unary_lo32()
8904 getXMMRegLane32(eregOfRexRM(pfx,rm), 0)) ); in dis_SSE_E_to_G_unary_lo32()
8905 putXMMReg( gregOfRexRM(pfx,rm), unop(op, mkexpr(oldG1)) ); in dis_SSE_E_to_G_unary_lo32()
8907 nameXMMReg(eregOfRexRM(pfx,rm)), in dis_SSE_E_to_G_unary_lo32()
8908 nameXMMReg(gregOfRexRM(pfx,rm)) ); in dis_SSE_E_to_G_unary_lo32()
8911 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_SSE_E_to_G_unary_lo32()
8916 putXMMReg( gregOfRexRM(pfx,rm), unop(op, mkexpr(oldG1)) ); in dis_SSE_E_to_G_unary_lo32()
8919 nameXMMReg(gregOfRexRM(pfx,rm)) ); in dis_SSE_E_to_G_unary_lo32()
8929 Prefix pfx, Long delta, in dis_SSE_E_to_G_unary_lo64() argument
8942 assign( oldG0, getXMMReg(gregOfRexRM(pfx,rm)) ); in dis_SSE_E_to_G_unary_lo64()
8948 getXMMRegLane64(eregOfRexRM(pfx,rm), 0)) ); in dis_SSE_E_to_G_unary_lo64()
8949 putXMMReg( gregOfRexRM(pfx,rm), unop(op, mkexpr(oldG1)) ); in dis_SSE_E_to_G_unary_lo64()
8951 nameXMMReg(eregOfRexRM(pfx,rm)), in dis_SSE_E_to_G_unary_lo64()
8952 nameXMMReg(gregOfRexRM(pfx,rm)) ); in dis_SSE_E_to_G_unary_lo64()
8955 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_SSE_E_to_G_unary_lo64()
8960 putXMMReg( gregOfRexRM(pfx,rm), unop(op, mkexpr(oldG1)) ); in dis_SSE_E_to_G_unary_lo64()
8963 nameXMMReg(gregOfRexRM(pfx,rm)) ); in dis_SSE_E_to_G_unary_lo64()
8975 Prefix pfx, Long delta, in dis_SSEint_E_to_G() argument
8984 IRExpr* gpart = getXMMReg(gregOfRexRM(pfx,rm)); in dis_SSEint_E_to_G()
8987 epart = getXMMReg(eregOfRexRM(pfx,rm)); in dis_SSEint_E_to_G()
8989 nameXMMReg(eregOfRexRM(pfx,rm)), in dis_SSEint_E_to_G()
8990 nameXMMReg(gregOfRexRM(pfx,rm)) ); in dis_SSEint_E_to_G()
8993 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_SSEint_E_to_G()
8997 nameXMMReg(gregOfRexRM(pfx,rm)) ); in dis_SSEint_E_to_G()
9000 putXMMReg( gregOfRexRM(pfx,rm), in dis_SSEint_E_to_G()
9130 Prefix pfx, Long delta, in dis_SSE_cmp_E_to_G() argument
9151 assign( plain, binop(op, getXMMReg(gregOfRexRM(pfx,rm)), in dis_SSE_cmp_E_to_G()
9152 getXMMReg(eregOfRexRM(pfx,rm))) ); in dis_SSE_cmp_E_to_G()
9156 nameXMMReg(eregOfRexRM(pfx,rm)), in dis_SSE_cmp_E_to_G()
9157 nameXMMReg(gregOfRexRM(pfx,rm)) ); in dis_SSE_cmp_E_to_G()
9159 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_SSE_cmp_E_to_G()
9168 getXMMReg(gregOfRexRM(pfx,rm)), in dis_SSE_cmp_E_to_G()
9181 nameXMMReg(gregOfRexRM(pfx,rm)) ); in dis_SSE_cmp_E_to_G()
9185 putXMMReg( gregOfRexRM(pfx,rm), in dis_SSE_cmp_E_to_G()
9191 putXMMReg( gregOfRexRM(pfx,rm), in dis_SSE_cmp_E_to_G()
9195 putXMMReg( gregOfRexRM(pfx,rm), mkexpr(plain) ); in dis_SSE_cmp_E_to_G()
9206 Prefix pfx, Long delta, in dis_SSE_shiftG_byE() argument
9219 assign( amt, getXMMRegLane64(eregOfRexRM(pfx,rm), 0) ); in dis_SSE_shiftG_byE()
9221 nameXMMReg(eregOfRexRM(pfx,rm)), in dis_SSE_shiftG_byE()
9222 nameXMMReg(gregOfRexRM(pfx,rm)) ); in dis_SSE_shiftG_byE()
9225 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_SSE_shiftG_byE()
9229 nameXMMReg(gregOfRexRM(pfx,rm)) ); in dis_SSE_shiftG_byE()
9232 assign( g0, getXMMReg(gregOfRexRM(pfx,rm)) ); in dis_SSE_shiftG_byE()
9272 putXMMReg( gregOfRexRM(pfx,rm), mkexpr(g1) ); in dis_SSE_shiftG_byE()
9280 ULong dis_SSE_shiftE_imm ( Prefix pfx, in dis_SSE_shiftE_imm() argument
9295 nameXMMReg(eregOfRexRM(pfx,rm)) ); in dis_SSE_shiftE_imm()
9296 assign( e0, getXMMReg(eregOfRexRM(pfx,rm)) ); in dis_SSE_shiftE_imm()
9327 putXMMReg( eregOfRexRM(pfx,rm), mkexpr(e1) ); in dis_SSE_shiftE_imm()
9957 static Long dis_COMISD ( const VexAbiInfo* vbi, Prefix pfx, in dis_COMISD() argument
9968 assign( argR, getXMMRegLane64F( eregOfRexRM(pfx,modrm), in dis_COMISD()
9973 nameXMMReg(eregOfRexRM(pfx,modrm)), in dis_COMISD()
9974 nameXMMReg(gregOfRexRM(pfx,modrm)) ); in dis_COMISD()
9976 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_COMISD()
9982 nameXMMReg(gregOfRexRM(pfx,modrm)) ); in dis_COMISD()
9984 assign( argL, getXMMRegLane64F( gregOfRexRM(pfx,modrm), in dis_COMISD()
10000 static Long dis_COMISS ( const VexAbiInfo* vbi, Prefix pfx, in dis_COMISS() argument
10011 assign( argR, getXMMRegLane32F( eregOfRexRM(pfx,modrm), in dis_COMISS()
10016 nameXMMReg(eregOfRexRM(pfx,modrm)), in dis_COMISS()
10017 nameXMMReg(gregOfRexRM(pfx,modrm)) ); in dis_COMISS()
10019 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_COMISS()
10025 nameXMMReg(gregOfRexRM(pfx,modrm)) ); in dis_COMISS()
10027 assign( argL, getXMMRegLane32F( gregOfRexRM(pfx,modrm), in dis_COMISS()
10045 static Long dis_PSHUFD_32x4 ( const VexAbiInfo* vbi, Prefix pfx, in dis_PSHUFD_32x4() argument
10056 assign( sV, getXMMReg(eregOfRexRM(pfx,modrm)) ); in dis_PSHUFD_32x4()
10060 nameXMMReg(eregOfRexRM(pfx,modrm)), in dis_PSHUFD_32x4()
10061 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_PSHUFD_32x4()
10063 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, in dis_PSHUFD_32x4()
10070 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_PSHUFD_32x4()
10086 (gregOfRexRM(pfx,modrm), mkexpr(dV)); in dis_PSHUFD_32x4()
10091 static Long dis_PSHUFD_32x8 ( const VexAbiInfo* vbi, Prefix pfx, Long delta ) in dis_PSHUFD_32x8() argument
10099 UInt rG = gregOfRexRM(pfx,modrm); in dis_PSHUFD_32x8()
10101 UInt rE = eregOfRexRM(pfx,modrm); in dis_PSHUFD_32x8()
10107 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, in dis_PSHUFD_32x8()
10226 static Long dis_CVTxSD2SI ( const VexAbiInfo* vbi, Prefix pfx, in dis_CVTxSD2SI() argument
10240 assign(f64lo, getXMMRegLane64F(eregOfRexRM(pfx,modrm), 0)); in dis_CVTxSD2SI()
10242 nameXMMReg(eregOfRexRM(pfx,modrm)), in dis_CVTxSD2SI()
10243 nameIReg(sz, gregOfRexRM(pfx,modrm), in dis_CVTxSD2SI()
10246 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_CVTxSD2SI()
10251 nameIReg(sz, gregOfRexRM(pfx,modrm), in dis_CVTxSD2SI()
10262 putIReg32( gregOfRexRM(pfx,modrm), in dis_CVTxSD2SI()
10266 putIReg64( gregOfRexRM(pfx,modrm), in dis_CVTxSD2SI()
10274 static Long dis_CVTxSS2SI ( const VexAbiInfo* vbi, Prefix pfx, in dis_CVTxSS2SI() argument
10288 assign(f32lo, getXMMRegLane32F(eregOfRexRM(pfx,modrm), 0)); in dis_CVTxSS2SI()
10290 nameXMMReg(eregOfRexRM(pfx,modrm)), in dis_CVTxSS2SI()
10291 nameIReg(sz, gregOfRexRM(pfx,modrm), in dis_CVTxSS2SI()
10294 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_CVTxSS2SI()
10299 nameIReg(sz, gregOfRexRM(pfx,modrm), in dis_CVTxSS2SI()
10310 putIReg32( gregOfRexRM(pfx,modrm), in dis_CVTxSS2SI()
10316 putIReg64( gregOfRexRM(pfx,modrm), in dis_CVTxSS2SI()
10326 static Long dis_CVTPS2PD_128 ( const VexAbiInfo* vbi, Prefix pfx, in dis_CVTPS2PD_128() argument
10335 UInt rG = gregOfRexRM(pfx,modrm); in dis_CVTPS2PD_128()
10337 UInt rE = eregOfRexRM(pfx,modrm); in dis_CVTPS2PD_128()
10344 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_CVTPS2PD_128()
10361 static Long dis_CVTPS2PD_256 ( const VexAbiInfo* vbi, Prefix pfx, in dis_CVTPS2PD_256() argument
10372 UInt rG = gregOfRexRM(pfx,modrm); in dis_CVTPS2PD_256()
10374 UInt rE = eregOfRexRM(pfx,modrm); in dis_CVTPS2PD_256()
10382 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_CVTPS2PD_256()
10402 static Long dis_CVTPD2PS_128 ( const VexAbiInfo* vbi, Prefix pfx, in dis_CVTPD2PS_128() argument
10409 UInt rG = gregOfRexRM(pfx,modrm); in dis_CVTPD2PS_128()
10413 UInt rE = eregOfRexRM(pfx,modrm); in dis_CVTPD2PS_128()
10419 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_CVTPD2PS_128()
10447 static Long dis_CVTxPS2DQ_128 ( const VexAbiInfo* vbi, Prefix pfx, in dis_CVTxPS2DQ_128() argument
10456 UInt rG = gregOfRexRM(pfx,modrm); in dis_CVTxPS2DQ_128()
10460 UInt rE = eregOfRexRM(pfx,modrm); in dis_CVTxPS2DQ_128()
10466 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_CVTxPS2DQ_128()
10497 static Long dis_CVTxPS2DQ_256 ( const VexAbiInfo* vbi, Prefix pfx, in dis_CVTxPS2DQ_256() argument
10506 UInt rG = gregOfRexRM(pfx,modrm); in dis_CVTxPS2DQ_256()
10510 UInt rE = eregOfRexRM(pfx,modrm); in dis_CVTxPS2DQ_256()
10516 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_CVTxPS2DQ_256()
10549 static Long dis_CVTxPD2DQ_128 ( const VexAbiInfo* vbi, Prefix pfx, in dis_CVTxPD2DQ_128() argument
10558 UInt rG = gregOfRexRM(pfx,modrm); in dis_CVTxPD2DQ_128()
10562 UInt rE = eregOfRexRM(pfx,modrm); in dis_CVTxPD2DQ_128()
10568 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_CVTxPD2DQ_128()
10604 static Long dis_CVTxPD2DQ_256 ( const VexAbiInfo* vbi, Prefix pfx, in dis_CVTxPD2DQ_256() argument
10613 UInt rG = gregOfRexRM(pfx,modrm); in dis_CVTxPD2DQ_256()
10617 UInt rE = eregOfRexRM(pfx,modrm); in dis_CVTxPD2DQ_256()
10623 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_CVTxPD2DQ_256()
10658 static Long dis_CVTDQ2PS_128 ( const VexAbiInfo* vbi, Prefix pfx, in dis_CVTDQ2PS_128() argument
10667 UInt rG = gregOfRexRM(pfx,modrm); in dis_CVTDQ2PS_128()
10671 UInt rE = eregOfRexRM(pfx,modrm); in dis_CVTDQ2PS_128()
10677 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_CVTDQ2PS_128()
10706 static Long dis_CVTDQ2PS_256 ( const VexAbiInfo* vbi, Prefix pfx, in dis_CVTDQ2PS_256() argument
10715 UInt rG = gregOfRexRM(pfx,modrm); in dis_CVTDQ2PS_256()
10719 UInt rE = eregOfRexRM(pfx,modrm); in dis_CVTDQ2PS_256()
10724 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_CVTDQ2PS_256()
10759 static Long dis_PMOVMSKB_128 ( const VexAbiInfo* vbi, Prefix pfx, in dis_PMOVMSKB_128() argument
10764 UInt rE = eregOfRexRM(pfx,modrm); in dis_PMOVMSKB_128()
10765 UInt rG = gregOfRexRM(pfx,modrm); in dis_PMOVMSKB_128()
10778 static Long dis_PMOVMSKB_256 ( const VexAbiInfo* vbi, Prefix pfx, in dis_PMOVMSKB_256() argument
10783 UInt rE = eregOfRexRM(pfx,modrm); in dis_PMOVMSKB_256()
10784 UInt rG = gregOfRexRM(pfx,modrm); in dis_PMOVMSKB_256()
11233 static Long dis_PSHUFxW_128 ( const VexAbiInfo* vbi, Prefix pfx, in dis_PSHUFxW_128() argument
11240 UInt rG = gregOfRexRM(pfx,modrm); in dis_PSHUFxW_128()
11250 UInt rE = eregOfRexRM(pfx,modrm); in dis_PSHUFxW_128()
11258 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_PSHUFxW_128()
11288 static Long dis_PSHUFxW_256 ( const VexAbiInfo* vbi, Prefix pfx, in dis_PSHUFxW_256() argument
11295 UInt rG = gregOfRexRM(pfx,modrm); in dis_PSHUFxW_256()
11304 UInt rE = eregOfRexRM(pfx,modrm); in dis_PSHUFxW_256()
11311 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_PSHUFxW_256()
11335 static Long dis_PEXTRW_128_EregOnly_toG ( const VexAbiInfo* vbi, Prefix pfx, in dis_PEXTRW_128_EregOnly_toG() argument
11340 UInt rG = gregOfRexRM(pfx,modrm); in dis_PEXTRW_128_EregOnly_toG()
11346 UInt rE = eregOfRexRM(pfx,modrm); in dis_PEXTRW_128_EregOnly_toG()
11374 static Long dis_CVTDQ2PD_128 ( const VexAbiInfo* vbi, Prefix pfx, in dis_CVTDQ2PD_128() argument
11382 UInt rG = gregOfRexRM(pfx,modrm); in dis_CVTDQ2PD_128()
11385 UInt rE = eregOfRexRM(pfx,modrm); in dis_CVTDQ2PD_128()
11390 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_CVTDQ2PD_128()
11409 static Long dis_STMXCSR ( const VexAbiInfo* vbi, Prefix pfx, in dis_STMXCSR() argument
11417 vassert(gregOfRexRM(pfx,modrm) == 3); /* ditto */ in dis_STMXCSR()
11419 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_STMXCSR()
11441 static Long dis_LDMXCSR ( const VexAbiInfo* vbi, Prefix pfx, in dis_LDMXCSR() argument
11449 vassert(gregOfRexRM(pfx,modrm) == 2); /* ditto */ in dis_LDMXCSR()
11454 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_LDMXCSR()
11559 static Long dis_MASKMOVDQU ( const VexAbiInfo* vbi, Prefix pfx, in dis_MASKMOVDQU() argument
11568 UInt rG = gregOfRexRM(pfx,modrm); in dis_MASKMOVDQU()
11569 UInt rE = eregOfRexRM(pfx,modrm); in dis_MASKMOVDQU()
11571 assign( addr, handleAddrOverrides( vbi, pfx, getIReg64(R_RDI) )); in dis_MASKMOVDQU()
11580 getXMMRegLane64( eregOfRexRM(pfx,modrm), 1 ), in dis_MASKMOVDQU()
11583 getXMMRegLane64( eregOfRexRM(pfx,modrm), 0 ), in dis_MASKMOVDQU()
11602 static Long dis_MOVMSKPS_128 ( const VexAbiInfo* vbi, Prefix pfx, in dis_MOVMSKPS_128() argument
11606 UInt rG = gregOfRexRM(pfx,modrm); in dis_MOVMSKPS_128()
11607 UInt rE = eregOfRexRM(pfx,modrm); in dis_MOVMSKPS_128()
11634 static Long dis_MOVMSKPS_256 ( const VexAbiInfo* vbi, Prefix pfx, Long delta ) in dis_MOVMSKPS_256() argument
11637 UInt rG = gregOfRexRM(pfx,modrm); in dis_MOVMSKPS_256()
11638 UInt rE = eregOfRexRM(pfx,modrm); in dis_MOVMSKPS_256()
11684 static Long dis_MOVMSKPD_128 ( const VexAbiInfo* vbi, Prefix pfx, in dis_MOVMSKPD_128() argument
11688 UInt rG = gregOfRexRM(pfx,modrm); in dis_MOVMSKPD_128()
11689 UInt rE = eregOfRexRM(pfx,modrm); in dis_MOVMSKPD_128()
11706 static Long dis_MOVMSKPD_256 ( const VexAbiInfo* vbi, Prefix pfx, Long delta ) in dis_MOVMSKPD_256() argument
11709 UInt rG = gregOfRexRM(pfx,modrm); in dis_MOVMSKPD_256()
11710 UInt rE = eregOfRexRM(pfx,modrm); in dis_MOVMSKPD_256()
11741 Prefix pfx, Int sz, Long deltaIN, in dis_ESC_0F__SSE2() argument
11764 if (have66noF2noF3(pfx) in dis_ESC_0F__SSE2()
11769 putXMMReg( gregOfRexRM(pfx,modrm), in dis_ESC_0F__SSE2()
11770 getXMMReg( eregOfRexRM(pfx,modrm) )); in dis_ESC_0F__SSE2()
11771 DIP("movupd %s,%s\n", nameXMMReg(eregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
11772 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
11775 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
11776 putXMMReg( gregOfRexRM(pfx,modrm), in dis_ESC_0F__SSE2()
11779 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
11787 if (haveF2no66noF3(pfx) in dis_ESC_0F__SSE2()
11791 putXMMRegLane64( gregOfRexRM(pfx,modrm), 0, in dis_ESC_0F__SSE2()
11792 getXMMRegLane64( eregOfRexRM(pfx,modrm), 0 )); in dis_ESC_0F__SSE2()
11793 DIP("movsd %s,%s\n", nameXMMReg(eregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
11794 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
11797 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
11798 putXMMReg( gregOfRexRM(pfx,modrm), mkV128(0) ); in dis_ESC_0F__SSE2()
11799 putXMMRegLane64( gregOfRexRM(pfx,modrm), 0, in dis_ESC_0F__SSE2()
11802 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
11809 if (haveF3no66noF2(pfx) in dis_ESC_0F__SSE2()
11813 putXMMRegLane32( gregOfRexRM(pfx,modrm), 0, in dis_ESC_0F__SSE2()
11814 getXMMRegLane32( eregOfRexRM(pfx,modrm), 0 )); in dis_ESC_0F__SSE2()
11815 DIP("movss %s,%s\n", nameXMMReg(eregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
11816 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
11819 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
11820 putXMMReg( gregOfRexRM(pfx,modrm), mkV128(0) ); in dis_ESC_0F__SSE2()
11821 putXMMRegLane32( gregOfRexRM(pfx,modrm), 0, in dis_ESC_0F__SSE2()
11824 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
11830 if (haveNo66noF2noF3(pfx) in dis_ESC_0F__SSE2()
11834 putXMMReg( gregOfRexRM(pfx,modrm), in dis_ESC_0F__SSE2()
11835 getXMMReg( eregOfRexRM(pfx,modrm) )); in dis_ESC_0F__SSE2()
11836 DIP("movups %s,%s\n", nameXMMReg(eregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
11837 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
11840 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
11841 putXMMReg( gregOfRexRM(pfx,modrm), in dis_ESC_0F__SSE2()
11844 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
11854 if (haveF2no66noF3(pfx) in dis_ESC_0F__SSE2()
11858 putXMMRegLane64( eregOfRexRM(pfx,modrm), 0, in dis_ESC_0F__SSE2()
11859 getXMMRegLane64( gregOfRexRM(pfx,modrm), 0 )); in dis_ESC_0F__SSE2()
11860 DIP("movsd %s,%s\n", nameXMMReg(gregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
11861 nameXMMReg(eregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
11864 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
11866 getXMMRegLane64(gregOfRexRM(pfx,modrm), 0) ); in dis_ESC_0F__SSE2()
11867 DIP("movsd %s,%s\n", nameXMMReg(gregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
11875 if (haveF3no66noF2(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
11880 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
11882 getXMMRegLane32(gregOfRexRM(pfx,modrm), 0) ); in dis_ESC_0F__SSE2()
11883 DIP("movss %s,%s\n", nameXMMReg(gregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
11890 if (have66noF2noF3(pfx) in dis_ESC_0F__SSE2()
11894 putXMMReg( eregOfRexRM(pfx,modrm), in dis_ESC_0F__SSE2()
11895 getXMMReg( gregOfRexRM(pfx,modrm) ) ); in dis_ESC_0F__SSE2()
11896 DIP("movupd %s,%s\n", nameXMMReg(gregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
11897 nameXMMReg(eregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
11900 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
11901 storeLE( mkexpr(addr), getXMMReg(gregOfRexRM(pfx,modrm)) ); in dis_ESC_0F__SSE2()
11902 DIP("movupd %s,%s\n", nameXMMReg(gregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
11909 if (haveNo66noF2noF3(pfx) in dis_ESC_0F__SSE2()
11915 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
11916 storeLE( mkexpr(addr), getXMMReg(gregOfRexRM(pfx,modrm)) ); in dis_ESC_0F__SSE2()
11917 DIP("movups %s,%s\n", nameXMMReg(gregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
11928 if (have66noF2noF3(pfx) in dis_ESC_0F__SSE2()
11934 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
11936 putXMMRegLane64( gregOfRexRM(pfx,modrm), in dis_ESC_0F__SSE2()
11940 dis_buf, nameXMMReg( gregOfRexRM(pfx,modrm) )); in dis_ESC_0F__SSE2()
11946 if (haveNo66noF2noF3(pfx) in dis_ESC_0F__SSE2()
11951 putXMMRegLane64( gregOfRexRM(pfx,modrm), in dis_ESC_0F__SSE2()
11953 getXMMRegLane64( eregOfRexRM(pfx,modrm), 1 )); in dis_ESC_0F__SSE2()
11954 DIP("movhlps %s, %s\n", nameXMMReg(eregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
11955 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
11957 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
11959 putXMMRegLane64( gregOfRexRM(pfx,modrm), 0/*lower lane*/, in dis_ESC_0F__SSE2()
11962 dis_buf, nameXMMReg( gregOfRexRM(pfx,modrm) )); in dis_ESC_0F__SSE2()
11970 if (haveNo66noF2noF3(pfx) in dis_ESC_0F__SSE2()
11974 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
11977 getXMMRegLane64( gregOfRexRM(pfx,modrm), in dis_ESC_0F__SSE2()
11979 DIP("movlps %s, %s\n", nameXMMReg( gregOfRexRM(pfx,modrm) ), in dis_ESC_0F__SSE2()
11987 if (have66noF2noF3(pfx) in dis_ESC_0F__SSE2()
11991 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
11994 getXMMRegLane64( gregOfRexRM(pfx,modrm), in dis_ESC_0F__SSE2()
11996 DIP("movlpd %s, %s\n", nameXMMReg( gregOfRexRM(pfx,modrm) ), in dis_ESC_0F__SSE2()
12009 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
12014 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__SSE2()
12017 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__SSE2()
12023 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
12036 if (have66noF2noF3(pfx) in dis_ESC_0F__SSE2()
12042 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__SSE2()
12045 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__SSE2()
12051 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
12067 if (have66noF2noF3(pfx) in dis_ESC_0F__SSE2()
12073 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
12075 putXMMRegLane64( gregOfRexRM(pfx,modrm), 1/*upper lane*/, in dis_ESC_0F__SSE2()
12078 nameXMMReg( gregOfRexRM(pfx,modrm) )); in dis_ESC_0F__SSE2()
12084 if (haveNo66noF2noF3(pfx) in dis_ESC_0F__SSE2()
12089 putXMMRegLane64( gregOfRexRM(pfx,modrm), 1/*upper lane*/, in dis_ESC_0F__SSE2()
12090 getXMMRegLane64( eregOfRexRM(pfx,modrm), 0 ) ); in dis_ESC_0F__SSE2()
12091 DIP("movhps %s,%s\n", nameXMMReg(eregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
12092 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
12094 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
12096 putXMMRegLane64( gregOfRexRM(pfx,modrm), 1/*upper lane*/, in dis_ESC_0F__SSE2()
12099 nameXMMReg( gregOfRexRM(pfx,modrm) )); in dis_ESC_0F__SSE2()
12107 if (haveNo66noF2noF3(pfx) in dis_ESC_0F__SSE2()
12111 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
12114 getXMMRegLane64( gregOfRexRM(pfx,modrm), in dis_ESC_0F__SSE2()
12116 DIP("movhps %s,%s\n", nameXMMReg( gregOfRexRM(pfx,modrm) ), in dis_ESC_0F__SSE2()
12124 if (have66noF2noF3(pfx) in dis_ESC_0F__SSE2()
12128 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
12131 getXMMRegLane64( gregOfRexRM(pfx,modrm), in dis_ESC_0F__SSE2()
12133 DIP("movhpd %s,%s\n", nameXMMReg( gregOfRexRM(pfx,modrm) ), in dis_ESC_0F__SSE2()
12146 if (haveNo66noF2noF3(pfx) in dis_ESC_0F__SSE2()
12155 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
12173 if (have66noF2noF3(pfx) in dis_ESC_0F__SSE2()
12177 putXMMReg( gregOfRexRM(pfx,modrm), in dis_ESC_0F__SSE2()
12178 getXMMReg( eregOfRexRM(pfx,modrm) )); in dis_ESC_0F__SSE2()
12179 DIP("movapd %s,%s\n", nameXMMReg(eregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
12180 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
12183 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
12185 putXMMReg( gregOfRexRM(pfx,modrm), in dis_ESC_0F__SSE2()
12188 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
12194 if (haveNo66noF2noF3(pfx) in dis_ESC_0F__SSE2()
12198 putXMMReg( gregOfRexRM(pfx,modrm), in dis_ESC_0F__SSE2()
12199 getXMMReg( eregOfRexRM(pfx,modrm) )); in dis_ESC_0F__SSE2()
12200 DIP("movaps %s,%s\n", nameXMMReg(eregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
12201 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
12204 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
12206 putXMMReg( gregOfRexRM(pfx,modrm), in dis_ESC_0F__SSE2()
12209 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
12218 if (haveNo66noF2noF3(pfx) in dis_ESC_0F__SSE2()
12222 putXMMReg( eregOfRexRM(pfx,modrm), in dis_ESC_0F__SSE2()
12223 getXMMReg( gregOfRexRM(pfx,modrm) )); in dis_ESC_0F__SSE2()
12224 DIP("movaps %s,%s\n", nameXMMReg(gregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
12225 nameXMMReg(eregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
12228 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
12230 storeLE( mkexpr(addr), getXMMReg(gregOfRexRM(pfx,modrm)) ); in dis_ESC_0F__SSE2()
12231 DIP("movaps %s,%s\n", nameXMMReg(gregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
12238 if (have66noF2noF3(pfx) in dis_ESC_0F__SSE2()
12242 putXMMReg( eregOfRexRM(pfx,modrm), in dis_ESC_0F__SSE2()
12243 getXMMReg( gregOfRexRM(pfx,modrm) ) ); in dis_ESC_0F__SSE2()
12244 DIP("movapd %s,%s\n", nameXMMReg(gregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
12245 nameXMMReg(eregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
12248 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
12250 storeLE( mkexpr(addr), getXMMReg(gregOfRexRM(pfx,modrm)) ); in dis_ESC_0F__SSE2()
12251 DIP("movapd %s,%s\n", nameXMMReg(gregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
12262 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
12272 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
12274 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
12278 nameXMMReg(gregOfRexRM(pfx,modrm)) ); in dis_ESC_0F__SSE2()
12284 gregOfRexRM(pfx,modrm), 0, in dis_ESC_0F__SSE2()
12291 gregOfRexRM(pfx,modrm), 1, in dis_ESC_0F__SSE2()
12302 if (haveF3no66noF2(pfx) && (sz == 4 || sz == 8)) { in dis_ESC_0F__SSE2()
12309 assign( arg32, getIReg32(eregOfRexRM(pfx,modrm)) ); in dis_ESC_0F__SSE2()
12311 DIP("cvtsi2ss %s,%s\n", nameIReg32(eregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
12312 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
12314 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
12318 nameXMMReg(gregOfRexRM(pfx,modrm)) ); in dis_ESC_0F__SSE2()
12321 gregOfRexRM(pfx,modrm), 0, in dis_ESC_0F__SSE2()
12329 assign( arg64, getIReg64(eregOfRexRM(pfx,modrm)) ); in dis_ESC_0F__SSE2()
12331 DIP("cvtsi2ssq %s,%s\n", nameIReg64(eregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
12332 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
12334 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
12338 nameXMMReg(gregOfRexRM(pfx,modrm)) ); in dis_ESC_0F__SSE2()
12341 gregOfRexRM(pfx,modrm), 0, in dis_ESC_0F__SSE2()
12352 if (haveF2no66noF3(pfx) && (sz == 4 || sz == 8)) { in dis_ESC_0F__SSE2()
12357 assign( arg32, getIReg32(eregOfRexRM(pfx,modrm)) ); in dis_ESC_0F__SSE2()
12359 DIP("cvtsi2sdl %s,%s\n", nameIReg32(eregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
12360 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
12362 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
12366 nameXMMReg(gregOfRexRM(pfx,modrm)) ); in dis_ESC_0F__SSE2()
12368 putXMMRegLane64F( gregOfRexRM(pfx,modrm), 0, in dis_ESC_0F__SSE2()
12375 assign( arg64, getIReg64(eregOfRexRM(pfx,modrm)) ); in dis_ESC_0F__SSE2()
12377 DIP("cvtsi2sdq %s,%s\n", nameIReg64(eregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
12378 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
12380 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
12384 nameXMMReg(gregOfRexRM(pfx,modrm)) ); in dis_ESC_0F__SSE2()
12387 gregOfRexRM(pfx,modrm), in dis_ESC_0F__SSE2()
12399 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
12414 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
12416 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
12420 nameXMMReg(gregOfRexRM(pfx,modrm)) ); in dis_ESC_0F__SSE2()
12424 gregOfRexRM(pfx,modrm), 0, in dis_ESC_0F__SSE2()
12429 gregOfRexRM(pfx,modrm), 1, in dis_ESC_0F__SSE2()
12440 if ( (haveNo66noF2noF3(pfx) && sz == 4) in dis_ESC_0F__SSE2()
12441 || (have66noF2noF3(pfx) && sz == 2) ) { in dis_ESC_0F__SSE2()
12444 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
12446 storeLE( mkexpr(addr), getXMMReg(gregOfRexRM(pfx,modrm)) ); in dis_ESC_0F__SSE2()
12449 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
12463 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
12475 assign(f32lo, getXMMRegLane32F(eregOfRexRM(pfx,modrm), 0)); in dis_ESC_0F__SSE2()
12476 assign(f32hi, getXMMRegLane32F(eregOfRexRM(pfx,modrm), 1)); in dis_ESC_0F__SSE2()
12478 nameXMMReg(eregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
12481 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
12525 if (haveF3no66noF2(pfx) && (sz == 4 || sz == 8)) { in dis_ESC_0F__SSE2()
12526 delta = dis_CVTxSS2SI( vbi, pfx, delta, False/*!isAvx*/, opc, sz); in dis_ESC_0F__SSE2()
12541 if (haveF2no66noF3(pfx) && (sz == 4 || sz == 8)) { in dis_ESC_0F__SSE2()
12542 delta = dis_CVTxSD2SI( vbi, pfx, delta, False/*!isAvx*/, opc, sz); in dis_ESC_0F__SSE2()
12549 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
12561 assign(f64lo, getXMMRegLane64F(eregOfRexRM(pfx,modrm), 0)); in dis_ESC_0F__SSE2()
12562 assign(f64hi, getXMMRegLane64F(eregOfRexRM(pfx,modrm), 1)); in dis_ESC_0F__SSE2()
12564 nameXMMReg(eregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
12567 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
12601 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
12602 delta = dis_COMISD( vbi, pfx, delta, False/*!isAvx*/, opc ); in dis_ESC_0F__SSE2()
12607 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
12608 delta = dis_COMISS( vbi, pfx, delta, False/*!isAvx*/, opc ); in dis_ESC_0F__SSE2()
12616 if (haveNo66noF2noF3(pfx) && (sz == 4 || sz == 8) in dis_ESC_0F__SSE2()
12636 delta = dis_MOVMSKPS_128( vbi, pfx, delta, False/*!isAvx*/ ); in dis_ESC_0F__SSE2()
12641 if (have66noF2noF3(pfx) && (sz == 2 || sz == 8)) { in dis_ESC_0F__SSE2()
12647 delta = dis_MOVMSKPD_128( vbi, pfx, delta, False/*!isAvx*/ ); in dis_ESC_0F__SSE2()
12654 if (haveF3no66noF2(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
12655 delta = dis_SSE_E_to_G_unary_lo32( vbi, pfx, delta, in dis_ESC_0F__SSE2()
12660 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
12661 delta = dis_SSE_E_to_G_unary_all( vbi, pfx, delta, in dis_ESC_0F__SSE2()
12666 if (haveF2no66noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
12667 delta = dis_SSE_E_to_G_unary_lo64( vbi, pfx, delta, in dis_ESC_0F__SSE2()
12672 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
12673 delta = dis_SSE_E_to_G_unary_all( vbi, pfx, delta, in dis_ESC_0F__SSE2()
12681 if (haveF3no66noF2(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
12682 delta = dis_SSE_E_to_G_unary_lo32( vbi, pfx, delta, in dis_ESC_0F__SSE2()
12687 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
12688 delta = dis_SSE_E_to_G_unary_all( vbi, pfx, delta, in dis_ESC_0F__SSE2()
12696 if (haveF3no66noF2(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
12697 delta = dis_SSE_E_to_G_unary_lo32( vbi, pfx, delta, in dis_ESC_0F__SSE2()
12702 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
12703 delta = dis_SSE_E_to_G_unary_all( vbi, pfx, delta, in dis_ESC_0F__SSE2()
12711 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
12712 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "andps", Iop_AndV128 ); in dis_ESC_0F__SSE2()
12716 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
12717 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "andpd", Iop_AndV128 ); in dis_ESC_0F__SSE2()
12724 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
12725 delta = dis_SSE_E_to_G_all_invG( vbi, pfx, delta, "andnps", in dis_ESC_0F__SSE2()
12730 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
12731 delta = dis_SSE_E_to_G_all_invG( vbi, pfx, delta, "andnpd", in dis_ESC_0F__SSE2()
12739 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
12740 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "orps", Iop_OrV128 ); in dis_ESC_0F__SSE2()
12744 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
12745 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "orpd", Iop_OrV128 ); in dis_ESC_0F__SSE2()
12752 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
12753 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "xorpd", Iop_XorV128 ); in dis_ESC_0F__SSE2()
12757 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
12758 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "xorps", Iop_XorV128 ); in dis_ESC_0F__SSE2()
12765 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
12766 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "addps", Iop_Add32Fx4 ); in dis_ESC_0F__SSE2()
12770 if (haveF3no66noF2(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
12771 delta = dis_SSE_E_to_G_lo32( vbi, pfx, delta, "addss", Iop_Add32F0x4 ); in dis_ESC_0F__SSE2()
12775 if (haveF2no66noF3(pfx) in dis_ESC_0F__SSE2()
12777 delta = dis_SSE_E_to_G_lo64( vbi, pfx, delta, "addsd", Iop_Add64F0x2 ); in dis_ESC_0F__SSE2()
12781 if (have66noF2noF3(pfx) in dis_ESC_0F__SSE2()
12783 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "addpd", Iop_Add64Fx2 ); in dis_ESC_0F__SSE2()
12790 if (haveF2no66noF3(pfx) in dis_ESC_0F__SSE2()
12792 delta = dis_SSE_E_to_G_lo64( vbi, pfx, delta, "mulsd", Iop_Mul64F0x2 ); in dis_ESC_0F__SSE2()
12796 if (haveF3no66noF2(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
12797 delta = dis_SSE_E_to_G_lo32( vbi, pfx, delta, "mulss", Iop_Mul32F0x4 ); in dis_ESC_0F__SSE2()
12801 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
12802 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "mulps", Iop_Mul32Fx4 ); in dis_ESC_0F__SSE2()
12806 if (have66noF2noF3(pfx) in dis_ESC_0F__SSE2()
12808 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "mulpd", Iop_Mul64Fx2 ); in dis_ESC_0F__SSE2()
12816 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
12817 delta = dis_CVTPS2PD_128( vbi, pfx, delta, False/*!isAvx*/ ); in dis_ESC_0F__SSE2()
12822 if (haveF3no66noF2(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
12828 assign(f32lo, getXMMRegLane32F(eregOfRexRM(pfx,modrm), 0)); in dis_ESC_0F__SSE2()
12829 DIP("cvtss2sd %s,%s\n", nameXMMReg(eregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
12830 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
12832 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
12836 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
12839 putXMMRegLane64F( gregOfRexRM(pfx,modrm), 0, in dis_ESC_0F__SSE2()
12846 if (haveF2no66noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
12853 assign(f64lo, getXMMRegLane64F(eregOfRexRM(pfx,modrm), 0)); in dis_ESC_0F__SSE2()
12854 DIP("cvtsd2ss %s,%s\n", nameXMMReg(eregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
12855 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
12857 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
12861 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
12866 gregOfRexRM(pfx,modrm), 0, in dis_ESC_0F__SSE2()
12877 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
12878 delta = dis_CVTPD2PS_128( vbi, pfx, delta, False/*!isAvx*/ ); in dis_ESC_0F__SSE2()
12888 if ( (have66noF2noF3(pfx) && sz == 2) in dis_ESC_0F__SSE2()
12889 || (haveF3no66noF2(pfx) && sz == 4) ) { in dis_ESC_0F__SSE2()
12891 delta = dis_CVTxPS2DQ_128( vbi, pfx, delta, False/*!isAvx*/, r2zero ); in dis_ESC_0F__SSE2()
12896 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
12897 delta = dis_CVTDQ2PS_128( vbi, pfx, delta, False/*!isAvx*/ ); in dis_ESC_0F__SSE2()
12904 if (haveF3no66noF2(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
12905 delta = dis_SSE_E_to_G_lo32( vbi, pfx, delta, "subss", Iop_Sub32F0x4 ); in dis_ESC_0F__SSE2()
12909 if (haveF2no66noF3(pfx) in dis_ESC_0F__SSE2()
12911 delta = dis_SSE_E_to_G_lo64( vbi, pfx, delta, "subsd", Iop_Sub64F0x2 ); in dis_ESC_0F__SSE2()
12915 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
12916 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "subps", Iop_Sub32Fx4 ); in dis_ESC_0F__SSE2()
12920 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
12921 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "subpd", Iop_Sub64Fx2 ); in dis_ESC_0F__SSE2()
12928 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
12929 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "minps", Iop_Min32Fx4 ); in dis_ESC_0F__SSE2()
12933 if (haveF3no66noF2(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
12934 delta = dis_SSE_E_to_G_lo32( vbi, pfx, delta, "minss", Iop_Min32F0x4 ); in dis_ESC_0F__SSE2()
12938 if (haveF2no66noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
12939 delta = dis_SSE_E_to_G_lo64( vbi, pfx, delta, "minsd", Iop_Min64F0x2 ); in dis_ESC_0F__SSE2()
12943 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
12944 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "minpd", Iop_Min64Fx2 ); in dis_ESC_0F__SSE2()
12951 if (haveF2no66noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
12952 delta = dis_SSE_E_to_G_lo64( vbi, pfx, delta, "divsd", Iop_Div64F0x2 ); in dis_ESC_0F__SSE2()
12956 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
12957 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "divps", Iop_Div32Fx4 ); in dis_ESC_0F__SSE2()
12961 if (haveF3no66noF2(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
12962 delta = dis_SSE_E_to_G_lo32( vbi, pfx, delta, "divss", Iop_Div32F0x4 ); in dis_ESC_0F__SSE2()
12966 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
12967 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "divpd", Iop_Div64Fx2 ); in dis_ESC_0F__SSE2()
12974 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
12975 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "maxps", Iop_Max32Fx4 ); in dis_ESC_0F__SSE2()
12979 if (haveF3no66noF2(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
12980 delta = dis_SSE_E_to_G_lo32( vbi, pfx, delta, "maxss", Iop_Max32F0x4 ); in dis_ESC_0F__SSE2()
12984 if (haveF2no66noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
12985 delta = dis_SSE_E_to_G_lo64( vbi, pfx, delta, "maxsd", Iop_Max64F0x2 ); in dis_ESC_0F__SSE2()
12989 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
12990 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "maxpd", Iop_Max64Fx2 ); in dis_ESC_0F__SSE2()
12997 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
12998 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
13007 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
13008 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
13017 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
13018 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
13027 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
13028 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
13037 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
13038 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
13046 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
13047 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
13055 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
13056 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
13064 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
13065 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
13074 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
13075 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
13084 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
13085 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
13094 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
13095 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
13104 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
13105 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
13114 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
13115 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
13124 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
13125 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
13137 if (have66noF2noF3(pfx)) { in dis_ESC_0F__SSE2()
13145 gregOfRexRM(pfx,modrm), in dis_ESC_0F__SSE2()
13146 unop( Iop_32UtoV128, getIReg32(eregOfRexRM(pfx,modrm)) ) in dis_ESC_0F__SSE2()
13148 DIP("movd %s, %s\n", nameIReg32(eregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
13149 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
13152 gregOfRexRM(pfx,modrm), in dis_ESC_0F__SSE2()
13153 unop( Iop_64UtoV128, getIReg64(eregOfRexRM(pfx,modrm)) ) in dis_ESC_0F__SSE2()
13155 DIP("movq %s, %s\n", nameIReg64(eregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
13156 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
13159 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
13162 gregOfRexRM(pfx,modrm), in dis_ESC_0F__SSE2()
13168 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
13175 if (have66noF2noF3(pfx) in dis_ESC_0F__SSE2()
13180 putXMMReg( gregOfRexRM(pfx,modrm), in dis_ESC_0F__SSE2()
13181 getXMMReg( eregOfRexRM(pfx,modrm) )); in dis_ESC_0F__SSE2()
13182 DIP("movdqa %s,%s\n", nameXMMReg(eregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
13183 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
13186 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
13188 putXMMReg( gregOfRexRM(pfx,modrm), in dis_ESC_0F__SSE2()
13191 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
13196 if (haveF3no66noF2(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
13200 putXMMReg( gregOfRexRM(pfx,modrm), in dis_ESC_0F__SSE2()
13201 getXMMReg( eregOfRexRM(pfx,modrm) )); in dis_ESC_0F__SSE2()
13202 DIP("movdqu %s,%s\n", nameXMMReg(eregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
13203 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
13206 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
13207 putXMMReg( gregOfRexRM(pfx,modrm), in dis_ESC_0F__SSE2()
13210 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
13219 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
13220 delta = dis_PSHUFD_32x4( vbi, pfx, delta, False/*!writesYmm*/); in dis_ESC_0F__SSE2()
13225 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
13241 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, in dis_ESC_0F__SSE2()
13263 if (haveF2no66noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
13264 delta = dis_PSHUFxW_128( vbi, pfx, delta, in dis_ESC_0F__SSE2()
13270 if (haveF3no66noF2(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
13271 delta = dis_PSHUFxW_128( vbi, pfx, delta, in dis_ESC_0F__SSE2()
13279 if (have66noF2noF3(pfx) && sz == 2 in dis_ESC_0F__SSE2()
13282 delta = dis_SSE_shiftE_imm( pfx, delta, "psrlw", Iop_ShrN16x8 ); in dis_ESC_0F__SSE2()
13286 if (have66noF2noF3(pfx) && sz == 2 in dis_ESC_0F__SSE2()
13289 delta = dis_SSE_shiftE_imm( pfx, delta, "psraw", Iop_SarN16x8 ); in dis_ESC_0F__SSE2()
13293 if (have66noF2noF3(pfx) && sz == 2 in dis_ESC_0F__SSE2()
13296 delta = dis_SSE_shiftE_imm( pfx, delta, "psllw", Iop_ShlN16x8 ); in dis_ESC_0F__SSE2()
13303 if (have66noF2noF3(pfx) && sz == 2 in dis_ESC_0F__SSE2()
13306 delta = dis_SSE_shiftE_imm( pfx, delta, "psrld", Iop_ShrN32x4 ); in dis_ESC_0F__SSE2()
13310 if (have66noF2noF3(pfx) && sz == 2 in dis_ESC_0F__SSE2()
13313 delta = dis_SSE_shiftE_imm( pfx, delta, "psrad", Iop_SarN32x4 ); in dis_ESC_0F__SSE2()
13317 if (have66noF2noF3(pfx) && sz == 2 in dis_ESC_0F__SSE2()
13320 delta = dis_SSE_shiftE_imm( pfx, delta, "pslld", Iop_ShlN32x4 ); in dis_ESC_0F__SSE2()
13328 if (have66noF2noF3(pfx) && sz == 2 in dis_ESC_0F__SSE2()
13332 Int reg = eregOfRexRM(pfx,getUChar(delta)); in dis_ESC_0F__SSE2()
13342 if (have66noF2noF3(pfx) && sz == 2 in dis_ESC_0F__SSE2()
13346 Int reg = eregOfRexRM(pfx,getUChar(delta)); in dis_ESC_0F__SSE2()
13356 if (have66noF2noF3(pfx) && sz == 2 in dis_ESC_0F__SSE2()
13359 delta = dis_SSE_shiftE_imm( pfx, delta, "psrlq", Iop_ShrN64x2 ); in dis_ESC_0F__SSE2()
13363 if (have66noF2noF3(pfx) && sz == 2 in dis_ESC_0F__SSE2()
13366 delta = dis_SSE_shiftE_imm( pfx, delta, "psllq", Iop_ShlN64x2 ); in dis_ESC_0F__SSE2()
13373 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
13374 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
13382 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
13383 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
13391 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
13392 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
13401 if (haveF3no66noF2(pfx) in dis_ESC_0F__SSE2()
13405 putXMMRegLane64( gregOfRexRM(pfx,modrm), 0, in dis_ESC_0F__SSE2()
13406 getXMMRegLane64( eregOfRexRM(pfx,modrm), 0 )); in dis_ESC_0F__SSE2()
13408 putXMMRegLane64( gregOfRexRM(pfx,modrm), 1, mkU64(0) ); in dis_ESC_0F__SSE2()
13409 DIP("movsd %s,%s\n", nameXMMReg(eregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
13410 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
13413 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
13414 putXMMReg( gregOfRexRM(pfx,modrm), mkV128(0) ); in dis_ESC_0F__SSE2()
13415 putXMMRegLane64( gregOfRexRM(pfx,modrm), 0, in dis_ESC_0F__SSE2()
13418 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
13425 if (have66noF2noF3(pfx) && (sz == 2 || sz == 8)) { in dis_ESC_0F__SSE2()
13431 putIReg32( eregOfRexRM(pfx,modrm), in dis_ESC_0F__SSE2()
13432 getXMMRegLane32(gregOfRexRM(pfx,modrm), 0) ); in dis_ESC_0F__SSE2()
13433 DIP("movd %s, %s\n", nameXMMReg(gregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
13434 nameIReg32(eregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
13436 putIReg64( eregOfRexRM(pfx,modrm), in dis_ESC_0F__SSE2()
13437 getXMMRegLane64(gregOfRexRM(pfx,modrm), 0) ); in dis_ESC_0F__SSE2()
13438 DIP("movq %s, %s\n", nameXMMReg(gregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
13439 nameIReg64(eregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
13442 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
13446 ? getXMMRegLane32(gregOfRexRM(pfx,modrm),0) in dis_ESC_0F__SSE2()
13447 : getXMMRegLane64(gregOfRexRM(pfx,modrm),0) ); in dis_ESC_0F__SSE2()
13449 nameXMMReg(gregOfRexRM(pfx,modrm)), dis_buf); in dis_ESC_0F__SSE2()
13457 if (haveF3no66noF2(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
13462 putXMMReg( eregOfRexRM(pfx,modrm), in dis_ESC_0F__SSE2()
13463 getXMMReg(gregOfRexRM(pfx,modrm)) ); in dis_ESC_0F__SSE2()
13464 DIP("movdqu %s, %s\n", nameXMMReg(gregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
13465 nameXMMReg(eregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
13467 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
13469 storeLE( mkexpr(addr), getXMMReg(gregOfRexRM(pfx,modrm)) ); in dis_ESC_0F__SSE2()
13470 DIP("movdqu %s, %s\n", nameXMMReg(gregOfRexRM(pfx,modrm)), dis_buf); in dis_ESC_0F__SSE2()
13475 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
13479 putXMMReg( eregOfRexRM(pfx,modrm), in dis_ESC_0F__SSE2()
13480 getXMMReg(gregOfRexRM(pfx,modrm)) ); in dis_ESC_0F__SSE2()
13481 DIP("movdqa %s, %s\n", nameXMMReg(gregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
13482 nameXMMReg(eregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
13484 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
13487 storeLE( mkexpr(addr), getXMMReg(gregOfRexRM(pfx,modrm)) ); in dis_ESC_0F__SSE2()
13488 DIP("movdqa %s, %s\n", nameXMMReg(gregOfRexRM(pfx,modrm)), dis_buf); in dis_ESC_0F__SSE2()
13496 if (haveNo66noF2noF3(pfx) in dis_ESC_0F__SSE2()
13509 if (haveNo66noF2noF3(pfx) in dis_ESC_0F__SSE2()
13523 if (haveNo66noF2noF3(pfx) in dis_ESC_0F__SSE2()
13534 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
13553 if (haveNo66noF2noF3(pfx) in dis_ESC_0F__SSE2()
13556 delta = dis_STMXCSR(vbi, pfx, delta, False/*!isAvx*/); in dis_ESC_0F__SSE2()
13560 if (haveNo66noF2noF3(pfx) in dis_ESC_0F__SSE2()
13563 delta = dis_LDMXCSR(vbi, pfx, delta, False/*!isAvx*/); in dis_ESC_0F__SSE2()
13573 if (haveNo66noF2noF3(pfx) && (sz == 4 || sz == 8) in dis_ESC_0F__SSE2()
13575 && gregOfRexRM(pfx,getUChar(delta)) == 0) { in dis_ESC_0F__SSE2()
13580 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
13650 if (haveNo66noF2noF3(pfx) && (sz == 4 || sz == 8) in dis_ESC_0F__SSE2()
13652 && gregOfRexRM(pfx,getUChar(delta)) == 1) { in dis_ESC_0F__SSE2()
13657 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
13732 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
13734 delta = dis_SSE_cmp_E_to_G( vbi, pfx, delta, "cmpps", True, 4 ); in dis_ESC_0F__SSE2()
13738 if (haveF3no66noF2(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
13740 delta = dis_SSE_cmp_E_to_G( vbi, pfx, delta, "cmpss", False, 4 ); in dis_ESC_0F__SSE2()
13744 if (haveF2no66noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
13746 delta = dis_SSE_cmp_E_to_G( vbi, pfx, delta, "cmpsd", False, 8 ); in dis_ESC_0F__SSE2()
13750 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
13752 delta = dis_SSE_cmp_E_to_G( vbi, pfx, delta, "cmppd", True, 8 ); in dis_ESC_0F__SSE2()
13759 if (haveNo66noF2noF3(pfx) && (sz == 4 || sz == 8)) { in dis_ESC_0F__SSE2()
13762 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
13763 storeLE( mkexpr(addr), getIRegG(sz, pfx, modrm) ); in dis_ESC_0F__SSE2()
13765 nameIRegG(sz, pfx, modrm)); in dis_ESC_0F__SSE2()
13777 if (haveNo66noF2noF3(pfx) in dis_ESC_0F__SSE2()
13793 assign(t4, getIReg16(eregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
13797 nameIReg16(eregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
13800 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F__SSE2()
13821 if (have66noF2noF3(pfx) in dis_ESC_0F__SSE2()
13826 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__SSE2()
13828 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__SSE2()
13835 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, in dis_ESC_0F__SSE2()
13855 if (haveNo66noF2noF3(pfx) && (sz == 4 || sz == 8)) { in dis_ESC_0F__SSE2()
13871 putIReg64(gregOfRexRM(pfx,modrm), unop(Iop_16Uto64, mkexpr(t5))); in dis_ESC_0F__SSE2()
13873 putIReg32(gregOfRexRM(pfx,modrm), unop(Iop_16Uto32, mkexpr(t5))); in dis_ESC_0F__SSE2()
13877 sz==8 ? nameIReg64(gregOfRexRM(pfx,modrm)) in dis_ESC_0F__SSE2()
13878 : nameIReg32(gregOfRexRM(pfx,modrm)) in dis_ESC_0F__SSE2()
13890 if (have66noF2noF3(pfx) in dis_ESC_0F__SSE2()
13893 delta = dis_PEXTRW_128_EregOnly_toG( vbi, pfx, delta, in dis_ESC_0F__SSE2()
13902 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
13907 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__SSE2()
13910 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__SSE2()
13916 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F__SSE2()
13923 putXMMReg( gregOfRexRM(pfx,modrm), mkexpr(res) ); in dis_ESC_0F__SSE2()
13927 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
13933 assign( dV, getXMMReg(gregOfRexRM(pfx,modrm)) ); in dis_ESC_0F__SSE2()
13936 assign( sV, getXMMReg(eregOfRexRM(pfx,modrm)) ); in dis_ESC_0F__SSE2()
13940 nameXMMReg(eregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
13941 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
13943 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F__SSE2()
13949 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
13953 putXMMReg( gregOfRexRM(pfx,modrm), mkexpr(res) ); in dis_ESC_0F__SSE2()
13960 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
13961 delta = dis_SSE_shiftG_byE( vbi, pfx, delta, "psrlw", Iop_ShrN16x8 ); in dis_ESC_0F__SSE2()
13968 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
13969 delta = dis_SSE_shiftG_byE( vbi, pfx, delta, "psrld", Iop_ShrN32x4 ); in dis_ESC_0F__SSE2()
13976 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
13977 delta = dis_SSE_shiftG_byE( vbi, pfx, delta, "psrlq", Iop_ShrN64x2 ); in dis_ESC_0F__SSE2()
13984 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
13985 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
13991 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
13994 vbi, pfx, delta, opc, "paddq", False ); in dis_ESC_0F__SSE2()
14001 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14002 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
14011 if (haveF3no66noF2(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
14015 putXMMReg( gregOfRexRM(pfx,modrm), in dis_ESC_0F__SSE2()
14018 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
14026 if (have66noF2noF3(pfx) in dis_ESC_0F__SSE2()
14033 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
14035 getXMMRegLane64( gregOfRexRM(pfx,modrm), 0 )); in dis_ESC_0F__SSE2()
14036 DIP("movq %s,%s\n", nameXMMReg(gregOfRexRM(pfx,modrm)), dis_buf ); in dis_ESC_0F__SSE2()
14042 if (haveF2no66noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
14047 getXMMRegLane64( eregOfRexRM(pfx,modrm), 0 )); in dis_ESC_0F__SSE2()
14048 DIP("movdq2q %s,%s\n", nameXMMReg(eregOfRexRM(pfx,modrm)), in dis_ESC_0F__SSE2()
14062 if (have66noF2noF3(pfx) in dis_ESC_0F__SSE2()
14065 delta = dis_PMOVMSKB_128( vbi, pfx, delta, False/*!isAvx*/ ); in dis_ESC_0F__SSE2()
14072 if (haveNo66noF2noF3(pfx) in dis_ESC_0F__SSE2()
14081 putIReg32(gregOfRexRM(pfx,modrm), mkexpr(t1)); in dis_ESC_0F__SSE2()
14083 nameIReg32(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
14093 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14094 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
14102 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14103 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
14112 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
14115 vbi, pfx, delta, opc, "pminub", False ); in dis_ESC_0F__SSE2()
14119 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14120 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
14128 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14129 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "pand", Iop_AndV128 ); in dis_ESC_0F__SSE2()
14136 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14137 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
14145 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14146 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
14155 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
14158 vbi, pfx, delta, opc, "pmaxub", False ); in dis_ESC_0F__SSE2()
14162 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14163 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
14171 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14172 delta = dis_SSE_E_to_G_all_invG( vbi, pfx, delta, "pandn", Iop_AndV128 ); in dis_ESC_0F__SSE2()
14180 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
14183 vbi, pfx, delta, opc, "pavgb", False ); in dis_ESC_0F__SSE2()
14187 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14188 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
14196 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14197 delta = dis_SSE_shiftG_byE( vbi, pfx, delta, "psraw", Iop_SarN16x8 ); in dis_ESC_0F__SSE2()
14204 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14205 delta = dis_SSE_shiftG_byE( vbi, pfx, delta, "psrad", Iop_SarN32x4 ); in dis_ESC_0F__SSE2()
14213 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
14216 vbi, pfx, delta, opc, "pavgw", False ); in dis_ESC_0F__SSE2()
14220 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14221 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
14230 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
14233 vbi, pfx, delta, opc, "pmuluh", False ); in dis_ESC_0F__SSE2()
14237 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14238 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
14246 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14247 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
14259 if ( (haveF2no66noF3(pfx) && sz == 4) in dis_ESC_0F__SSE2()
14260 || (have66noF2noF3(pfx) && sz == 2) ) { in dis_ESC_0F__SSE2()
14261 delta = dis_CVTxPD2DQ_128( vbi, pfx, delta, False/*!isAvx*/, in dis_ESC_0F__SSE2()
14267 if (haveF3no66noF2(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
14268 delta = dis_CVTDQ2PD_128(vbi, pfx, delta, False/*!isAvx*/); in dis_ESC_0F__SSE2()
14280 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
14284 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
14294 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14297 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
14299 storeLE( mkexpr(addr), getXMMReg(gregOfRexRM(pfx,modrm)) ); in dis_ESC_0F__SSE2()
14301 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE2()
14311 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14312 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
14320 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14321 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
14330 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
14333 vbi, pfx, delta, opc, "pminsw", False ); in dis_ESC_0F__SSE2()
14337 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14338 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
14346 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14347 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "por", Iop_OrV128 ); in dis_ESC_0F__SSE2()
14354 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14355 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
14363 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14364 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
14373 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
14376 vbi, pfx, delta, opc, "pmaxsw", False ); in dis_ESC_0F__SSE2()
14380 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14381 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
14389 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14390 delta = dis_SSE_E_to_G_all( vbi, pfx, delta, "pxor", Iop_XorV128 ); in dis_ESC_0F__SSE2()
14397 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14398 delta = dis_SSE_shiftG_byE( vbi, pfx, delta, "psllw", Iop_ShlN16x8 ); in dis_ESC_0F__SSE2()
14405 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14406 delta = dis_SSE_shiftG_byE( vbi, pfx, delta, "pslld", Iop_ShlN32x4 ); in dis_ESC_0F__SSE2()
14413 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14414 delta = dis_SSE_shiftG_byE( vbi, pfx, delta, "psllq", Iop_ShlN64x2 ); in dis_ESC_0F__SSE2()
14423 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14427 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__SSE2()
14430 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__SSE2()
14435 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
14446 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
14462 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
14480 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14484 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__SSE2()
14486 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__SSE2()
14491 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
14505 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
14508 vbi, pfx, delta, opc, "psadbw", False ); in dis_ESC_0F__SSE2()
14513 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14517 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__SSE2()
14519 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__SSE2()
14524 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE2()
14539 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
14541 delta = dis_MMX( &ok, vbi, pfx, sz, delta-1 ); in dis_ESC_0F__SSE2()
14545 if (have66noF2noF3(pfx) && sz == 2 && epartIsReg(getUChar(delta))) { in dis_ESC_0F__SSE2()
14546 delta = dis_MASKMOVDQU( vbi, pfx, delta, False/*!isAvx*/ ); in dis_ESC_0F__SSE2()
14553 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14554 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
14562 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14563 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
14571 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14572 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
14580 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14581 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
14587 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE2()
14590 vbi, pfx, delta, opc, "psubq", False ); in dis_ESC_0F__SSE2()
14597 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14598 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
14606 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14607 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
14615 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE2()
14616 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F__SSE2()
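The dis_ESC_0F__SSE2 references above repeat one pattern: each opcode case is gated on which of the 66/F2/F3 prefixes are present (plus the operand size) before it commits to a packed-single, packed-double, scalar-single or scalar-double reading of the same escape opcode. A minimal standalone sketch of that dispatch for the 0F 58 (add) family follows; the PFX* values and add_variant are hypothetical names used only for this illustration, not the decoder's own API.

   #include <stdio.h>

   /* Hypothetical prefix bits, for illustration only. */
   enum { PFX66 = 1 << 0, PFXF2 = 1 << 1, PFXF3 = 1 << 2 };

   /* Pick the 0F 58 variant (addps/addpd/addss/addsd) from the prefixes,
      mirroring the no-66-no-F2-no-F3 / 66-only / F3-only / F2-only gates
      visible in the listing above. */
   static const char* add_variant ( unsigned pfx )
   {
      unsigned g = pfx & (PFX66 | PFXF2 | PFXF3);
      if (g == 0)     return "addps";  /* no prefix: packed single  */
      if (g == PFX66) return "addpd";  /* 66:        packed double  */
      if (g == PFXF3) return "addss";  /* F3:        scalar single  */
      if (g == PFXF2) return "addsd";  /* F2:        scalar double  */
      return NULL;                     /* conflicting prefixes      */
   }

   int main ( void )
   {
      printf("%s\n", add_variant(PFXF2));   /* prints "addsd" */
      return 0;
   }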
14643 static Long dis_MOVDDUP_128 ( const VexAbiInfo* vbi, Prefix pfx, in dis_MOVDDUP_128() argument
14652 UInt rG = gregOfRexRM(pfx,modrm); in dis_MOVDDUP_128()
14654 UInt rE = eregOfRexRM(pfx,modrm); in dis_MOVDDUP_128()
14661 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_MOVDDUP_128()
14673 static Long dis_MOVDDUP_256 ( const VexAbiInfo* vbi, Prefix pfx, in dis_MOVDDUP_256() argument
14682 UInt rG = gregOfRexRM(pfx,modrm); in dis_MOVDDUP_256()
14684 UInt rE = eregOfRexRM(pfx,modrm); in dis_MOVDDUP_256()
14690 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_MOVDDUP_256()
14705 static Long dis_MOVSxDUP_128 ( const VexAbiInfo* vbi, Prefix pfx, in dis_MOVSxDUP_128() argument
14713 UInt rG = gregOfRexRM(pfx,modrm); in dis_MOVSxDUP_128()
14717 UInt rE = eregOfRexRM(pfx,modrm); in dis_MOVSxDUP_128()
14723 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_MOVSxDUP_128()
14739 static Long dis_MOVSxDUP_256 ( const VexAbiInfo* vbi, Prefix pfx, in dis_MOVSxDUP_256() argument
14747 UInt rG = gregOfRexRM(pfx,modrm); in dis_MOVSxDUP_256()
14751 UInt rE = eregOfRexRM(pfx,modrm); in dis_MOVSxDUP_256()
14757 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_MOVSxDUP_256()
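dis_MOVDDUP_128/dis_MOVDDUP_256 and dis_MOVSxDUP_128/dis_MOVSxDUP_256 decode the SSE3/AVX duplication forms. As a rough reference for what the 128-bit MOVDDUP case computes, here is a plain-C sketch; the V128 struct (lane 0 = least-significant qword) is an assumption made only for this example.

   #include <stdint.h>
   #include <stdio.h>

   /* 128-bit value as two 64-bit lanes; lane 0 is the least significant. */
   typedef struct { uint64_t lane[2]; } V128;

   /* MOVDDUP: duplicate the low 64-bit lane into both result lanes. */
   static V128 movddup ( V128 src )
   {
      V128 res;
      res.lane[0] = src.lane[0];
      res.lane[1] = src.lane[0];
      return res;
   }

   int main ( void )
   {
      V128 x = { { 0x1111111111111111ULL, 0x2222222222222222ULL } };
      V128 y = movddup(x);
      printf("%016llx %016llx\n",
             (unsigned long long)y.lane[1], (unsigned long long)y.lane[0]);
      return 0;
   }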
14820 Prefix pfx, Int sz, Long deltaIN ) in dis_ESC_0F__SSE3() argument
14837 if (haveF3no66noF2(pfx) && sz == 4) { in dis_ESC_0F__SSE3()
14838 delta = dis_MOVSxDUP_128( vbi, pfx, delta, False/*!isAvx*/, in dis_ESC_0F__SSE3()
14844 if (haveF2no66noF3(pfx) in dis_ESC_0F__SSE3()
14846 delta = dis_MOVDDUP_128( vbi, pfx, delta, False/*!isAvx*/ ); in dis_ESC_0F__SSE3()
14854 if (haveF3no66noF2(pfx) && sz == 4) { in dis_ESC_0F__SSE3()
14855 delta = dis_MOVSxDUP_128( vbi, pfx, delta, False/*!isAvx*/, in dis_ESC_0F__SSE3()
14865 if (haveF2no66noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE3()
14871 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__SSE3()
14873 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__SSE3()
14878 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE3()
14890 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE3()
14896 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__SSE3()
14898 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__SSE3()
14903 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE3()
14917 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F__SSE3()
14921 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__SSE3()
14923 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__SSE3()
14928 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE3()
14939 if (haveF2no66noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE3()
14943 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__SSE3()
14947 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__SSE3()
14952 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE3()
14966 if (haveF2no66noF3(pfx) && sz == 4) { in dis_ESC_0F__SSE3()
14971 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__SSE3()
14972 putXMMReg( gregOfRexRM(pfx,modrm), in dis_ESC_0F__SSE3()
14975 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__SSE3()
15109 static Long dis_PHADD_128 ( const VexAbiInfo* vbi, Prefix pfx, Long delta, in dis_PHADD_128() argument
15126 UInt rG = gregOfRexRM(pfx,modrm); in dis_PHADD_128()
15127 UInt rV = isAvx ? getVexNvvvv(pfx) : rG; in dis_PHADD_128()
15146 UInt rE = eregOfRexRM(pfx,modrm); in dis_PHADD_128()
15152 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_PHADD_128()
15183 static Long dis_PHADD_256 ( const VexAbiInfo* vbi, Prefix pfx, Long delta, in dis_PHADD_256() argument
15198 UInt rG = gregOfRexRM(pfx,modrm); in dis_PHADD_256()
15199 UInt rV = getVexNvvvv(pfx); in dis_PHADD_256()
15218 UInt rE = eregOfRexRM(pfx,modrm); in dis_PHADD_256()
15223 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_PHADD_256()
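dis_PHADD_128 and dis_PHADD_256 cover the SSSE3 horizontal add/subtract family. A sketch of the PHADDW-style lane arithmetic, assuming 8 lanes of 16 bits per operand and wrap-around addition; the saturating and subtract variants differ only in the per-pair operation.

   #include <stdint.h>

   /* PHADDW-style horizontal add: each result lane is the wrap-around sum
      of a pair of adjacent input lanes; the low half of the result comes
      from the destination operand, the high half from the source. */
   static void phaddw ( int16_t res[8], const int16_t d[8], const int16_t s[8] )
   {
      for (int i = 0; i < 4; i++) {
         res[i]     = (int16_t)(d[2*i] + d[2*i + 1]);
         res[i + 4] = (int16_t)(s[2*i] + s[2*i + 1]);
      }
   }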
15301 Prefix pfx, Int sz, Long deltaIN ) in dis_ESC_0F38__SupSSE3() argument
15317 if (have66noF2noF3(pfx) in dis_ESC_0F38__SupSSE3()
15323 assign( dV, getXMMReg(gregOfRexRM(pfx,modrm)) ); in dis_ESC_0F38__SupSSE3()
15326 assign( sV, getXMMReg(eregOfRexRM(pfx,modrm)) ); in dis_ESC_0F38__SupSSE3()
15328 DIP("pshufb %s,%s\n", nameXMMReg(eregOfRexRM(pfx,modrm)), in dis_ESC_0F38__SupSSE3()
15329 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F38__SupSSE3()
15331 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__SupSSE3()
15336 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F38__SupSSE3()
15340 putXMMReg(gregOfRexRM(pfx,modrm), mkexpr(res)); in dis_ESC_0F38__SupSSE3()
15344 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F38__SupSSE3()
15358 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__SupSSE3()
15401 if (have66noF2noF3(pfx) in dis_ESC_0F38__SupSSE3()
15403 delta = dis_PHADD_128( vbi, pfx, delta, False/*isAvx*/, opc ); in dis_ESC_0F38__SupSSE3()
15419 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F38__SupSSE3()
15452 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__SupSSE3()
15473 if (have66noF2noF3(pfx) in dis_ESC_0F38__SupSSE3()
15478 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F38__SupSSE3()
15483 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F38__SupSSE3()
15488 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__SupSSE3()
15500 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F38__SupSSE3()
15518 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__SupSSE3()
15556 if (have66noF2noF3(pfx) in dis_ESC_0F38__SupSSE3()
15575 assign( dV, getXMMReg(gregOfRexRM(pfx,modrm)) ); in dis_ESC_0F38__SupSSE3()
15578 assign( sV, getXMMReg(eregOfRexRM(pfx,modrm)) ); in dis_ESC_0F38__SupSSE3()
15580 DIP("psign%s %s,%s\n", str, nameXMMReg(eregOfRexRM(pfx,modrm)), in dis_ESC_0F38__SupSSE3()
15581 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F38__SupSSE3()
15583 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__SupSSE3()
15588 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F38__SupSSE3()
15597 gregOfRexRM(pfx,modrm), in dis_ESC_0F38__SupSSE3()
15608 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F38__SupSSE3()
15631 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__SupSSE3()
15649 if (have66noF2noF3(pfx) in dis_ESC_0F38__SupSSE3()
15659 assign( dV, getXMMReg(gregOfRexRM(pfx,modrm)) ); in dis_ESC_0F38__SupSSE3()
15662 assign( sV, getXMMReg(eregOfRexRM(pfx,modrm)) ); in dis_ESC_0F38__SupSSE3()
15664 DIP("pmulhrsw %s,%s\n", nameXMMReg(eregOfRexRM(pfx,modrm)), in dis_ESC_0F38__SupSSE3()
15665 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F38__SupSSE3()
15667 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__SupSSE3()
15672 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F38__SupSSE3()
15681 gregOfRexRM(pfx,modrm), in dis_ESC_0F38__SupSSE3()
15691 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F38__SupSSE3()
15705 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__SupSSE3()
15726 if (have66noF2noF3(pfx) in dis_ESC_0F38__SupSSE3()
15741 assign( sV, getXMMReg(eregOfRexRM(pfx,modrm)) ); in dis_ESC_0F38__SupSSE3()
15743 DIP("pabs%s %s,%s\n", str, nameXMMReg(eregOfRexRM(pfx,modrm)), in dis_ESC_0F38__SupSSE3()
15744 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F38__SupSSE3()
15746 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__SupSSE3()
15751 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F38__SupSSE3()
15754 putXMMReg( gregOfRexRM(pfx,modrm), in dis_ESC_0F38__SupSSE3()
15761 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F38__SupSSE3()
15782 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__SupSSE3()
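The dis_ESC_0F38__SupSSE3 references include the PSHUFB case (see the DIP("pshufb ...") line above). Its per-byte selection rule is small enough to state as a standalone sketch; the array-based register representation is an assumption for illustration only.

   #include <stdint.h>

   /* PSHUFB-style byte shuffle on one 16-byte register: each control byte
      either zeroes the result lane (top bit set) or selects one of the 16
      source bytes by its low 4 bits. */
   static void pshufb ( uint8_t res[16], const uint8_t src[16],
                        const uint8_t ctl[16] )
   {
      for (int i = 0; i < 16; i++)
         res[i] = (ctl[i] & 0x80) ? 0 : src[ctl[i] & 0x0F];
   }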
15820 Prefix pfx, Int sz, Long deltaIN ) in dis_ESC_0F3A__SupSSE3() argument
15837 if (have66noF2noF3(pfx) in dis_ESC_0F3A__SupSSE3()
15843 assign( dV, getXMMReg(gregOfRexRM(pfx,modrm)) ); in dis_ESC_0F3A__SupSSE3()
15846 assign( sV, getXMMReg(eregOfRexRM(pfx,modrm)) ); in dis_ESC_0F3A__SupSSE3()
15850 nameXMMReg(eregOfRexRM(pfx,modrm)), in dis_ESC_0F3A__SupSSE3()
15851 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F3A__SupSSE3()
15853 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__SupSSE3()
15860 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F3A__SupSSE3()
15864 putXMMReg( gregOfRexRM(pfx,modrm), mkexpr(res) ); in dis_ESC_0F3A__SupSSE3()
15868 if (haveNo66noF2noF3(pfx) && sz == 4) { in dis_ESC_0F3A__SupSSE3()
15885 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__SupSSE3()
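Several call sites above pass a final argument of 1 rather than 0 to disAMode when an imm8 trails the addressing mode (as in these 0F 3A forms). Presumably that count lets RIP-relative displacements be resolved against the address of the next instruction; the helper below shows that calculation under that assumption and is hypothetical, not part of the decoder.

   #include <stdint.h>

   /* Hypothetical helper: resolve a RIP-relative displacement when some
      instruction bytes (e.g. an imm8) still follow the 4-byte displacement.
      addr_after_disp32 is the address just past the displacement field. */
   static uint64_t rip_relative_ea ( uint64_t addr_after_disp32,
                                     int32_t  disp32,
                                     int      trailing_bytes )
   {
      uint64_t rip_of_next_insn = addr_after_disp32 + (uint64_t)trailing_bytes;
      return rip_of_next_insn + (uint64_t)(int64_t)disp32;
   }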
15947 Prefix pfx, Int sz, Long deltaIN ) in dis_ESC_0F__SSE4() argument
15966 if (haveF3noF2(pfx) /* so both 66 and REX.W are possibilities */ in dis_ESC_0F__SSE4()
15972 assign(src, getIRegE(sz, pfx, modrm)); in dis_ESC_0F__SSE4()
15974 DIP("popcnt%c %s, %s\n", nameISize(sz), nameIRegE(sz, pfx, modrm), in dis_ESC_0F__SSE4()
15975 nameIRegG(sz, pfx, modrm)); in dis_ESC_0F__SSE4()
15977 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0); in dis_ESC_0F__SSE4()
15981 nameIRegG(sz, pfx, modrm)); in dis_ESC_0F__SSE4()
15985 putIRegG(sz, pfx, modrm, mkexpr(result)); in dis_ESC_0F__SSE4()
16010 if (haveF3noF2(pfx) /* so both 66 and 48 are possibilities */ in dis_ESC_0F__SSE4()
16017 assign(src, getIRegE(sz, pfx, modrm)); in dis_ESC_0F__SSE4()
16019 DIP("tzcnt%c %s, %s\n", nameISize(sz), nameIRegE(sz, pfx, modrm), in dis_ESC_0F__SSE4()
16020 nameIRegG(sz, pfx, modrm)); in dis_ESC_0F__SSE4()
16022 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0); in dis_ESC_0F__SSE4()
16026 nameIRegG(sz, pfx, modrm)); in dis_ESC_0F__SSE4()
16030 putIRegG(sz, pfx, modrm, mkexpr(res)); in dis_ESC_0F__SSE4()
16071 if (haveF3noF2(pfx) /* so both 66 and 48 are possibilities */ in dis_ESC_0F__SSE4()
16078 assign(src, getIRegE(sz, pfx, modrm)); in dis_ESC_0F__SSE4()
16080 DIP("lzcnt%c %s, %s\n", nameISize(sz), nameIRegE(sz, pfx, modrm), in dis_ESC_0F__SSE4()
16081 nameIRegG(sz, pfx, modrm)); in dis_ESC_0F__SSE4()
16083 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0); in dis_ESC_0F__SSE4()
16087 nameIRegG(sz, pfx, modrm)); in dis_ESC_0F__SSE4()
16091 putIRegG(sz, pfx, modrm, mkexpr(res)); in dis_ESC_0F__SSE4()
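The F3-gated cases above decode popcnt, tzcnt and lzcnt. Plain-C reference versions of the three counts for 32-bit operands follow, as a behavioural sketch rather than the IR the decoder emits; tzcnt and lzcnt return the operand width (32) for a zero input.

   #include <stdint.h>

   static int popcnt32 ( uint32_t x )
   {
      int n = 0;
      for (; x != 0; x >>= 1) n += (int)(x & 1);
      return n;
   }

   static int tzcnt32 ( uint32_t x )
   {
      if (x == 0) return 32;
      int n = 0;
      while ((x & 1) == 0) { n++; x >>= 1; }
      return n;
   }

   static int lzcnt32 ( uint32_t x )
   {
      if (x == 0) return 32;
      int n = 0;
      while ((x & 0x80000000u) == 0) { n++; x <<= 1; }
      return n;
   }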
16199 static Long dis_VBLENDV_128 ( const VexAbiInfo* vbi, Prefix pfx, Long delta, in dis_VBLENDV_128() argument
16206 UInt rG = gregOfRexRM(pfx, modrm); in dis_VBLENDV_128()
16207 UInt rV = getVexNvvvv(pfx); in dis_VBLENDV_128()
16214 UInt rE = eregOfRexRM(pfx, modrm); in dis_VBLENDV_128()
16222 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_VBLENDV_128()
16238 static Long dis_VBLENDV_256 ( const VexAbiInfo* vbi, Prefix pfx, Long delta, in dis_VBLENDV_256() argument
16245 UInt rG = gregOfRexRM(pfx, modrm); in dis_VBLENDV_256()
16246 UInt rV = getVexNvvvv(pfx); in dis_VBLENDV_256()
16253 UInt rE = eregOfRexRM(pfx, modrm); in dis_VBLENDV_256()
16261 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_VBLENDV_256()
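dis_VBLENDV_128 and dis_VBLENDV_256 decode the variable blends, where the top bit of each mask element picks between two sources. The byte-granularity (VPBLENDVB-style) rule as a sketch; the float/double forms use the same idea at 4- or 8-byte granularity.

   #include <stdint.h>

   /* VPBLENDVB-style select: the top bit of each mask byte chooses, per
      byte, between the two sources. */
   static void blendv_bytes ( uint8_t res[16], const uint8_t a[16],
                              const uint8_t b[16], const uint8_t mask[16] )
   {
      for (int i = 0; i < 16; i++)
         res[i] = (mask[i] & 0x80) ? b[i] : a[i];
   }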
16377 static Long dis_xTESTy_128 ( const VexAbiInfo* vbi, Prefix pfx, in dis_xTESTy_128() argument
16384 UInt rG = gregOfRexRM(pfx, modrm); in dis_xTESTy_128()
16389 UInt rE = eregOfRexRM(pfx, modrm); in dis_xTESTy_128()
16397 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_xTESTy_128()
16430 static Long dis_xTESTy_256 ( const VexAbiInfo* vbi, Prefix pfx, in dis_xTESTy_256() argument
16437 UInt rG = gregOfRexRM(pfx, modrm); in dis_xTESTy_256()
16442 UInt rE = eregOfRexRM(pfx, modrm); in dis_xTESTy_256()
16449 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_xTESTy_256()
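dis_xTESTy_128 and dis_xTESTy_256 handle the PTEST-style tests, which only set flags and write no register. A sketch of the basic 128-bit PTEST flag computation, reusing the two-qword V128 layout assumed earlier; the VTESTPS/VTESTPD variants, which look only at sign bits, are not shown.

   #include <stdint.h>

   typedef struct { uint64_t lane[2]; } V128;   /* lane 0 = low qword */

   /* PTEST-style flags: ZF is set when (dst AND src) is all zeroes,
      CF when ((NOT dst) AND src) is all zeroes. */
   static void ptest_flags ( V128 dst, V128 src, int* zf, int* cf )
   {
      uint64_t a = (dst.lane[0] & src.lane[0]) | (dst.lane[1] & src.lane[1]);
      uint64_t b = (~dst.lane[0] & src.lane[0]) | (~dst.lane[1] & src.lane[1]);
      *zf = (a == 0);
      *cf = (b == 0);
   }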
16488 static Long dis_PMOVxXBW_128 ( const VexAbiInfo* vbi, Prefix pfx, in dis_PMOVxXBW_128() argument
16498 UInt rG = gregOfRexRM(pfx, modrm); in dis_PMOVxXBW_128()
16500 UInt rE = eregOfRexRM(pfx, modrm); in dis_PMOVxXBW_128()
16505 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_PMOVxXBW_128()
16531 static Long dis_PMOVxXBW_256 ( const VexAbiInfo* vbi, Prefix pfx, in dis_PMOVxXBW_256() argument
16540 UInt rG = gregOfRexRM(pfx, modrm); in dis_PMOVxXBW_256()
16542 UInt rE = eregOfRexRM(pfx, modrm); in dis_PMOVxXBW_256()
16547 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_PMOVxXBW_256()
16571 static Long dis_PMOVxXWD_128 ( const VexAbiInfo* vbi, Prefix pfx, in dis_PMOVxXWD_128() argument
16581 UInt rG = gregOfRexRM(pfx, modrm); in dis_PMOVxXWD_128()
16584 UInt rE = eregOfRexRM(pfx, modrm); in dis_PMOVxXWD_128()
16589 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_PMOVxXWD_128()
16604 ( gregOfRexRM(pfx, modrm), res ); in dis_PMOVxXWD_128()
16610 static Long dis_PMOVxXWD_256 ( const VexAbiInfo* vbi, Prefix pfx, in dis_PMOVxXWD_256() argument
16619 UInt rG = gregOfRexRM(pfx, modrm); in dis_PMOVxXWD_256()
16622 UInt rE = eregOfRexRM(pfx, modrm); in dis_PMOVxXWD_256()
16627 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_PMOVxXWD_256()
16649 static Long dis_PMOVSXWQ_128 ( const VexAbiInfo* vbi, Prefix pfx, in dis_PMOVSXWQ_128() argument
16658 UInt rG = gregOfRexRM(pfx, modrm); in dis_PMOVSXWQ_128()
16661 UInt rE = eregOfRexRM(pfx, modrm); in dis_PMOVSXWQ_128()
16666 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_PMOVSXWQ_128()
16682 static Long dis_PMOVSXWQ_256 ( const VexAbiInfo* vbi, Prefix pfx, Long delta ) in dis_PMOVSXWQ_256() argument
16689 UInt rG = gregOfRexRM(pfx, modrm); in dis_PMOVSXWQ_256()
16694 UInt rE = eregOfRexRM(pfx, modrm); in dis_PMOVSXWQ_256()
16699 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_PMOVSXWQ_256()
16717 static Long dis_PMOVZXWQ_128 ( const VexAbiInfo* vbi, Prefix pfx, in dis_PMOVZXWQ_128() argument
16726 UInt rG = gregOfRexRM(pfx, modrm); in dis_PMOVZXWQ_128()
16729 UInt rE = eregOfRexRM(pfx, modrm); in dis_PMOVZXWQ_128()
16734 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_PMOVZXWQ_128()
16753 static Long dis_PMOVZXWQ_256 ( const VexAbiInfo* vbi, Prefix pfx, in dis_PMOVZXWQ_256() argument
16761 UInt rG = gregOfRexRM(pfx, modrm); in dis_PMOVZXWQ_256()
16764 UInt rE = eregOfRexRM(pfx, modrm); in dis_PMOVZXWQ_256()
16769 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_PMOVZXWQ_256()
16793 static Long dis_PMOVxXDQ_128 ( const VexAbiInfo* vbi, Prefix pfx, in dis_PMOVxXDQ_128() argument
16804 UInt rG = gregOfRexRM(pfx, modrm); in dis_PMOVxXDQ_128()
16810 UInt rE = eregOfRexRM(pfx, modrm); in dis_PMOVxXDQ_128()
16816 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_PMOVxXDQ_128()
16840 static Long dis_PMOVxXDQ_256 ( const VexAbiInfo* vbi, Prefix pfx, in dis_PMOVxXDQ_256() argument
16849 UInt rG = gregOfRexRM(pfx, modrm); in dis_PMOVxXDQ_256()
16855 UInt rE = eregOfRexRM(pfx, modrm); in dis_PMOVxXDQ_256()
16860 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_PMOVxXDQ_256()
16893 static Long dis_PMOVxXBD_128 ( const VexAbiInfo* vbi, Prefix pfx, in dis_PMOVxXBD_128() argument
16903 UInt rG = gregOfRexRM(pfx, modrm); in dis_PMOVxXBD_128()
16905 UInt rE = eregOfRexRM(pfx, modrm); in dis_PMOVxXBD_128()
16910 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_PMOVxXBD_128()
16936 static Long dis_PMOVxXBD_256 ( const VexAbiInfo* vbi, Prefix pfx, in dis_PMOVxXBD_256() argument
16945 UInt rG = gregOfRexRM(pfx, modrm); in dis_PMOVxXBD_256()
16947 UInt rE = eregOfRexRM(pfx, modrm); in dis_PMOVxXBD_256()
16952 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_PMOVxXBD_256()
16983 static Long dis_PMOVSXBQ_128 ( const VexAbiInfo* vbi, Prefix pfx, in dis_PMOVSXBQ_128() argument
16992 UInt rG = gregOfRexRM(pfx, modrm); in dis_PMOVSXBQ_128()
16994 UInt rE = eregOfRexRM(pfx, modrm); in dis_PMOVSXBQ_128()
16999 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_PMOVSXBQ_128()
17016 static Long dis_PMOVSXBQ_256 ( const VexAbiInfo* vbi, Prefix pfx, in dis_PMOVSXBQ_256() argument
17024 UInt rG = gregOfRexRM(pfx, modrm); in dis_PMOVSXBQ_256()
17026 UInt rE = eregOfRexRM(pfx, modrm); in dis_PMOVSXBQ_256()
17031 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_PMOVSXBQ_256()
17062 static Long dis_PMOVZXBQ_128 ( const VexAbiInfo* vbi, Prefix pfx, in dis_PMOVZXBQ_128() argument
17071 UInt rG = gregOfRexRM(pfx, modrm); in dis_PMOVZXBQ_128()
17073 UInt rE = eregOfRexRM(pfx, modrm); in dis_PMOVZXBQ_128()
17078 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_PMOVZXBQ_128()
17101 static Long dis_PMOVZXBQ_256 ( const VexAbiInfo* vbi, Prefix pfx, in dis_PMOVZXBQ_256() argument
17109 UInt rG = gregOfRexRM(pfx, modrm); in dis_PMOVZXBQ_256()
17111 UInt rE = eregOfRexRM(pfx, modrm); in dis_PMOVZXBQ_256()
17116 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_PMOVZXBQ_256()
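The dis_PMOVxX* helpers above all perform the same kind of widening: take the low portion of the source and sign- or zero-extend each element into a wider lane. A sketch of the byte-to-word case, with array-based operands assumed only for this example.

   #include <stdint.h>

   /* PMOVSXBW / PMOVZXBW-style widening: the low 8 bytes of the source
      become 8 words, sign- or zero-extended. */
   static void pmovxxbw ( int sign_extend, uint16_t res[8], const uint8_t src[8] )
   {
      for (int i = 0; i < 8; i++)
         res[i] = sign_extend ? (uint16_t)(int16_t)(int8_t)src[i]
                              : (uint16_t)src[i];
   }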
17145 static Long dis_PHMINPOSUW_128 ( const VexAbiInfo* vbi, Prefix pfx, in dis_PHMINPOSUW_128() argument
17157 UInt rG = gregOfRexRM(pfx,modrm); in dis_PHMINPOSUW_128()
17159 UInt rE = eregOfRexRM(pfx,modrm); in dis_PHMINPOSUW_128()
17164 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_PHMINPOSUW_128()
17185 static Long dis_AESx ( const VexAbiInfo* vbi, Prefix pfx, in dis_AESx() argument
17192 UInt rG = gregOfRexRM(pfx, modrm); in dis_AESx()
17194 UInt regNoR = (isAvx && opc != 0xDB) ? getVexNvvvv(pfx) : rG; in dis_AESx()
17203 regNoL = eregOfRexRM(pfx, modrm); in dis_AESx()
17207 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_AESx()
17277 static Long dis_AESKEYGENASSIST ( const VexAbiInfo* vbi, Prefix pfx, in dis_AESKEYGENASSIST() argument
17285 UInt regNoR = gregOfRexRM(pfx, modrm); in dis_AESKEYGENASSIST()
17291 regNoL = eregOfRexRM(pfx, modrm); in dis_AESKEYGENASSIST()
17296 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_AESKEYGENASSIST()
17346 Prefix pfx, Int sz, Long deltaIN ) in dis_ESC_0F38__SSE4() argument
17369 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F38__SSE4()
17393 assign(vecE, getXMMReg(eregOfRexRM(pfx, modrm))); in dis_ESC_0F38__SSE4()
17396 nameXMMReg( eregOfRexRM(pfx, modrm) ), in dis_ESC_0F38__SSE4()
17397 nameXMMReg( gregOfRexRM(pfx, modrm) ) ); in dis_ESC_0F38__SSE4()
17399 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__SSE4()
17404 dis_buf, nameXMMReg( gregOfRexRM(pfx, modrm) ) ); in dis_ESC_0F38__SSE4()
17407 assign(vecG, getXMMReg(gregOfRexRM(pfx, modrm))); in dis_ESC_0F38__SSE4()
17411 putXMMReg(gregOfRexRM(pfx, modrm), mkexpr(res)); in dis_ESC_0F38__SSE4()
17420 if (have66noF2noF3(pfx) in dis_ESC_0F38__SSE4()
17422 delta = dis_xTESTy_128( vbi, pfx, delta, False/*!isAvx*/, 0 ); in dis_ESC_0F38__SSE4()
17430 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F38__SSE4()
17431 delta = dis_PMOVxXBW_128( vbi, pfx, delta, in dis_ESC_0F38__SSE4()
17440 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F38__SSE4()
17441 delta = dis_PMOVxXBD_128( vbi, pfx, delta, in dis_ESC_0F38__SSE4()
17450 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F38__SSE4()
17451 delta = dis_PMOVSXBQ_128( vbi, pfx, delta, False/*!isAvx*/ ); in dis_ESC_0F38__SSE4()
17459 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F38__SSE4()
17460 delta = dis_PMOVxXWD_128(vbi, pfx, delta, in dis_ESC_0F38__SSE4()
17469 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F38__SSE4()
17470 delta = dis_PMOVSXWQ_128( vbi, pfx, delta, False/*!isAvx*/ ); in dis_ESC_0F38__SSE4()
17478 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F38__SSE4()
17479 delta = dis_PMOVxXDQ_128( vbi, pfx, delta, in dis_ESC_0F38__SSE4()
17491 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F38__SSE4()
17495 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F38__SSE4()
17498 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F38__SSE4()
17503 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__SSE4()
17517 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F38__SSE4()
17519 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F38__SSE4()
17529 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F38__SSE4()
17532 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__SSE4()
17534 putXMMReg( gregOfRexRM(pfx,modrm), in dis_ESC_0F38__SSE4()
17537 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F38__SSE4()
17547 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F38__SSE4()
17555 assign( argL, getXMMReg( eregOfRexRM(pfx, modrm) ) ); in dis_ESC_0F38__SSE4()
17558 nameXMMReg( eregOfRexRM(pfx, modrm) ), in dis_ESC_0F38__SSE4()
17559 nameXMMReg( gregOfRexRM(pfx, modrm) ) ); in dis_ESC_0F38__SSE4()
17561 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__SSE4()
17566 dis_buf, nameXMMReg( gregOfRexRM(pfx, modrm) ) ); in dis_ESC_0F38__SSE4()
17569 assign(argR, getXMMReg( gregOfRexRM(pfx, modrm) )); in dis_ESC_0F38__SSE4()
17571 putXMMReg( gregOfRexRM(pfx, modrm), in dis_ESC_0F38__SSE4()
17582 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F38__SSE4()
17583 delta = dis_PMOVxXBW_128( vbi, pfx, delta, in dis_ESC_0F38__SSE4()
17592 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F38__SSE4()
17593 delta = dis_PMOVxXBD_128( vbi, pfx, delta, in dis_ESC_0F38__SSE4()
17602 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F38__SSE4()
17603 delta = dis_PMOVZXBQ_128( vbi, pfx, delta, False/*!isAvx*/ ); in dis_ESC_0F38__SSE4()
17611 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F38__SSE4()
17612 delta = dis_PMOVxXWD_128( vbi, pfx, delta, in dis_ESC_0F38__SSE4()
17621 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F38__SSE4()
17622 delta = dis_PMOVZXWQ_128( vbi, pfx, delta, False/*!isAvx*/ ); in dis_ESC_0F38__SSE4()
17630 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F38__SSE4()
17631 delta = dis_PMOVxXDQ_128( vbi, pfx, delta, in dis_ESC_0F38__SSE4()
17641 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F38__SSE4()
17643 delta = dis_SSEint_E_to_G( vbi, pfx, delta, in dis_ESC_0F38__SSE4()
17654 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F38__SSE4()
17658 vbi, pfx, delta, in dis_ESC_0F38__SSE4()
17674 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F38__SSE4()
17678 vbi, pfx, delta, in dis_ESC_0F38__SSE4()
17694 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F38__SSE4()
17698 vbi, pfx, delta, in dis_ESC_0F38__SSE4()
17714 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F38__SSE4()
17718 vbi, pfx, delta, in dis_ESC_0F38__SSE4()
17730 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F38__SSE4()
17738 assign( argL, getXMMReg( eregOfRexRM(pfx, modrm) ) ); in dis_ESC_0F38__SSE4()
17741 nameXMMReg( eregOfRexRM(pfx, modrm) ), in dis_ESC_0F38__SSE4()
17742 nameXMMReg( gregOfRexRM(pfx, modrm) ) ); in dis_ESC_0F38__SSE4()
17744 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__SSE4()
17749 dis_buf, nameXMMReg( gregOfRexRM(pfx, modrm) ) ); in dis_ESC_0F38__SSE4()
17752 assign(argR, getXMMReg( gregOfRexRM(pfx, modrm) )); in dis_ESC_0F38__SSE4()
17754 putXMMReg( gregOfRexRM(pfx, modrm), in dis_ESC_0F38__SSE4()
17764 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F38__SSE4()
17765 delta = dis_PHMINPOSUW_128( vbi, pfx, delta, False/*!isAvx*/ ); in dis_ESC_0F38__SSE4()
17781 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F38__SSE4()
17782 delta = dis_AESx( vbi, pfx, delta, False/*!isAvx*/, opc ); in dis_ESC_0F38__SSE4()
17793 if (haveF2noF3(pfx) in dis_ESC_0F38__SSE4()
17794 && (opc == 0xF1 || (opc == 0xF0 && !have66(pfx)))) { in dis_ESC_0F38__SSE4()
17806 assign(valE, getIRegE(sz, pfx, modrm)); in dis_ESC_0F38__SSE4()
17808 DIP("crc32b %s,%s\n", nameIRegE(sz, pfx, modrm), in dis_ESC_0F38__SSE4()
17809 nameIRegG(1==getRexW(pfx) ? 8 : 4, pfx, modrm)); in dis_ESC_0F38__SSE4()
17811 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__SSE4()
17815 nameIRegG(1==getRexW(pfx) ? 8 : 4, pfx, modrm)); in dis_ESC_0F38__SSE4()
17823 assign(valG0, binop(Iop_And64, getIRegG(8, pfx, modrm), in dis_ESC_0F38__SSE4()
17845 putIRegG(4, pfx, modrm, unop(Iop_64to32, mkexpr(valG1))); in dis_ESC_0F38__SSE4()
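
The last cases of dis_ESC_0F38__SSE4 above (opcode 0xF0/0xF1 under an F2 prefix) decode crc32b and its wider forms, folding the E operand into the checksum held in the G register. For reference, a minimal standalone model of one byte-wide step follows; the SSE4.2 CRC32 instruction uses the CRC-32C (Castagnoli) polynomial, reflected form 0x82F63B78. This sketch documents the architectural arithmetic only, not the IR those cases build.

   /* Standalone model of one byte-wide CRC-32C step (reflected polynomial
      0x82F63B78); illustrative only, not the IR emitted by the 0xF0/0xF1
      cases listed above. */
   #include <stdint.h>
   static uint32_t crc32c_byte ( uint32_t crc, uint8_t b )
   {
      crc ^= b;
      for (int i = 0; i < 8; i++)
         crc = (crc >> 1) ^ (0x82F63B78u & (0u - (crc & 1u)));
      return crc;
   }
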
17871 static Long dis_PEXTRW ( const VexAbiInfo* vbi, Prefix pfx, in dis_PEXTRW() argument
17882 UInt rG = gregOfRexRM(pfx,modrm); in dis_PEXTRW()
17888 vassert(0==getRexW(pfx)); /* ensured by caller */ in dis_PEXTRW()
17895 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_PEXTRW()
17912 UInt rE = eregOfRexRM(pfx,modrm); in dis_PEXTRW()
17926 static Long dis_PEXTRD ( const VexAbiInfo* vbi, Prefix pfx, in dis_PEXTRD() argument
17943 vassert(0==getRexW(pfx)); /* ensured by caller */ in dis_PEXTRD()
17945 assign( xmm_vec, getXMMReg( gregOfRexRM(pfx,modrm) ) ); in dis_PEXTRD()
17951 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_PEXTRD()
17964 putIReg32( eregOfRexRM(pfx,modrm), mkexpr(src_dword) ); in dis_PEXTRD()
17967 nameXMMReg( gregOfRexRM(pfx, modrm) ), in dis_PEXTRD()
17968 nameIReg32( eregOfRexRM(pfx, modrm) ) ); in dis_PEXTRD()
17973 imm8_10, nameXMMReg( gregOfRexRM(pfx, modrm) ), dis_buf ); in dis_PEXTRD()
17979 static Long dis_PEXTRQ ( const VexAbiInfo* vbi, Prefix pfx, in dis_PEXTRQ() argument
17992 vassert(1==getRexW(pfx)); /* ensured by caller */ in dis_PEXTRQ()
17994 assign( xmm_vec, getXMMReg( gregOfRexRM(pfx,modrm) ) ); in dis_PEXTRQ()
17999 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_PEXTRQ()
18012 putIReg64( eregOfRexRM(pfx,modrm), mkexpr(src_qword) ); in dis_PEXTRQ()
18015 nameXMMReg( gregOfRexRM(pfx, modrm) ), in dis_PEXTRQ()
18016 nameIReg64( eregOfRexRM(pfx, modrm) ) ); in dis_PEXTRQ()
18021 imm8_0, nameXMMReg( gregOfRexRM(pfx, modrm) ), dis_buf ); in dis_PEXTRQ()
18160 static Long dis_PCMPxSTRx ( const VexAbiInfo* vbi, Prefix pfx, in dis_PCMPxSTRx() argument
18180 regNoL = eregOfRexRM(pfx, modrm); in dis_PCMPxSTRx()
18181 regNoR = gregOfRexRM(pfx, modrm); in dis_PCMPxSTRx()
18186 regNoR = gregOfRexRM(pfx, modrm); in dis_PCMPxSTRx()
18187 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_PCMPxSTRx()
18403 static Long dis_PEXTRB_128_GtoE ( const VexAbiInfo* vbi, Prefix pfx, in dis_PEXTRB_128_GtoE() argument
18416 assign( xmm_vec, getXMMReg( gregOfRexRM(pfx,modrm) ) ); in dis_PEXTRB_128_GtoE()
18423 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_PEXTRB_128_GtoE()
18437 putIReg64( eregOfRexRM(pfx,modrm), in dis_PEXTRB_128_GtoE()
18442 nameXMMReg( gregOfRexRM(pfx, modrm) ), in dis_PEXTRB_128_GtoE()
18443 nameIReg64( eregOfRexRM(pfx, modrm) ) ); in dis_PEXTRB_128_GtoE()
18448 imm8, nameXMMReg( gregOfRexRM(pfx, modrm) ), dis_buf ); in dis_PEXTRB_128_GtoE()
18579 static Long dis_EXTRACTPS ( const VexAbiInfo* vbi, Prefix pfx, in dis_EXTRACTPS() argument
18589 UInt rG = gregOfRexRM(pfx,modrm); in dis_EXTRACTPS()
18599 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_EXTRACTPS()
18612 UInt rE = eregOfRexRM(pfx,modrm); in dis_EXTRACTPS()
18659 Prefix pfx, Int sz, Long deltaIN ) in dis_ESC_0F3A__SSE4() argument
18675 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F3A__SSE4()
18692 getXMMRegLane32F( eregOfRexRM(pfx, modrm), 0 ) ); in dis_ESC_0F3A__SSE4()
18694 getXMMRegLane32F( eregOfRexRM(pfx, modrm), 1 ) ); in dis_ESC_0F3A__SSE4()
18696 getXMMRegLane32F( eregOfRexRM(pfx, modrm), 2 ) ); in dis_ESC_0F3A__SSE4()
18698 getXMMRegLane32F( eregOfRexRM(pfx, modrm), 3 ) ); in dis_ESC_0F3A__SSE4()
18703 imm, nameXMMReg( eregOfRexRM(pfx, modrm) ), in dis_ESC_0F3A__SSE4()
18704 nameXMMReg( gregOfRexRM(pfx, modrm) ) ); in dis_ESC_0F3A__SSE4()
18706 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__SSE4()
18720 imm, dis_buf, nameXMMReg( gregOfRexRM(pfx, modrm) ) ); in dis_ESC_0F3A__SSE4()
18734 putXMMRegLane32F( gregOfRexRM(pfx, modrm), 0, mkexpr(res0) ); in dis_ESC_0F3A__SSE4()
18735 putXMMRegLane32F( gregOfRexRM(pfx, modrm), 1, mkexpr(res1) ); in dis_ESC_0F3A__SSE4()
18736 putXMMRegLane32F( gregOfRexRM(pfx, modrm), 2, mkexpr(res2) ); in dis_ESC_0F3A__SSE4()
18737 putXMMRegLane32F( gregOfRexRM(pfx, modrm), 3, mkexpr(res3) ); in dis_ESC_0F3A__SSE4()
18745 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F3A__SSE4()
18758 getXMMRegLane64F( eregOfRexRM(pfx, modrm), 0 ) ); in dis_ESC_0F3A__SSE4()
18760 getXMMRegLane64F( eregOfRexRM(pfx, modrm), 1 ) ); in dis_ESC_0F3A__SSE4()
18765 imm, nameXMMReg( eregOfRexRM(pfx, modrm) ), in dis_ESC_0F3A__SSE4()
18766 nameXMMReg( gregOfRexRM(pfx, modrm) ) ); in dis_ESC_0F3A__SSE4()
18768 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__SSE4()
18778 imm, dis_buf, nameXMMReg( gregOfRexRM(pfx, modrm) ) ); in dis_ESC_0F3A__SSE4()
18790 putXMMRegLane64F( gregOfRexRM(pfx, modrm), 0, mkexpr(res0) ); in dis_ESC_0F3A__SSE4()
18791 putXMMRegLane64F( gregOfRexRM(pfx, modrm), 1, mkexpr(res1) ); in dis_ESC_0F3A__SSE4()
18802 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F3A__SSE4()
18813 isD ? getXMMRegLane64F( eregOfRexRM(pfx, modrm), 0 ) in dis_ESC_0F3A__SSE4()
18814 : getXMMRegLane32F( eregOfRexRM(pfx, modrm), 0 ) ); in dis_ESC_0F3A__SSE4()
18820 imm, nameXMMReg( eregOfRexRM(pfx, modrm) ), in dis_ESC_0F3A__SSE4()
18821 nameXMMReg( gregOfRexRM(pfx, modrm) ) ); in dis_ESC_0F3A__SSE4()
18823 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__SSE4()
18830 imm, dis_buf, nameXMMReg( gregOfRexRM(pfx, modrm) ) ); in dis_ESC_0F3A__SSE4()
18843 putXMMRegLane64F( gregOfRexRM(pfx, modrm), 0, mkexpr(res) ); in dis_ESC_0F3A__SSE4()
18845 putXMMRegLane32F( gregOfRexRM(pfx, modrm), 0, mkexpr(res) ); in dis_ESC_0F3A__SSE4()
18854 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F3A__SSE4()
18862 assign( dst_vec, getXMMReg( gregOfRexRM(pfx, modrm) ) ); in dis_ESC_0F3A__SSE4()
18866 assign( src_vec, getXMMReg( eregOfRexRM(pfx, modrm) ) ); in dis_ESC_0F3A__SSE4()
18869 nameXMMReg( eregOfRexRM(pfx, modrm) ), in dis_ESC_0F3A__SSE4()
18870 nameXMMReg( gregOfRexRM(pfx, modrm) ) ); in dis_ESC_0F3A__SSE4()
18872 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, in dis_ESC_0F3A__SSE4()
18879 imm8, dis_buf, nameXMMReg( gregOfRexRM(pfx, modrm) ) ); in dis_ESC_0F3A__SSE4()
18882 putXMMReg( gregOfRexRM(pfx, modrm), in dis_ESC_0F3A__SSE4()
18891 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F3A__SSE4()
18898 assign( dst_vec, getXMMReg( gregOfRexRM(pfx, modrm) ) ); in dis_ESC_0F3A__SSE4()
18902 assign( src_vec, getXMMReg( eregOfRexRM(pfx, modrm) ) ); in dis_ESC_0F3A__SSE4()
18905 nameXMMReg( eregOfRexRM(pfx, modrm) ), in dis_ESC_0F3A__SSE4()
18906 nameXMMReg( gregOfRexRM(pfx, modrm) ) ); in dis_ESC_0F3A__SSE4()
18908 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, in dis_ESC_0F3A__SSE4()
18915 imm8, dis_buf, nameXMMReg( gregOfRexRM(pfx, modrm) ) ); in dis_ESC_0F3A__SSE4()
18918 putXMMReg( gregOfRexRM(pfx, modrm), in dis_ESC_0F3A__SSE4()
18927 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F3A__SSE4()
18935 assign( dst_vec, getXMMReg( gregOfRexRM(pfx, modrm) ) ); in dis_ESC_0F3A__SSE4()
18939 assign( src_vec, getXMMReg( eregOfRexRM(pfx, modrm) ) ); in dis_ESC_0F3A__SSE4()
18942 nameXMMReg( eregOfRexRM(pfx, modrm) ), in dis_ESC_0F3A__SSE4()
18943 nameXMMReg( gregOfRexRM(pfx, modrm) ) ); in dis_ESC_0F3A__SSE4()
18945 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, in dis_ESC_0F3A__SSE4()
18952 imm8, dis_buf, nameXMMReg( gregOfRexRM(pfx, modrm) ) ); in dis_ESC_0F3A__SSE4()
18955 putXMMReg( gregOfRexRM(pfx, modrm), in dis_ESC_0F3A__SSE4()
18965 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F3A__SSE4()
18966 delta = dis_PEXTRB_128_GtoE( vbi, pfx, delta, False/*!isAvx*/ ); in dis_ESC_0F3A__SSE4()
18975 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F3A__SSE4()
18976 delta = dis_PEXTRW( vbi, pfx, delta, False/*!isAvx*/ ); in dis_ESC_0F3A__SSE4()
18986 if (have66noF2noF3(pfx) in dis_ESC_0F3A__SSE4()
18988 delta = dis_PEXTRD( vbi, pfx, delta, False/*!isAvx*/ ); in dis_ESC_0F3A__SSE4()
18995 if (have66noF2noF3(pfx) in dis_ESC_0F3A__SSE4()
18997 delta = dis_PEXTRQ( vbi, pfx, delta, False/*!isAvx*/); in dis_ESC_0F3A__SSE4()
19007 if (have66noF2noF3(pfx) in dis_ESC_0F3A__SSE4()
19009 delta = dis_EXTRACTPS( vbi, pfx, delta, False/*!isAvx*/ ); in dis_ESC_0F3A__SSE4()
19017 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F3A__SSE4()
19021 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__SSE4()
19023 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F3A__SSE4()
19030 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__SSE4()
19038 assign(src_vec, getXMMReg( gregOfRexRM(pfx, modrm) )); in dis_ESC_0F3A__SSE4()
19048 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F3A__SSE4()
19054 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__SSE4()
19057 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__SSE4()
19068 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__SSE4()
19087 if (have66noF2noF3(pfx) in dis_ESC_0F3A__SSE4()
19092 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__SSE4()
19095 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F3A__SSE4()
19102 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__SSE4()
19118 if (have66noF2noF3(pfx) in dis_ESC_0F3A__SSE4()
19123 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__SSE4()
19126 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F3A__SSE4()
19133 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__SSE4()
19152 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F3A__SSE4()
19157 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__SSE4()
19160 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__SSE4()
19167 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, in dis_ESC_0F3A__SSE4()
19185 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F3A__SSE4()
19190 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__SSE4()
19193 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__SSE4()
19200 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, in dis_ESC_0F3A__SSE4()
19218 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F3A__SSE4()
19223 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__SSE4()
19228 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__SSE4()
19236 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, in dis_ESC_0F3A__SSE4()
19255 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F3A__SSE4()
19261 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__SSE4()
19266 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__SSE4()
19273 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, in dis_ESC_0F3A__SSE4()
19299 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F3A__SSE4()
19301 delta = dis_PCMPxSTRx( vbi, pfx, delta, False/*!isAvx*/, opc ); in dis_ESC_0F3A__SSE4()
19309 if (have66noF2noF3(pfx) && sz == 2) { in dis_ESC_0F3A__SSE4()
19310 delta = dis_AESKEYGENASSIST( vbi, pfx, delta, False/*!isAvx*/ ); in dis_ESC_0F3A__SSE4()
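
Every memory form in the 0F 3A group above calls disAMode with a final argument of 1, while most calls elsewhere in this listing pass 0. Stated as an assumption rather than something the listing shows: these instructions carry a trailing imm8, and a RIP-relative displacement must be resolved against the start of the next instruction, i.e. past that immediate, so the amode decoder needs to know how many bytes still follow. A standalone illustration of that arithmetic, with invented names, is:

   /* Hypothetical helper, for illustration only: a RIP-relative effective
      address is formed from the next instruction's address, which lies
      beyond any immediate bytes that trail the displacement. */
   #include <stdint.h>
   static uint64_t rip_relative_ea ( uint64_t insn_start,
                                     uint64_t bytes_through_disp,
                                     uint64_t trailing_imm_bytes,
                                     int32_t  disp32 )
   {
      uint64_t next_rip = insn_start + bytes_through_disp + trailing_imm_bytes;
      return next_rip + (uint64_t)(int64_t)disp32;
   }
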
19346 Prefix pfx, Int sz, Long deltaIN in dis_ESC_NONE() argument
19375 Bool validF2orF3 = haveF2orF3(pfx) ? False : True; in dis_ESC_NONE()
19386 && haveF2orF3(pfx) && !haveF2andF3(pfx) && haveLOCK(pfx)) { in dis_ESC_NONE()
19403 delta = dis_op2_G_E ( vbi, pfx, False, Iop_Add8, True, 1, delta, "add" ); in dis_ESC_NONE()
19407 delta = dis_op2_G_E ( vbi, pfx, False, Iop_Add8, True, sz, delta, "add" ); in dis_ESC_NONE()
19411 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
19412 delta = dis_op2_E_G ( vbi, pfx, False, Iop_Add8, True, 1, delta, "add" ); in dis_ESC_NONE()
19415 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
19416 delta = dis_op2_E_G ( vbi, pfx, False, Iop_Add8, True, sz, delta, "add" ); in dis_ESC_NONE()
19420 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
19424 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
19430 delta = dis_op2_G_E ( vbi, pfx, False, Iop_Or8, True, 1, delta, "or" ); in dis_ESC_NONE()
19434 delta = dis_op2_G_E ( vbi, pfx, False, Iop_Or8, True, sz, delta, "or" ); in dis_ESC_NONE()
19438 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
19439 delta = dis_op2_E_G ( vbi, pfx, False, Iop_Or8, True, 1, delta, "or" ); in dis_ESC_NONE()
19442 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
19443 delta = dis_op2_E_G ( vbi, pfx, False, Iop_Or8, True, sz, delta, "or" ); in dis_ESC_NONE()
19447 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
19451 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
19457 delta = dis_op2_G_E ( vbi, pfx, True, Iop_Add8, True, 1, delta, "adc" ); in dis_ESC_NONE()
19461 delta = dis_op2_G_E ( vbi, pfx, True, Iop_Add8, True, sz, delta, "adc" ); in dis_ESC_NONE()
19465 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
19466 delta = dis_op2_E_G ( vbi, pfx, True, Iop_Add8, True, 1, delta, "adc" ); in dis_ESC_NONE()
19469 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
19470 delta = dis_op2_E_G ( vbi, pfx, True, Iop_Add8, True, sz, delta, "adc" ); in dis_ESC_NONE()
19474 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
19478 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
19484 delta = dis_op2_G_E ( vbi, pfx, True, Iop_Sub8, True, 1, delta, "sbb" ); in dis_ESC_NONE()
19488 delta = dis_op2_G_E ( vbi, pfx, True, Iop_Sub8, True, sz, delta, "sbb" ); in dis_ESC_NONE()
19492 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
19493 delta = dis_op2_E_G ( vbi, pfx, True, Iop_Sub8, True, 1, delta, "sbb" ); in dis_ESC_NONE()
19496 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
19497 delta = dis_op2_E_G ( vbi, pfx, True, Iop_Sub8, True, sz, delta, "sbb" ); in dis_ESC_NONE()
19501 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
19505 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
19511 delta = dis_op2_G_E ( vbi, pfx, False, Iop_And8, True, 1, delta, "and" ); in dis_ESC_NONE()
19515 delta = dis_op2_G_E ( vbi, pfx, False, Iop_And8, True, sz, delta, "and" ); in dis_ESC_NONE()
19519 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
19520 delta = dis_op2_E_G ( vbi, pfx, False, Iop_And8, True, 1, delta, "and" ); in dis_ESC_NONE()
19523 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
19524 delta = dis_op2_E_G ( vbi, pfx, False, Iop_And8, True, sz, delta, "and" ); in dis_ESC_NONE()
19528 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
19532 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
19538 delta = dis_op2_G_E ( vbi, pfx, False, Iop_Sub8, True, 1, delta, "sub" ); in dis_ESC_NONE()
19542 delta = dis_op2_G_E ( vbi, pfx, False, Iop_Sub8, True, sz, delta, "sub" ); in dis_ESC_NONE()
19546 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
19547 delta = dis_op2_E_G ( vbi, pfx, False, Iop_Sub8, True, 1, delta, "sub" ); in dis_ESC_NONE()
19550 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
19551 delta = dis_op2_E_G ( vbi, pfx, False, Iop_Sub8, True, sz, delta, "sub" ); in dis_ESC_NONE()
19555 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
19559 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
19565 delta = dis_op2_G_E ( vbi, pfx, False, Iop_Xor8, True, 1, delta, "xor" ); in dis_ESC_NONE()
19569 delta = dis_op2_G_E ( vbi, pfx, False, Iop_Xor8, True, sz, delta, "xor" ); in dis_ESC_NONE()
19573 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
19574 delta = dis_op2_E_G ( vbi, pfx, False, Iop_Xor8, True, 1, delta, "xor" ); in dis_ESC_NONE()
19577 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
19578 delta = dis_op2_E_G ( vbi, pfx, False, Iop_Xor8, True, sz, delta, "xor" ); in dis_ESC_NONE()
19582 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
19586 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
19591 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
19592 delta = dis_op2_G_E ( vbi, pfx, False, Iop_Sub8, False, 1, delta, "cmp" ); in dis_ESC_NONE()
19595 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
19596 delta = dis_op2_G_E ( vbi, pfx, False, Iop_Sub8, False, sz, delta, "cmp" ); in dis_ESC_NONE()
19600 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
19601 delta = dis_op2_E_G ( vbi, pfx, False, Iop_Sub8, False, 1, delta, "cmp" ); in dis_ESC_NONE()
19604 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
19605 delta = dis_op2_E_G ( vbi, pfx, False, Iop_Sub8, False, sz, delta, "cmp" ); in dis_ESC_NONE()
19609 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
19613 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
19628 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
19635 assign(t1, getIRegRexB(sz, pfx, opc-0x50)); in dis_ESC_NONE()
19639 DIP("push%c %s\n", nameISize(sz), nameIRegRexB(sz,pfx,opc-0x50)); in dis_ESC_NONE()
19650 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
19659 putIRegRexB(sz, pfx, opc-0x58, mkexpr(t1)); in dis_ESC_NONE()
19660 DIP("pop%c %s\n", nameISize(sz), nameIRegRexB(sz,pfx,opc-0x58)); in dis_ESC_NONE()
19664 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
19665 if (haveREX(pfx) && 1==getRexW(pfx)) { in dis_ESC_NONE()
19671 putIRegG(8, pfx, modrm, in dis_ESC_NONE()
19673 getIRegE(4, pfx, modrm))); in dis_ESC_NONE()
19675 nameIRegE(4, pfx, modrm), in dis_ESC_NONE()
19676 nameIRegG(8, pfx, modrm)); in dis_ESC_NONE()
19679 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_NONE()
19681 putIRegG(8, pfx, modrm, in dis_ESC_NONE()
19685 nameIRegG(8, pfx, modrm)); in dis_ESC_NONE()
19693 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
19701 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
19702 delta = dis_imul_I_E_G ( vbi, pfx, sz, delta, sz ); in dis_ESC_NONE()
19706 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
19726 delta = dis_imul_I_E_G ( vbi, pfx, sz, delta, 1 ); in dis_ESC_NONE()
19747 if (haveF3(pfx)) goto decode_failure; in dis_ESC_NONE()
19748 if (haveF2(pfx)) DIP("bnd ; "); /* MPX bnd prefix. */ in dis_ESC_NONE()
19810 if (epartIsReg(modrm) && haveF2orF3(pfx)) in dis_ESC_NONE()
19812 if (!epartIsReg(modrm) && haveF2andF3(pfx)) in dis_ESC_NONE()
19814 if (!epartIsReg(modrm) && haveF2orF3(pfx) && !haveLOCK(pfx)) in dis_ESC_NONE()
19816 am_sz = lengthAMode(pfx,delta); in dis_ESC_NONE()
19820 delta = dis_Grp1 ( vbi, pfx, delta, modrm, am_sz, d_sz, sz, d64 ); in dis_ESC_NONE()
19826 if (epartIsReg(modrm) && haveF2orF3(pfx)) in dis_ESC_NONE()
19828 if (!epartIsReg(modrm) && haveF2andF3(pfx)) in dis_ESC_NONE()
19830 if (!epartIsReg(modrm) && haveF2orF3(pfx) && !haveLOCK(pfx)) in dis_ESC_NONE()
19832 am_sz = lengthAMode(pfx,delta); in dis_ESC_NONE()
19835 delta = dis_Grp1 ( vbi, pfx, delta, modrm, am_sz, d_sz, sz, d64 ); in dis_ESC_NONE()
19839 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
19841 am_sz = lengthAMode(pfx,delta); in dis_ESC_NONE()
19844 delta = dis_Grp1 ( vbi, pfx, delta, modrm, am_sz, d_sz, sz, d64 ); in dis_ESC_NONE()
19848 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
19849 delta = dis_op2_E_G ( vbi, pfx, False, Iop_And8, False, 1, delta, "test" ); in dis_ESC_NONE()
19853 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
19854 delta = dis_op2_E_G ( vbi, pfx, False, Iop_And8, False, sz, delta, "test" ); in dis_ESC_NONE()
19869 if (haveF2orF3(pfx)) { in dis_ESC_NONE()
19873 if (haveF2andF3(pfx)) in dis_ESC_NONE()
19880 assign(t1, getIRegE(sz, pfx, modrm)); in dis_ESC_NONE()
19881 assign(t2, getIRegG(sz, pfx, modrm)); in dis_ESC_NONE()
19882 putIRegG(sz, pfx, modrm, mkexpr(t1)); in dis_ESC_NONE()
19883 putIRegE(sz, pfx, modrm, mkexpr(t2)); in dis_ESC_NONE()
19886 nameISize(sz), nameIRegG(sz, pfx, modrm), in dis_ESC_NONE()
19887 nameIRegE(sz, pfx, modrm)); in dis_ESC_NONE()
19890 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_NONE()
19892 assign( t2, getIRegG(sz, pfx, modrm) ); in dis_ESC_NONE()
19895 putIRegG( sz, pfx, modrm, mkexpr(t1) ); in dis_ESC_NONE()
19898 nameIRegG(sz, pfx, modrm), dis_buf); in dis_ESC_NONE()
19905 delta = dis_mov_G_E(vbi, pfx, 1, delta, &ok); in dis_ESC_NONE()
19913 delta = dis_mov_G_E(vbi, pfx, sz, delta, &ok); in dis_ESC_NONE()
19919 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
19920 delta = dis_mov_E_G(vbi, pfx, 1, delta); in dis_ESC_NONE()
19924 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
19925 delta = dis_mov_E_G(vbi, pfx, sz, delta); in dis_ESC_NONE()
19929 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
19938 addr = disAMode ( &alen, vbi, clearSegBits(pfx), delta, dis_buf, 0 ); in dis_ESC_NONE()
19943 putIRegG( sz, pfx, modrm, in dis_ESC_NONE()
19949 nameIRegG(sz,pfx,modrm)); in dis_ESC_NONE()
19957 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
19983 addr = disAMode ( &len, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_NONE()
19994 if (!have66(pfx) && !haveF2(pfx) && haveF3(pfx)) { in dis_ESC_NONE()
20004 !haveF2orF3(pfx) in dis_ESC_NONE()
20006 && getRexB(pfx)==0 ) { in dis_ESC_NONE()
20019 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20020 codegen_xchg_rAX_Reg ( pfx, sz, opc - 0x90 ); in dis_ESC_NONE()
20024 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20043 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20065 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20121 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20194 if (have66orF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20204 assign( addr, handleAddrOverrides(vbi, pfx, mkU64(d64)) ); in dis_ESC_NONE()
20207 segRegTxt(pfx), d64, in dis_ESC_NONE()
20212 if (have66orF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20222 assign( addr, handleAddrOverrides(vbi, pfx, mkU64(d64)) ); in dis_ESC_NONE()
20225 segRegTxt(pfx), d64); in dis_ESC_NONE()
20231 if (haveF3(pfx) && !haveF2(pfx)) { in dis_ESC_NONE()
20236 guest_RIP_bbstart+delta, "rep movs", pfx ); in dis_ESC_NONE()
20241 if (!haveF3(pfx) && !haveF2(pfx)) { in dis_ESC_NONE()
20244 dis_string_op( dis_MOVS, sz, "movs", pfx ); in dis_ESC_NONE()
20252 if (haveF3(pfx) && !haveF2(pfx)) { in dis_ESC_NONE()
20257 guest_RIP_bbstart+delta, "repe cmps", pfx ); in dis_ESC_NONE()
20266 if (haveF3(pfx) && !haveF2(pfx)) { in dis_ESC_NONE()
20271 guest_RIP_bbstart+delta, "rep stos", pfx ); in dis_ESC_NONE()
20276 if (!haveF3(pfx) && !haveF2(pfx)) { in dis_ESC_NONE()
20279 dis_string_op( dis_STOS, sz, "stos", pfx ); in dis_ESC_NONE()
20285 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20289 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20295 dis_string_op( dis_LODS, ( opc == 0xAC ? 1 : sz ), "lods", pfx ); in dis_ESC_NONE()
20301 if (haveF2(pfx) && !haveF3(pfx)) { in dis_ESC_NONE()
20306 guest_RIP_bbstart+delta, "repne scas", pfx ); in dis_ESC_NONE()
20311 if (!haveF2(pfx) && haveF3(pfx)) { in dis_ESC_NONE()
20316 guest_RIP_bbstart+delta, "repe scas", pfx ); in dis_ESC_NONE()
20321 if (!haveF2(pfx) && !haveF3(pfx)) { in dis_ESC_NONE()
20324 dis_string_op( dis_SCAS, sz, "scas", pfx ); in dis_ESC_NONE()
20338 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20341 putIRegRexB(1, pfx, opc-0xB0, mkU8(d64)); in dis_ESC_NONE()
20342 DIP("movb $%lld,%s\n", d64, nameIRegRexB(1,pfx,opc-0xB0)); in dis_ESC_NONE()
20355 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20359 putIRegRexB(8, pfx, opc-0xB8, mkU64(d64)); in dis_ESC_NONE()
20361 nameIRegRexB(8,pfx,opc-0xB8)); in dis_ESC_NONE()
20365 putIRegRexB(sz, pfx, opc-0xB8, in dis_ESC_NONE()
20369 nameIRegRexB(sz,pfx,opc-0xB8)); in dis_ESC_NONE()
20375 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20377 am_sz = lengthAMode(pfx,delta); in dis_ESC_NONE()
20381 delta = dis_Grp2 ( vbi, pfx, delta, modrm, am_sz, d_sz, sz, in dis_ESC_NONE()
20389 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20391 am_sz = lengthAMode(pfx,delta); in dis_ESC_NONE()
20394 delta = dis_Grp2 ( vbi, pfx, delta, modrm, am_sz, d_sz, sz, in dis_ESC_NONE()
20401 if (have66orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20402 if (haveF2(pfx)) DIP("bnd ; "); /* MPX bnd prefix. */ in dis_ESC_NONE()
20410 if (have66(pfx)) goto decode_failure; in dis_ESC_NONE()
20412 if (haveF2(pfx)) DIP("bnd ; "); /* MPX bnd prefix. */ in dis_ESC_NONE()
20414 DIP(haveF3(pfx) ? "rep ; ret\n" : "ret\n"); in dis_ESC_NONE()
20427 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20431 putIRegE(sz, pfx, modrm, in dis_ESC_NONE()
20435 nameIRegE(sz,pfx,modrm)); in dis_ESC_NONE()
20437 if (haveF2(pfx)) goto decode_failure; in dis_ESC_NONE()
20439 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, in dis_ESC_NONE()
20451 if (opc == 0xC7 && modrm == 0xF8 && !have66orF2orF3(pfx) && sz == 4 in dis_ESC_NONE()
20471 if (opc == 0xC6 && modrm == 0xF8 && !have66orF2orF3(pfx) && sz == 1 in dis_ESC_NONE()
20539 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20541 am_sz = lengthAMode(pfx,delta); in dis_ESC_NONE()
20545 delta = dis_Grp2 ( vbi, pfx, delta, modrm, am_sz, d_sz, sz, in dis_ESC_NONE()
20553 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20555 am_sz = lengthAMode(pfx,delta); in dis_ESC_NONE()
20558 delta = dis_Grp2 ( vbi, pfx, delta, modrm, am_sz, d_sz, sz, in dis_ESC_NONE()
20566 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20568 am_sz = lengthAMode(pfx,delta); in dis_ESC_NONE()
20571 delta = dis_Grp2 ( vbi, pfx, delta, modrm, am_sz, d_sz, sz, in dis_ESC_NONE()
20579 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20581 am_sz = lengthAMode(pfx,delta); in dis_ESC_NONE()
20583 delta = dis_Grp2 ( vbi, pfx, delta, modrm, am_sz, d_sz, sz, in dis_ESC_NONE()
20599 if (haveF2orF3(pfx)) in dis_ESC_NONE()
20628 delta = dis_FPU ( &decode_OK, vbi, pfx, delta ); in dis_ESC_NONE()
20645 if (have66orF2orF3(pfx) || 1==getRexW(pfx)) goto decode_failure; in dis_ESC_NONE()
20652 if (haveASO(pfx)) { in dis_ESC_NONE()
20687 DIP("loop%s%s 0x%llx\n", xtra, haveASO(pfx) ? "l" : "", d64); in dis_ESC_NONE()
20693 if (have66orF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20696 if (haveASO(pfx)) { in dis_ESC_NONE()
20751 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20800 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20816 if (haveF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20817 if (haveF2(pfx)) DIP("bnd ; "); /* MPX bnd prefix. */ in dis_ESC_NONE()
20840 if (haveF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20843 if (haveF2(pfx)) DIP("bnd ; "); /* MPX bnd prefix. */ in dis_ESC_NONE()
20857 if (haveF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20860 if (haveF2(pfx)) DIP("bnd ; "); /* MPX bnd prefix. */ in dis_ESC_NONE()
20910 delta = dis_Grp3 ( vbi, pfx, 1, delta, &decode_OK ); in dis_ESC_NONE()
20919 delta = dis_Grp3 ( vbi, pfx, sz, delta, &decode_OK ); in dis_ESC_NONE()
20925 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20931 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_NONE()
20940 delta = dis_Grp4 ( vbi, pfx, delta, &decode_OK ); in dis_ESC_NONE()
20949 delta = dis_Grp5 ( vbi, pfx, sz, delta, dres, &decode_OK ); in dis_ESC_NONE()
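
The string-instruction cases above split on the F2/F3 prefixes to choose between the one-shot forms handled by dis_string_op and the rep/repe/repne variants handled by dis_REP_op. As a reference for what the "rep movs" case ultimately expresses, here is a standalone byte-element model, assuming the direction flag is clear; it illustrates the architectural behaviour, not the IR these helpers generate.

   /* Standalone model of rep movs with byte elements and DF = 0;
      illustrative only. */
   #include <stdint.h>
   static void rep_movsb_model ( uint8_t* rdi, const uint8_t* rsi,
                                 uint64_t* rcx )
   {
      while (*rcx != 0) {
         *rdi = *rsi;
         rdi++; rsi++;   /* DF = 0: both pointers advance upwards */
         (*rcx)--;       /* the count in RCX decrements to zero   */
      }
   }
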
21057 Prefix pfx, Int sz, Long deltaIN in dis_ESC_0F() argument
21086 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F()
21210 if (have66orF2orF3(pfx)) goto decode_failure; in dis_ESC_0F()
21215 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F()
21225 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_0F()
21228 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F()
21243 if (have66orF2orF3(pfx)) goto decode_failure; in dis_ESC_0F()
21268 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_0F()
21269 delta = dis_cmov_E_G(vbi, pfx, sz, (AMD64Condcode)(opc - 0x40), delta); in dis_ESC_0F()
21290 if (haveF3(pfx)) goto decode_failure; in dis_ESC_0F()
21291 if (haveF2(pfx)) DIP("bnd ; "); /* MPX bnd prefix. */ in dis_ESC_0F()
21365 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_0F()
21371 putIRegE(1, pfx, modrm, mkexpr(t1)); in dis_ESC_0F()
21373 nameIRegE(1,pfx,modrm)); in dis_ESC_0F()
21375 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F()
21402 int bnd = gregOfRexRM(pfx,modrm); in dis_ESC_0F()
21405 oper = nameIReg64 (eregOfRexRM(pfx,modrm)); in dis_ESC_0F()
21408 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F()
21413 if (haveF3no66noF2 (pfx)) { in dis_ESC_0F()
21419 } else if (haveF2no66noF3 (pfx)) { in dis_ESC_0F()
21425 } else if (have66noF2noF3 (pfx)) { in dis_ESC_0F()
21431 } else if (haveNo66noF2noF3 (pfx)) { in dis_ESC_0F()
21469 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_0F()
21524 delta = dis_bt_G_E ( vbi, pfx, sz, delta, BtOpNone, &ok ); in dis_ESC_0F()
21531 d64 = delta + lengthAMode(pfx, delta); in dis_ESC_0F()
21534 vbi, pfx, delta, modrm, sz, in dis_ESC_0F()
21542 vbi, pfx, delta, modrm, sz, in dis_ESC_0F()
21551 delta = dis_bt_G_E ( vbi, pfx, sz, delta, BtOpSet, &ok ); in dis_ESC_0F()
21558 d64 = delta + lengthAMode(pfx, delta); in dis_ESC_0F()
21561 vbi, pfx, delta, modrm, sz, in dis_ESC_0F()
21569 vbi, pfx, delta, modrm, sz, in dis_ESC_0F()
21575 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_0F()
21576 delta = dis_mul_E_G ( vbi, pfx, sz, delta ); in dis_ESC_0F()
21582 delta = dis_cmpxchg_G_E ( &ok, vbi, pfx, 1, delta ); in dis_ESC_0F()
21591 delta = dis_cmpxchg_G_E ( &ok, vbi, pfx, sz, delta ); in dis_ESC_0F()
21600 delta = dis_bt_G_E ( vbi, pfx, sz, delta, BtOpReset, &ok ); in dis_ESC_0F()
21606 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_0F()
21609 delta = dis_movx_E_G ( vbi, pfx, delta, 1, sz, False ); in dis_ESC_0F()
21613 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_0F()
21616 delta = dis_movx_E_G ( vbi, pfx, delta, 2, sz, False ); in dis_ESC_0F()
21623 am_sz = lengthAMode(pfx,delta); in dis_ESC_0F()
21625 delta = dis_Grp8_Imm ( vbi, pfx, delta, modrm, am_sz, sz, d64, in dis_ESC_0F()
21636 delta = dis_bt_G_E ( vbi, pfx, sz, delta, BtOpComp, &ok ); in dis_ESC_0F()
21642 if (!haveF2orF3(pfx) in dis_ESC_0F()
21643 || (haveF3noF2(pfx) in dis_ESC_0F()
21647 delta = dis_bs_E_G ( vbi, pfx, sz, delta, True ); in dis_ESC_0F()
21655 if (!haveF2orF3(pfx) in dis_ESC_0F()
21656 || (haveF3noF2(pfx) in dis_ESC_0F()
21660 delta = dis_bs_E_G ( vbi, pfx, sz, delta, False ); in dis_ESC_0F()
21668 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_0F()
21671 delta = dis_movx_E_G ( vbi, pfx, delta, 1, sz, True ); in dis_ESC_0F()
21675 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_0F()
21678 delta = dis_movx_E_G ( vbi, pfx, delta, 2, sz, True ); in dis_ESC_0F()
21683 delta = dis_xadd_G_E ( &decode_OK, vbi, pfx, 1, delta ); in dis_ESC_0F()
21691 delta = dis_xadd_G_E ( &decode_OK, vbi, pfx, sz, delta ); in dis_ESC_0F()
21722 if (have66(pfx)) goto decode_failure; in dis_ESC_0F()
21729 if (haveF2orF3(pfx)) { in dis_ESC_0F()
21734 if (haveF2andF3(pfx) || !haveLOCK(pfx)) goto decode_failure; in dis_ESC_0F()
21737 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F()
21842 if (haveF2orF3(pfx)) goto decode_failure; in dis_ESC_0F()
21847 assign( t1, getIRegRexB(4, pfx, opc-0xC8) ); in dis_ESC_0F()
21849 putIRegRexB(4, pfx, opc-0xC8, mkexpr(t2)); in dis_ESC_0F()
21850 DIP("bswapl %s\n", nameIRegRexB(4, pfx, opc-0xC8)); in dis_ESC_0F()
21856 assign( t1, getIRegRexB(8, pfx, opc-0xC8) ); in dis_ESC_0F()
21858 putIRegRexB(8, pfx, opc-0xC8, mkexpr(t2)); in dis_ESC_0F()
21859 DIP("bswapq %s\n", nameIRegRexB(8, pfx, opc-0xC8)); in dis_ESC_0F()
21873 if (!have66orF2orF3(pfx)) { in dis_ESC_0F()
21950 delta = dis_MMX ( &decode_OK, vbi, pfx, sz, deltaIN ); in dis_ESC_0F()
21977 delta = dis_ESC_0F__SSE2 ( &decode_OK, vbi, pfx, sz, deltaIN, dres ); in dis_ESC_0F()
21987 delta = dis_ESC_0F__SSE3 ( &decode_OK, vbi, pfx, sz, deltaIN ); in dis_ESC_0F()
21998 archinfo, vbi, pfx, sz, deltaIN ); in dis_ESC_0F()
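
The 0xC8..0xCF cases near the end of dis_ESC_0F above implement bswapl and bswapq, taking the register number from opc-0xC8 (extended by REX.B via the RexB accessors). Purely as a reference for the byte reordering the 32-bit form performs, and not as decoder code:

   /* Standalone reference for the 32-bit byte swap (bswapl); the 64-bit
      form (bswapq) reverses all eight bytes analogously. */
   #include <stdint.h>
   static uint32_t bswap32 ( uint32_t x )
   {
      return  (x >> 24)
           | ((x >>  8) & 0x0000FF00u)
           | ((x <<  8) & 0x00FF0000u)
           |  (x << 24);
   }
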
22023 Prefix pfx, Int sz, Long deltaIN in dis_ESC_0F38() argument
22033 if (!haveF2orF3(pfx) && !haveVEX(pfx) in dis_ESC_0F38()
22041 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38()
22048 putIRegG(sz, pfx, modrm, mkexpr(dst)); in dis_ESC_0F38()
22049 DIP("movbe %s,%s\n", dis_buf, nameIRegG(sz, pfx, modrm)); in dis_ESC_0F38()
22051 assign(src, getIRegG(sz, pfx, modrm)); in dis_ESC_0F38()
22054 DIP("movbe %s,%s\n", nameIRegG(sz, pfx, modrm), dis_buf); in dis_ESC_0F38()
22073 delta = dis_ESC_0F38__SupSSE3 ( &decode_OK, vbi, pfx, sz, deltaIN ); in dis_ESC_0F38()
22083 delta = dis_ESC_0F38__SSE4 ( &decode_OK, vbi, pfx, sz, deltaIN ); in dis_ESC_0F38()
22108 Prefix pfx, Int sz, Long deltaIN in dis_ESC_0F3A() argument
22126 delta = dis_ESC_0F3A__SupSSE3 ( &decode_OK, vbi, pfx, sz, deltaIN ); in dis_ESC_0F3A()
22136 delta = dis_ESC_0F3A__SSE4 ( &decode_OK, vbi, pfx, sz, deltaIN ); in dis_ESC_0F3A()
22155 Prefix pfx, Long delta, const HChar* name, in dis_VEX_NDS_128_AnySimdPfx_0F_WIG() argument
22164 UInt rD = gregOfRexRM(pfx, modrm); in dis_VEX_NDS_128_AnySimdPfx_0F_WIG()
22165 UInt rSL = getVexNvvvv(pfx); in dis_VEX_NDS_128_AnySimdPfx_0F_WIG()
22171 vassert(0==getVexL(pfx)/*128*/ && 0==getRexW(pfx)/*WIG?*/); in dis_VEX_NDS_128_AnySimdPfx_0F_WIG()
22177 UInt rSR = eregOfRexRM(pfx, modrm); in dis_VEX_NDS_128_AnySimdPfx_0F_WIG()
22183 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_VEX_NDS_128_AnySimdPfx_0F_WIG()
22223 Prefix pfx, Long delta, const HChar* name, in dis_VEX_NDS_128_AnySimdPfx_0F_WIG_simple() argument
22228 uses_vvvv, vbi, pfx, delta, name, op, NULL, False, False); in dis_VEX_NDS_128_AnySimdPfx_0F_WIG_simple()
22238 Prefix pfx, Long delta, const HChar* name, in dis_VEX_NDS_128_AnySimdPfx_0F_WIG_complex() argument
22243 uses_vvvv, vbi, pfx, delta, name, in dis_VEX_NDS_128_AnySimdPfx_0F_WIG_complex()
22251 Prefix pfx, Long delta, in dis_AVX128_shiftV_byE() argument
22259 UInt rG = gregOfRexRM(pfx,modrm); in dis_AVX128_shiftV_byE()
22260 UInt rV = getVexNvvvv(pfx); in dis_AVX128_shiftV_byE()
22266 UInt rE = eregOfRexRM(pfx,modrm); in dis_AVX128_shiftV_byE()
22272 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_AVX128_shiftV_byE()
22325 Prefix pfx, Long delta, in dis_AVX256_shiftV_byE() argument
22333 UInt rG = gregOfRexRM(pfx,modrm); in dis_AVX256_shiftV_byE()
22334 UInt rV = getVexNvvvv(pfx); in dis_AVX256_shiftV_byE()
22340 UInt rE = eregOfRexRM(pfx,modrm); in dis_AVX256_shiftV_byE()
22346 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_AVX256_shiftV_byE()
22401 Prefix pfx, Long delta, in dis_AVX_var_shiftV_byE() argument
22408 UInt rG = gregOfRexRM(pfx,modrm); in dis_AVX_var_shiftV_byE()
22409 UInt rV = getVexNvvvv(pfx); in dis_AVX_var_shiftV_byE()
22414 UInt rE = eregOfRexRM(pfx,modrm); in dis_AVX_var_shiftV_byE()
22425 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_AVX_var_shiftV_byE()
22511 Long dis_AVX128_shiftE_to_V_imm( Prefix pfx, in dis_AVX128_shiftE_to_V_imm() argument
22518 UInt rD = getVexNvvvv(pfx); in dis_AVX128_shiftE_to_V_imm()
22527 nameXMMReg(eregOfRexRM(pfx,rm)), in dis_AVX128_shiftE_to_V_imm()
22529 assign( e0, getXMMReg(eregOfRexRM(pfx,rm)) ); in dis_AVX128_shiftE_to_V_imm()
22568 Long dis_AVX256_shiftE_to_V_imm( Prefix pfx, in dis_AVX256_shiftE_to_V_imm() argument
22575 UInt rD = getVexNvvvv(pfx); in dis_AVX256_shiftE_to_V_imm()
22584 nameYMMReg(eregOfRexRM(pfx,rm)), in dis_AVX256_shiftE_to_V_imm()
22586 assign( e0, getYMMReg(eregOfRexRM(pfx,rm)) ); in dis_AVX256_shiftE_to_V_imm()
22632 Prefix pfx, Long delta, in dis_AVX128_E_V_to_G_lo64() argument
22639 UInt rG = gregOfRexRM(pfx,rm); in dis_AVX128_E_V_to_G_lo64()
22640 UInt rV = getVexNvvvv(pfx); in dis_AVX128_E_V_to_G_lo64()
22643 UInt rE = eregOfRexRM(pfx,rm); in dis_AVX128_E_V_to_G_lo64()
22652 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_AVX128_E_V_to_G_lo64()
22675 Prefix pfx, Long delta, in dis_AVX128_E_V_to_G_lo64_unary() argument
22682 UInt rG = gregOfRexRM(pfx,rm); in dis_AVX128_E_V_to_G_lo64_unary()
22683 UInt rV = getVexNvvvv(pfx); in dis_AVX128_E_V_to_G_lo64_unary()
22688 UInt rE = eregOfRexRM(pfx,rm); in dis_AVX128_E_V_to_G_lo64_unary()
22694 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_AVX128_E_V_to_G_lo64_unary()
22722 Prefix pfx, Long delta, in dis_AVX128_E_V_to_G_lo32_unary() argument
22729 UInt rG = gregOfRexRM(pfx,rm); in dis_AVX128_E_V_to_G_lo32_unary()
22730 UInt rV = getVexNvvvv(pfx); in dis_AVX128_E_V_to_G_lo32_unary()
22735 UInt rE = eregOfRexRM(pfx,rm); in dis_AVX128_E_V_to_G_lo32_unary()
22741 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_AVX128_E_V_to_G_lo32_unary()
22769 Prefix pfx, Long delta, in dis_AVX128_E_V_to_G_lo32() argument
22776 UInt rG = gregOfRexRM(pfx,rm); in dis_AVX128_E_V_to_G_lo32()
22777 UInt rV = getVexNvvvv(pfx); in dis_AVX128_E_V_to_G_lo32()
22780 UInt rE = eregOfRexRM(pfx,rm); in dis_AVX128_E_V_to_G_lo32()
22789 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_AVX128_E_V_to_G_lo32()
22809 Prefix pfx, Long delta, in dis_AVX128_E_V_to_G() argument
22813 uses_vvvv, vbi, pfx, delta, opname, op, in dis_AVX128_E_V_to_G()
22825 Prefix pfx, Long delta, in dis_AVX128_cmp_V_E_to_G() argument
22839 UInt rG = gregOfRexRM(pfx, rm); in dis_AVX128_cmp_V_E_to_G()
22840 UInt rV = getVexNvvvv(pfx); in dis_AVX128_cmp_V_E_to_G()
22849 UInt rE = eregOfRexRM(pfx,rm); in dis_AVX128_cmp_V_E_to_G()
22856 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_AVX128_cmp_V_E_to_G()
22935 Prefix pfx, Long delta, in dis_AVX256_cmp_V_E_to_G() argument
22949 UInt rG = gregOfRexRM(pfx, rm); in dis_AVX256_cmp_V_E_to_G()
22950 UInt rV = getVexNvvvv(pfx); in dis_AVX256_cmp_V_E_to_G()
22964 UInt rE = eregOfRexRM(pfx,rm); in dis_AVX256_cmp_V_E_to_G()
22971 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_AVX256_cmp_V_E_to_G()
23005 Prefix pfx, Long delta, in dis_AVX128_E_to_G_unary() argument
23015 UInt rG = gregOfRexRM(pfx, rm); in dis_AVX128_E_to_G_unary()
23017 UInt rE = eregOfRexRM(pfx,rm); in dis_AVX128_E_to_G_unary()
23022 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_AVX128_E_to_G_unary()
23038 Prefix pfx, Long delta, in dis_AVX128_E_to_G_unary_all() argument
23046 UInt rG = gregOfRexRM(pfx, rm); in dis_AVX128_E_to_G_unary_all()
23048 UInt rE = eregOfRexRM(pfx,rm); in dis_AVX128_E_to_G_unary_all()
23053 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_AVX128_E_to_G_unary_all()
23074 Prefix pfx, Long delta, const HChar* name, in dis_VEX_NDS_256_AnySimdPfx_0F_WIG() argument
23083 UInt rD = gregOfRexRM(pfx, modrm); in dis_VEX_NDS_256_AnySimdPfx_0F_WIG()
23084 UInt rSL = getVexNvvvv(pfx); in dis_VEX_NDS_256_AnySimdPfx_0F_WIG()
23090 vassert(1==getVexL(pfx)/*256*/ && 0==getRexW(pfx)/*WIG?*/); in dis_VEX_NDS_256_AnySimdPfx_0F_WIG()
23096 UInt rSR = eregOfRexRM(pfx, modrm); in dis_VEX_NDS_256_AnySimdPfx_0F_WIG()
23102 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_VEX_NDS_256_AnySimdPfx_0F_WIG()
23141 Prefix pfx, Long delta, in dis_AVX256_E_V_to_G() argument
23145 uses_vvvv, vbi, pfx, delta, opname, op, in dis_AVX256_E_V_to_G()
23157 Prefix pfx, Long delta, const HChar* name, in dis_VEX_NDS_256_AnySimdPfx_0F_WIG_simple() argument
23162 uses_vvvv, vbi, pfx, delta, name, op, NULL, False, False); in dis_VEX_NDS_256_AnySimdPfx_0F_WIG_simple()
23172 Prefix pfx, Long delta, const HChar* name, in dis_VEX_NDS_256_AnySimdPfx_0F_WIG_complex() argument
23177 uses_vvvv, vbi, pfx, delta, name, in dis_VEX_NDS_256_AnySimdPfx_0F_WIG_complex()
23186 Prefix pfx, Long delta, in dis_AVX256_E_to_G_unary() argument
23196 UInt rG = gregOfRexRM(pfx, rm); in dis_AVX256_E_to_G_unary()
23198 UInt rE = eregOfRexRM(pfx,rm); in dis_AVX256_E_to_G_unary()
23203 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_AVX256_E_to_G_unary()
23219 Prefix pfx, Long delta, in dis_AVX256_E_to_G_unary_all() argument
23227 UInt rG = gregOfRexRM(pfx, rm); in dis_AVX256_E_to_G_unary_all()
23229 UInt rE = eregOfRexRM(pfx,rm); in dis_AVX256_E_to_G_unary_all()
23234 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_AVX256_E_to_G_unary_all()
23247 static Long dis_CVTDQ2PD_256 ( const VexAbiInfo* vbi, Prefix pfx, in dis_CVTDQ2PD_256() argument
23255 UInt rG = gregOfRexRM(pfx,modrm); in dis_CVTDQ2PD_256()
23257 UInt rE = eregOfRexRM(pfx,modrm); in dis_CVTDQ2PD_256()
23262 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_CVTDQ2PD_256()
23283 static Long dis_CVTPD2PS_256 ( const VexAbiInfo* vbi, Prefix pfx, in dis_CVTPD2PS_256() argument
23290 UInt rG = gregOfRexRM(pfx,modrm); in dis_CVTPD2PS_256()
23294 UInt rE = eregOfRexRM(pfx,modrm); in dis_CVTPD2PS_256()
23299 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_CVTPD2PS_256()
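
The AVX helpers above consistently pull three operands per instruction: rG from the ModRM reg field via gregOfRexRM, rE from the ModRM r/m field via eregOfRexRM (or a memory amode via disAMode), and rV, the extra non-destructive source, via getVexNvvvv. How vvvv sits in the instruction bytes is not visible in this listing, so purely as an architectural note rather than a statement about the decoder's Prefix representation: vvvv occupies bits 6..3 of the final VEX payload byte, stored ones-complemented.

   /* Architectural note only: recover the vvvv register number from the
      last VEX payload byte (the byte that also carries L and pp). */
   #include <stdint.h>
   static unsigned vex_vvvv ( uint8_t last_vex_byte )
   {
      return (~(unsigned)last_vex_byte >> 3) & 0xFu;
   }
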
23417 Prefix pfx, Int sz, Long deltaIN in dis_ESC_0F__VEX() argument
23434 if (haveF2no66noF3(pfx) && !epartIsReg(getUChar(delta))) { in dis_ESC_0F__VEX()
23436 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
23437 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
23450 if (haveF2no66noF3(pfx) && epartIsReg(getUChar(delta))) { in dis_ESC_0F__VEX()
23452 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
23453 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
23454 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F__VEX()
23469 if (haveF3no66noF2(pfx) && !epartIsReg(getUChar(delta))) { in dis_ESC_0F__VEX()
23471 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
23472 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
23485 if (haveF3no66noF2(pfx) && epartIsReg(getUChar(delta))) { in dis_ESC_0F__VEX()
23487 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
23488 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
23489 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F__VEX()
23504 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
23506 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
23508 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
23513 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
23521 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
23523 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
23525 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
23530 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
23538 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
23540 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
23542 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
23547 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
23555 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
23557 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
23559 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
23564 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
23576 if (haveF2no66noF3(pfx) && !epartIsReg(getUChar(delta))) { in dis_ESC_0F__VEX()
23578 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
23579 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
23588 if (haveF2no66noF3(pfx) && epartIsReg(getUChar(delta))) { in dis_ESC_0F__VEX()
23590 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
23591 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
23592 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F__VEX()
23606 if (haveF3no66noF2(pfx) && !epartIsReg(getUChar(delta))) { in dis_ESC_0F__VEX()
23608 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
23609 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
23618 if (haveF3no66noF2(pfx) && epartIsReg(getUChar(delta))) { in dis_ESC_0F__VEX()
23620 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
23621 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
23622 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F__VEX()
23637 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
23639 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
23641 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
23646 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
23654 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
23656 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
23658 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
23663 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
23671 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
23673 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
23675 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
23680 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
23688 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
23690 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
23692 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
23697 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
23708 if (haveF2no66noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
23709 delta = dis_MOVDDUP_128( vbi, pfx, delta, True/*isAvx*/ ); in dis_ESC_0F__VEX()
23713 if (haveF2no66noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
23714 delta = dis_MOVDDUP_256( vbi, pfx, delta ); in dis_ESC_0F__VEX()
23719 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/ in dis_ESC_0F__VEX()
23722 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
23723 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
23724 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F__VEX()
23740 if ((have66noF2noF3(pfx) || haveNo66noF2noF3(pfx)) in dis_ESC_0F__VEX()
23741 && 0==getVexL(pfx)/*128*/ && !epartIsReg(getUChar(delta))) { in dis_ESC_0F__VEX()
23743 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
23744 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F__VEX()
23745 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
23758 if (haveF3no66noF2(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
23759 delta = dis_MOVSxDUP_128( vbi, pfx, delta, True/*isAvx*/, in dis_ESC_0F__VEX()
23764 if (haveF3no66noF2(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
23765 delta = dis_MOVSxDUP_256( vbi, pfx, delta, True/*isL*/ ); in dis_ESC_0F__VEX()
23775 if ((have66noF2noF3(pfx) || haveNo66noF2noF3(pfx)) in dis_ESC_0F__VEX()
23776 && 0==getVexL(pfx)/*128*/ && !epartIsReg(getUChar(delta))) { in dis_ESC_0F__VEX()
23778 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
23779 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
23791 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
23794 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
23795 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F__VEX()
23800 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
23806 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
23819 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
23822 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
23823 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F__VEX()
23828 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
23834 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
23847 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
23850 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
23851 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F__VEX()
23856 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
23862 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
23875 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
23878 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
23879 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F__VEX()
23884 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
23890 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
23906 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/ in dis_ESC_0F__VEX()
23909 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
23910 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
23911 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F__VEX()
23927 if ((have66noF2noF3(pfx) || haveNo66noF2noF3(pfx)) in dis_ESC_0F__VEX()
23928 && 0==getVexL(pfx)/*128*/ && !epartIsReg(getUChar(delta))) { in dis_ESC_0F__VEX()
23930 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
23931 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F__VEX()
23932 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
23934 DIP("vmovhp%c %s,%s,%s\n", have66(pfx) ? 'd' : 's', in dis_ESC_0F__VEX()
23945 if (haveF3no66noF2(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
23946 delta = dis_MOVSxDUP_128( vbi, pfx, delta, True/*isAvx*/, in dis_ESC_0F__VEX()
23951 if (haveF3no66noF2(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
23952 delta = dis_MOVSxDUP_256( vbi, pfx, delta, False/*!isL*/ ); in dis_ESC_0F__VEX()
23962 if ((have66noF2noF3(pfx) || haveNo66noF2noF3(pfx)) in dis_ESC_0F__VEX()
23963 && 0==getVexL(pfx)/*128*/ && !epartIsReg(getUChar(delta))) { in dis_ESC_0F__VEX()
23965 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
23966 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
23969 DIP("vmovhp%c %s,%s\n", have66(pfx) ? 'd' : 's', in dis_ESC_0F__VEX()
23977 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
23979 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
23981 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
23986 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
23995 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
23997 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
23999 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
24004 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
24013 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
24015 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
24017 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
24022 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
24031 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
24033 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
24035 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
24040 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
24052 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
24054 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
24056 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
24061 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
24070 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
24072 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
24074 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
24079 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
24088 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
24090 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
24092 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
24098 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
24107 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
24109 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
24111 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
24117 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
24131 if (haveF2no66noF3(pfx) && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F__VEX()
24133 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F__VEX()
24134 UInt rD = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
24137 UInt rS = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
24143 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
24157 if (haveF2no66noF3(pfx) && 1==getRexW(pfx)/*W1*/) { in dis_ESC_0F__VEX()
24159 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F__VEX()
24160 UInt rD = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
24163 UInt rS = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
24169 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
24185 if (haveF3no66noF2(pfx) && 1==getRexW(pfx)/*W1*/) { in dis_ESC_0F__VEX()
24187 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F__VEX()
24188 UInt rD = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
24191 UInt rS = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
24197 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
24215 if (haveF3no66noF2(pfx) && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F__VEX()
24217 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F__VEX()
24218 UInt rD = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
24221 UInt rS = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
24227 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
24249 if ((have66noF2noF3(pfx) || haveNo66noF2noF3(pfx)) in dis_ESC_0F__VEX()
24250 && 0==getVexL(pfx)/*128*/ && !epartIsReg(getUChar(delta))) { in dis_ESC_0F__VEX()
24252 UInt rS = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
24255 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
24259 DIP("vmovntp%c %s,%s\n", have66(pfx) ? 'd' : 's', in dis_ESC_0F__VEX()
24265 if ((have66noF2noF3(pfx) || haveNo66noF2noF3(pfx)) in dis_ESC_0F__VEX()
24266 && 1==getVexL(pfx)/*256*/ && !epartIsReg(getUChar(delta))) { in dis_ESC_0F__VEX()
24268 UInt rS = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
24271 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
24275 DIP("vmovntp%c %s,%s\n", have66(pfx) ? 'd' : 's', in dis_ESC_0F__VEX()
24283 if (haveF2no66noF3(pfx) && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F__VEX()
24284 delta = dis_CVTxSD2SI( vbi, pfx, delta, True/*isAvx*/, opc, 4); in dis_ESC_0F__VEX()
24288 if (haveF2no66noF3(pfx) && 1==getRexW(pfx)/*W1*/) { in dis_ESC_0F__VEX()
24289 delta = dis_CVTxSD2SI( vbi, pfx, delta, True/*isAvx*/, opc, 8); in dis_ESC_0F__VEX()
24293 if (haveF3no66noF2(pfx) && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F__VEX()
24294 delta = dis_CVTxSS2SI( vbi, pfx, delta, True/*isAvx*/, opc, 4); in dis_ESC_0F__VEX()
24298 if (haveF3no66noF2(pfx) && 1==getRexW(pfx)/*W1*/) { in dis_ESC_0F__VEX()
24299 delta = dis_CVTxSS2SI( vbi, pfx, delta, True/*isAvx*/, opc, 8); in dis_ESC_0F__VEX()
24306 if (haveF2no66noF3(pfx) && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F__VEX()
24307 delta = dis_CVTxSD2SI( vbi, pfx, delta, True/*isAvx*/, opc, 4); in dis_ESC_0F__VEX()
24311 if (haveF2no66noF3(pfx) && 1==getRexW(pfx)/*W1*/) { in dis_ESC_0F__VEX()
24312 delta = dis_CVTxSD2SI( vbi, pfx, delta, True/*isAvx*/, opc, 8); in dis_ESC_0F__VEX()
24316 if (haveF3no66noF2(pfx) && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F__VEX()
24317 delta = dis_CVTxSS2SI( vbi, pfx, delta, True/*isAvx*/, opc, 4); in dis_ESC_0F__VEX()
24321 if (haveF3no66noF2(pfx) && 1==getRexW(pfx)/*W1*/) { in dis_ESC_0F__VEX()
24322 delta = dis_CVTxSS2SI( vbi, pfx, delta, True/*isAvx*/, opc, 8); in dis_ESC_0F__VEX()
24331 if (have66noF2noF3(pfx)) { in dis_ESC_0F__VEX()
24332 delta = dis_COMISD( vbi, pfx, delta, True/*isAvx*/, opc ); in dis_ESC_0F__VEX()
24337 if (haveNo66noF2noF3(pfx)) { in dis_ESC_0F__VEX()
24338 delta = dis_COMISS( vbi, pfx, delta, True/*isAvx*/, opc ); in dis_ESC_0F__VEX()
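
The dis_COMISD/dis_COMISS calls at 24332 and 24338 fold a scalar FP compare into the integer condition flags. As a hedged, stand-alone restatement of the architectural mapping these helpers have to model (not the file's own code), the unordered case is the one worth remembering:

   /* Assumed semantics of (V)COMISS/(V)UCOMISS flag setting: unordered
    * gives ZF=PF=CF=1, otherwise PF=0 and ZF/CF encode equal/less-than;
    * OF, SF and AF are cleared in every case. */
   #include <math.h>

   typedef struct { int zf, pf, cf; } ComiFlags;

   static ComiFlags comiss_flags ( float a, float b )
   {
      ComiFlags f;
      if (isnan(a) || isnan(b)) { f.zf = 1; f.pf = 1; f.cf = 1; }
      else                      { f.zf = (a == b); f.pf = 0; f.cf = (a < b); }
      return f;
   }
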
24345 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
24346 delta = dis_MOVMSKPD_128( vbi, pfx, delta, True/*isAvx*/ ); in dis_ESC_0F__VEX()
24350 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
24351 delta = dis_MOVMSKPD_256( vbi, pfx, delta ); in dis_ESC_0F__VEX()
24355 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
24356 delta = dis_MOVMSKPS_128( vbi, pfx, delta, True/*isAvx*/ ); in dis_ESC_0F__VEX()
24360 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
24361 delta = dis_MOVMSKPS_256( vbi, pfx, delta ); in dis_ESC_0F__VEX()
24368 if (haveF3no66noF2(pfx)) { in dis_ESC_0F__VEX()
24370 uses_vvvv, vbi, pfx, delta, "vsqrtss", Iop_Sqrt32F0x4 ); in dis_ESC_0F__VEX()
24374 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
24376 uses_vvvv, vbi, pfx, delta, "vsqrtps", Iop_Sqrt32Fx4 ); in dis_ESC_0F__VEX()
24380 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
24382 uses_vvvv, vbi, pfx, delta, "vsqrtps", Iop_Sqrt32Fx8 ); in dis_ESC_0F__VEX()
24386 if (haveF2no66noF3(pfx)) { in dis_ESC_0F__VEX()
24388 uses_vvvv, vbi, pfx, delta, "vsqrtsd", Iop_Sqrt64F0x2 ); in dis_ESC_0F__VEX()
24392 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
24394 uses_vvvv, vbi, pfx, delta, "vsqrtpd", Iop_Sqrt64Fx2 ); in dis_ESC_0F__VEX()
24398 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
24400 uses_vvvv, vbi, pfx, delta, "vsqrtpd", Iop_Sqrt64Fx4 ); in dis_ESC_0F__VEX()
24407 if (haveF3no66noF2(pfx)) { in dis_ESC_0F__VEX()
24409 uses_vvvv, vbi, pfx, delta, "vrsqrtss", in dis_ESC_0F__VEX()
24414 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
24416 uses_vvvv, vbi, pfx, delta, "vrsqrtps", Iop_RSqrtEst32Fx4 ); in dis_ESC_0F__VEX()
24420 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
24422 uses_vvvv, vbi, pfx, delta, "vrsqrtps", Iop_RSqrtEst32Fx8 ); in dis_ESC_0F__VEX()
24429 if (haveF3no66noF2(pfx)) { in dis_ESC_0F__VEX()
24431 uses_vvvv, vbi, pfx, delta, "vrcpss", Iop_RecipEst32F0x4 ); in dis_ESC_0F__VEX()
24435 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
24437 uses_vvvv, vbi, pfx, delta, "vrcpps", Iop_RecipEst32Fx4 ); in dis_ESC_0F__VEX()
24441 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
24443 uses_vvvv, vbi, pfx, delta, "vrcpps", Iop_RecipEst32Fx8 ); in dis_ESC_0F__VEX()
24451 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
24453 uses_vvvv, vbi, pfx, delta, "vandpd", Iop_AndV128 ); in dis_ESC_0F__VEX()
24458 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
24460 uses_vvvv, vbi, pfx, delta, "vandpd", Iop_AndV256 ); in dis_ESC_0F__VEX()
24464 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
24466 uses_vvvv, vbi, pfx, delta, "vandps", Iop_AndV128 ); in dis_ESC_0F__VEX()
24470 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
24472 uses_vvvv, vbi, pfx, delta, "vandps", Iop_AndV256 ); in dis_ESC_0F__VEX()
24480 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
24482 uses_vvvv, vbi, pfx, delta, "vandpd", Iop_AndV128, in dis_ESC_0F__VEX()
24487 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
24489 uses_vvvv, vbi, pfx, delta, "vandpd", Iop_AndV256, in dis_ESC_0F__VEX()
24494 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
24496 uses_vvvv, vbi, pfx, delta, "vandps", Iop_AndV128, in dis_ESC_0F__VEX()
24501 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
24503 uses_vvvv, vbi, pfx, delta, "vandps", Iop_AndV256, in dis_ESC_0F__VEX()
24512 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
24514 uses_vvvv, vbi, pfx, delta, "vorpd", Iop_OrV128 ); in dis_ESC_0F__VEX()
24519 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
24521 uses_vvvv, vbi, pfx, delta, "vorpd", Iop_OrV256 ); in dis_ESC_0F__VEX()
24526 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
24528 uses_vvvv, vbi, pfx, delta, "vorps", Iop_OrV128 ); in dis_ESC_0F__VEX()
24533 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
24535 uses_vvvv, vbi, pfx, delta, "vorps", Iop_OrV256 ); in dis_ESC_0F__VEX()
24543 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
24545 uses_vvvv, vbi, pfx, delta, "vxorpd", Iop_XorV128 ); in dis_ESC_0F__VEX()
24550 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
24552 uses_vvvv, vbi, pfx, delta, "vxorpd", Iop_XorV256 ); in dis_ESC_0F__VEX()
24557 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
24559 uses_vvvv, vbi, pfx, delta, "vxorps", Iop_XorV128 ); in dis_ESC_0F__VEX()
24564 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
24566 uses_vvvv, vbi, pfx, delta, "vxorps", Iop_XorV256 ); in dis_ESC_0F__VEX()
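
Within the logical-op run just above (24451 through 24566), only the 0F 55 group at 24480-24503 is more than a plain bitwise op: it reuses Iop_AndV128/Iop_AndV256, and the extra helper arguments cut off in this listing presumably select the ANDN form, whose per-lane effect is dest = (~src1) & src2. A minimal stand-alone statement of that semantics, assuming 32-bit lanes:

   /* Hypothetical model of the VANDNPS/VANDNPD lane operation: the first
    * source is complemented before the AND. */
   #include <stdint.h>

   static void vandn_32x4 ( uint32_t dst[4],
                            const uint32_t src1[4], const uint32_t src2[4] )
   {
      for (int i = 0; i < 4; i++)
         dst[i] = (~src1[i]) & src2[i];
   }
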
24573 if (haveF2no66noF3(pfx)) { in dis_ESC_0F__VEX()
24575 uses_vvvv, vbi, pfx, delta, "vaddsd", Iop_Add64F0x2 ); in dis_ESC_0F__VEX()
24579 if (haveF3no66noF2(pfx)) { in dis_ESC_0F__VEX()
24581 uses_vvvv, vbi, pfx, delta, "vaddss", Iop_Add32F0x4 ); in dis_ESC_0F__VEX()
24585 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
24587 uses_vvvv, vbi, pfx, delta, "vaddps", Iop_Add32Fx4 ); in dis_ESC_0F__VEX()
24591 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
24593 uses_vvvv, vbi, pfx, delta, "vaddps", Iop_Add32Fx8 ); in dis_ESC_0F__VEX()
24597 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
24599 uses_vvvv, vbi, pfx, delta, "vaddpd", Iop_Add64Fx2 ); in dis_ESC_0F__VEX()
24603 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
24605 uses_vvvv, vbi, pfx, delta, "vaddpd", Iop_Add64Fx4 ); in dis_ESC_0F__VEX()
24612 if (haveF2no66noF3(pfx)) { in dis_ESC_0F__VEX()
24614 uses_vvvv, vbi, pfx, delta, "vmulsd", Iop_Mul64F0x2 ); in dis_ESC_0F__VEX()
24618 if (haveF3no66noF2(pfx)) { in dis_ESC_0F__VEX()
24620 uses_vvvv, vbi, pfx, delta, "vmulss", Iop_Mul32F0x4 ); in dis_ESC_0F__VEX()
24624 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
24626 uses_vvvv, vbi, pfx, delta, "vmulps", Iop_Mul32Fx4 ); in dis_ESC_0F__VEX()
24630 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
24632 uses_vvvv, vbi, pfx, delta, "vmulps", Iop_Mul32Fx8 ); in dis_ESC_0F__VEX()
24636 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
24638 uses_vvvv, vbi, pfx, delta, "vmulpd", Iop_Mul64Fx2 ); in dis_ESC_0F__VEX()
24642 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
24644 uses_vvvv, vbi, pfx, delta, "vmulpd", Iop_Mul64Fx4 ); in dis_ESC_0F__VEX()
24651 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
24652 delta = dis_CVTPS2PD_128( vbi, pfx, delta, True/*isAvx*/ ); in dis_ESC_0F__VEX()
24656 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
24657 delta = dis_CVTPS2PD_256( vbi, pfx, delta ); in dis_ESC_0F__VEX()
24661 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
24662 delta = dis_CVTPD2PS_128( vbi, pfx, delta, True/*isAvx*/ ); in dis_ESC_0F__VEX()
24666 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
24667 delta = dis_CVTPD2PS_256( vbi, pfx, delta ); in dis_ESC_0F__VEX()
24671 if (haveF2no66noF3(pfx)) { in dis_ESC_0F__VEX()
24673 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F__VEX()
24674 UInt rD = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
24679 UInt rS = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
24685 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
24701 if (haveF3no66noF2(pfx)) { in dis_ESC_0F__VEX()
24703 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F__VEX()
24704 UInt rD = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
24707 UInt rS = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
24713 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
24730 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
24731 delta = dis_CVTxPS2DQ_128( vbi, pfx, delta, in dis_ESC_0F__VEX()
24736 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
24737 delta = dis_CVTxPS2DQ_256( vbi, pfx, delta, in dis_ESC_0F__VEX()
24742 if (haveF3no66noF2(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
24743 delta = dis_CVTxPS2DQ_128( vbi, pfx, delta, in dis_ESC_0F__VEX()
24748 if (haveF3no66noF2(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
24749 delta = dis_CVTxPS2DQ_256( vbi, pfx, delta, in dis_ESC_0F__VEX()
24754 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
24755 delta = dis_CVTDQ2PS_128 ( vbi, pfx, delta, True/*isAvx*/ ); in dis_ESC_0F__VEX()
24759 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
24760 delta = dis_CVTDQ2PS_256 ( vbi, pfx, delta ); in dis_ESC_0F__VEX()
24767 if (haveF2no66noF3(pfx)) { in dis_ESC_0F__VEX()
24769 uses_vvvv, vbi, pfx, delta, "vsubsd", Iop_Sub64F0x2 ); in dis_ESC_0F__VEX()
24773 if (haveF3no66noF2(pfx)) { in dis_ESC_0F__VEX()
24775 uses_vvvv, vbi, pfx, delta, "vsubss", Iop_Sub32F0x4 ); in dis_ESC_0F__VEX()
24779 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
24781 uses_vvvv, vbi, pfx, delta, "vsubps", Iop_Sub32Fx4 ); in dis_ESC_0F__VEX()
24785 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
24787 uses_vvvv, vbi, pfx, delta, "vsubps", Iop_Sub32Fx8 ); in dis_ESC_0F__VEX()
24791 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
24793 uses_vvvv, vbi, pfx, delta, "vsubpd", Iop_Sub64Fx2 ); in dis_ESC_0F__VEX()
24797 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
24799 uses_vvvv, vbi, pfx, delta, "vsubpd", Iop_Sub64Fx4 ); in dis_ESC_0F__VEX()
24806 if (haveF2no66noF3(pfx)) { in dis_ESC_0F__VEX()
24808 uses_vvvv, vbi, pfx, delta, "vminsd", Iop_Min64F0x2 ); in dis_ESC_0F__VEX()
24812 if (haveF3no66noF2(pfx)) { in dis_ESC_0F__VEX()
24814 uses_vvvv, vbi, pfx, delta, "vminss", Iop_Min32F0x4 ); in dis_ESC_0F__VEX()
24818 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
24820 uses_vvvv, vbi, pfx, delta, "vminps", Iop_Min32Fx4 ); in dis_ESC_0F__VEX()
24824 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
24826 uses_vvvv, vbi, pfx, delta, "vminps", Iop_Min32Fx8 ); in dis_ESC_0F__VEX()
24830 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
24832 uses_vvvv, vbi, pfx, delta, "vminpd", Iop_Min64Fx2 ); in dis_ESC_0F__VEX()
24836 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
24838 uses_vvvv, vbi, pfx, delta, "vminpd", Iop_Min64Fx4 ); in dis_ESC_0F__VEX()
24845 if (haveF2no66noF3(pfx)) { in dis_ESC_0F__VEX()
24847 uses_vvvv, vbi, pfx, delta, "vdivsd", Iop_Div64F0x2 ); in dis_ESC_0F__VEX()
24851 if (haveF3no66noF2(pfx)) { in dis_ESC_0F__VEX()
24853 uses_vvvv, vbi, pfx, delta, "vdivss", Iop_Div32F0x4 ); in dis_ESC_0F__VEX()
24857 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
24859 uses_vvvv, vbi, pfx, delta, "vdivps", Iop_Div32Fx4 ); in dis_ESC_0F__VEX()
24863 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
24865 uses_vvvv, vbi, pfx, delta, "vdivps", Iop_Div32Fx8 ); in dis_ESC_0F__VEX()
24869 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
24871 uses_vvvv, vbi, pfx, delta, "vdivpd", Iop_Div64Fx2 ); in dis_ESC_0F__VEX()
24875 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
24877 uses_vvvv, vbi, pfx, delta, "vdivpd", Iop_Div64Fx4 ); in dis_ESC_0F__VEX()
24884 if (haveF2no66noF3(pfx)) { in dis_ESC_0F__VEX()
24886 uses_vvvv, vbi, pfx, delta, "vmaxsd", Iop_Max64F0x2 ); in dis_ESC_0F__VEX()
24890 if (haveF3no66noF2(pfx)) { in dis_ESC_0F__VEX()
24892 uses_vvvv, vbi, pfx, delta, "vmaxss", Iop_Max32F0x4 ); in dis_ESC_0F__VEX()
24896 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
24898 uses_vvvv, vbi, pfx, delta, "vmaxps", Iop_Max32Fx4 ); in dis_ESC_0F__VEX()
24902 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
24904 uses_vvvv, vbi, pfx, delta, "vmaxps", Iop_Max32Fx8 ); in dis_ESC_0F__VEX()
24908 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
24910 uses_vvvv, vbi, pfx, delta, "vmaxpd", Iop_Max64Fx2 ); in dis_ESC_0F__VEX()
24914 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
24916 uses_vvvv, vbi, pfx, delta, "vmaxpd", Iop_Max64Fx4 ); in dis_ESC_0F__VEX()
24924 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
24926 uses_vvvv, vbi, pfx, delta, "vpunpcklbw", in dis_ESC_0F__VEX()
24933 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
24935 uses_vvvv, vbi, pfx, delta, "vpunpcklbw", in dis_ESC_0F__VEX()
24944 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
24946 uses_vvvv, vbi, pfx, delta, "vpunpcklwd", in dis_ESC_0F__VEX()
24953 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
24955 uses_vvvv, vbi, pfx, delta, "vpunpcklwd", in dis_ESC_0F__VEX()
24964 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
24966 uses_vvvv, vbi, pfx, delta, "vpunpckldq", in dis_ESC_0F__VEX()
24973 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
24975 uses_vvvv, vbi, pfx, delta, "vpunpckldq", in dis_ESC_0F__VEX()
24984 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
24986 uses_vvvv, vbi, pfx, delta, "vpacksswb", in dis_ESC_0F__VEX()
24993 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
24995 uses_vvvv, vbi, pfx, delta, "vpacksswb", in dis_ESC_0F__VEX()
25004 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25006 uses_vvvv, vbi, pfx, delta, "vpcmpgtb", Iop_CmpGT8Sx16 ); in dis_ESC_0F__VEX()
25011 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25013 uses_vvvv, vbi, pfx, delta, "vpcmpgtb", Iop_CmpGT8Sx32 ); in dis_ESC_0F__VEX()
25021 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25023 uses_vvvv, vbi, pfx, delta, "vpcmpgtw", Iop_CmpGT16Sx8 ); in dis_ESC_0F__VEX()
25028 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25030 uses_vvvv, vbi, pfx, delta, "vpcmpgtw", Iop_CmpGT16Sx16 ); in dis_ESC_0F__VEX()
25038 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25040 uses_vvvv, vbi, pfx, delta, "vpcmpgtd", Iop_CmpGT32Sx4 ); in dis_ESC_0F__VEX()
25045 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25047 uses_vvvv, vbi, pfx, delta, "vpcmpgtd", Iop_CmpGT32Sx8 ); in dis_ESC_0F__VEX()
25055 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25057 uses_vvvv, vbi, pfx, delta, "vpackuswb", in dis_ESC_0F__VEX()
25064 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25066 uses_vvvv, vbi, pfx, delta, "vpackuswb", in dis_ESC_0F__VEX()
25075 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25077 uses_vvvv, vbi, pfx, delta, "vpunpckhbw", in dis_ESC_0F__VEX()
25084 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25086 uses_vvvv, vbi, pfx, delta, "vpunpckhbw", in dis_ESC_0F__VEX()
25095 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25097 uses_vvvv, vbi, pfx, delta, "vpunpckhwd", in dis_ESC_0F__VEX()
25104 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25106 uses_vvvv, vbi, pfx, delta, "vpunpckhwd", in dis_ESC_0F__VEX()
25115 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25117 uses_vvvv, vbi, pfx, delta, "vpunpckhdq", in dis_ESC_0F__VEX()
25124 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25126 uses_vvvv, vbi, pfx, delta, "vpunpckhdq", in dis_ESC_0F__VEX()
25135 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25137 uses_vvvv, vbi, pfx, delta, "vpackssdw", in dis_ESC_0F__VEX()
25144 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25146 uses_vvvv, vbi, pfx, delta, "vpackssdw", in dis_ESC_0F__VEX()
25155 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25157 uses_vvvv, vbi, pfx, delta, "vpunpcklqdq", in dis_ESC_0F__VEX()
25164 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25166 uses_vvvv, vbi, pfx, delta, "vpunpcklqdq", in dis_ESC_0F__VEX()
25175 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25177 uses_vvvv, vbi, pfx, delta, "vpunpckhqdq", in dis_ESC_0F__VEX()
25184 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25186 uses_vvvv, vbi, pfx, delta, "vpunpckhqdq", in dis_ESC_0F__VEX()
25194 if (have66noF2noF3(pfx) in dis_ESC_0F__VEX()
25195 && 0==getVexL(pfx)/*128*/ && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F__VEX()
25201 gregOfRexRM(pfx,modrm), in dis_ESC_0F__VEX()
25202 unop( Iop_32UtoV128, getIReg32(eregOfRexRM(pfx,modrm)) ) in dis_ESC_0F__VEX()
25204 DIP("vmovd %s, %s\n", nameIReg32(eregOfRexRM(pfx,modrm)), in dis_ESC_0F__VEX()
25205 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__VEX()
25207 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
25210 gregOfRexRM(pfx,modrm), in dis_ESC_0F__VEX()
25214 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__VEX()
25219 if (have66noF2noF3(pfx) in dis_ESC_0F__VEX()
25220 && 0==getVexL(pfx)/*128*/ && 1==getRexW(pfx)/*W1*/) { in dis_ESC_0F__VEX()
25226 gregOfRexRM(pfx,modrm), in dis_ESC_0F__VEX()
25227 unop( Iop_64UtoV128, getIReg64(eregOfRexRM(pfx,modrm)) ) in dis_ESC_0F__VEX()
25229 DIP("vmovq %s, %s\n", nameIReg64(eregOfRexRM(pfx,modrm)), in dis_ESC_0F__VEX()
25230 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__VEX()
25232 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
25235 gregOfRexRM(pfx,modrm), in dis_ESC_0F__VEX()
25239 nameXMMReg(gregOfRexRM(pfx,modrm))); in dis_ESC_0F__VEX()
25248 if ((have66noF2noF3(pfx) || haveF3no66noF2(pfx)) in dis_ESC_0F__VEX()
25249 && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25251 UInt rD = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
25253 Bool isA = have66noF2noF3(pfx); in dis_ESC_0F__VEX()
25256 UInt rS = eregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
25261 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
25273 if ((have66noF2noF3(pfx) || haveF3no66noF2(pfx)) in dis_ESC_0F__VEX()
25274 && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25276 UInt rD = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
25278 Bool isA = have66noF2noF3(pfx); in dis_ESC_0F__VEX()
25281 UInt rS = eregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
25286 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
25300 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25301 delta = dis_PSHUFD_32x4( vbi, pfx, delta, True/*writesYmm*/); in dis_ESC_0F__VEX()
25305 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25306 delta = dis_PSHUFD_32x8( vbi, pfx, delta); in dis_ESC_0F__VEX()
25310 if (haveF2no66noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25311 delta = dis_PSHUFxW_128( vbi, pfx, delta, in dis_ESC_0F__VEX()
25316 if (haveF2no66noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25317 delta = dis_PSHUFxW_256( vbi, pfx, delta, False/*!xIsH*/ ); in dis_ESC_0F__VEX()
25321 if (haveF3no66noF2(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25322 delta = dis_PSHUFxW_128( vbi, pfx, delta, in dis_ESC_0F__VEX()
25327 if (haveF3no66noF2(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25328 delta = dis_PSHUFxW_256( vbi, pfx, delta, True/*xIsH*/ ); in dis_ESC_0F__VEX()
25337 if (have66noF2noF3(pfx) in dis_ESC_0F__VEX()
25338 && 0==getVexL(pfx)/*128*/ in dis_ESC_0F__VEX()
25341 delta = dis_AVX128_shiftE_to_V_imm( pfx, delta, in dis_ESC_0F__VEX()
25347 delta = dis_AVX128_shiftE_to_V_imm( pfx, delta, in dis_ESC_0F__VEX()
25353 delta = dis_AVX128_shiftE_to_V_imm( pfx, delta, in dis_ESC_0F__VEX()
25363 if (have66noF2noF3(pfx) in dis_ESC_0F__VEX()
25364 && 1==getVexL(pfx)/*256*/ in dis_ESC_0F__VEX()
25367 delta = dis_AVX256_shiftE_to_V_imm( pfx, delta, in dis_ESC_0F__VEX()
25373 delta = dis_AVX256_shiftE_to_V_imm( pfx, delta, in dis_ESC_0F__VEX()
25379 delta = dis_AVX256_shiftE_to_V_imm( pfx, delta, in dis_ESC_0F__VEX()
25392 if (have66noF2noF3(pfx) in dis_ESC_0F__VEX()
25393 && 0==getVexL(pfx)/*128*/ in dis_ESC_0F__VEX()
25396 delta = dis_AVX128_shiftE_to_V_imm( pfx, delta, in dis_ESC_0F__VEX()
25402 delta = dis_AVX128_shiftE_to_V_imm( pfx, delta, in dis_ESC_0F__VEX()
25408 delta = dis_AVX128_shiftE_to_V_imm( pfx, delta, in dis_ESC_0F__VEX()
25418 if (have66noF2noF3(pfx) in dis_ESC_0F__VEX()
25419 && 1==getVexL(pfx)/*256*/ in dis_ESC_0F__VEX()
25422 delta = dis_AVX256_shiftE_to_V_imm( pfx, delta, in dis_ESC_0F__VEX()
25428 delta = dis_AVX256_shiftE_to_V_imm( pfx, delta, in dis_ESC_0F__VEX()
25434 delta = dis_AVX256_shiftE_to_V_imm( pfx, delta, in dis_ESC_0F__VEX()
25448 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/ in dis_ESC_0F__VEX()
25450 Int rS = eregOfRexRM(pfx,getUChar(delta)); in dis_ESC_0F__VEX()
25451 Int rD = getVexNvvvv(pfx); in dis_ESC_0F__VEX()
25472 delta = dis_AVX128_shiftE_to_V_imm( pfx, delta, in dis_ESC_0F__VEX()
25478 delta = dis_AVX128_shiftE_to_V_imm( pfx, delta, in dis_ESC_0F__VEX()
25489 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/ in dis_ESC_0F__VEX()
25491 Int rS = eregOfRexRM(pfx,getUChar(delta)); in dis_ESC_0F__VEX()
25492 Int rD = getVexNvvvv(pfx); in dis_ESC_0F__VEX()
25520 delta = dis_AVX256_shiftE_to_V_imm( pfx, delta, in dis_ESC_0F__VEX()
25526 delta = dis_AVX256_shiftE_to_V_imm( pfx, delta, in dis_ESC_0F__VEX()
25538 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25540 uses_vvvv, vbi, pfx, delta, "vpcmpeqb", Iop_CmpEQ8x16 ); in dis_ESC_0F__VEX()
25545 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25547 uses_vvvv, vbi, pfx, delta, "vpcmpeqb", Iop_CmpEQ8x32 ); in dis_ESC_0F__VEX()
25555 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25557 uses_vvvv, vbi, pfx, delta, "vpcmpeqw", Iop_CmpEQ16x8 ); in dis_ESC_0F__VEX()
25562 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25564 uses_vvvv, vbi, pfx, delta, "vpcmpeqw", Iop_CmpEQ16x16 ); in dis_ESC_0F__VEX()
25572 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25574 uses_vvvv, vbi, pfx, delta, "vpcmpeqd", Iop_CmpEQ32x4 ); in dis_ESC_0F__VEX()
25579 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25581 uses_vvvv, vbi, pfx, delta, "vpcmpeqd", Iop_CmpEQ32x8 ); in dis_ESC_0F__VEX()
25588 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25599 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25615 if (haveF2no66noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25621 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
25622 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F__VEX()
25624 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
25630 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
25643 if (haveF2no66noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25650 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
25651 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F__VEX()
25654 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
25660 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
25677 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25683 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
25684 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F__VEX()
25686 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
25692 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
25705 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25712 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
25713 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F__VEX()
25716 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
25722 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
25745 if (haveF3no66noF2(pfx) in dis_ESC_0F__VEX()
25746 && 0==getVexL(pfx)/*128*/ && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F__VEX()
25749 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
25751 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
25756 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
25769 if (have66noF2noF3(pfx) in dis_ESC_0F__VEX()
25770 && 0==getVexL(pfx)/*128*/ && 1==getRexW(pfx)/*W1*/) { in dis_ESC_0F__VEX()
25772 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
25774 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
25779 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
25788 if (have66noF2noF3(pfx) in dis_ESC_0F__VEX()
25789 && 0==getVexL(pfx)/*128*/ && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F__VEX()
25791 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
25793 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
25798 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
25810 if ((have66noF2noF3(pfx) || haveF3no66noF2(pfx)) in dis_ESC_0F__VEX()
25811 && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25813 UInt rS = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
25815 Bool isA = have66noF2noF3(pfx); in dis_ESC_0F__VEX()
25819 UInt rD = eregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
25824 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
25835 if ((have66noF2noF3(pfx) || haveF3no66noF2(pfx)) in dis_ESC_0F__VEX()
25836 && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25838 UInt rS = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
25840 Bool isA = have66noF2noF3(pfx); in dis_ESC_0F__VEX()
25844 UInt rD = eregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
25849 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
25862 if (haveNo66noF2noF3(pfx) in dis_ESC_0F__VEX()
25863 && 0==getVexL(pfx)/*LZ*/ in dis_ESC_0F__VEX()
25864 && 0==getRexW(pfx) /* be paranoid -- Intel docs don't require this */ in dis_ESC_0F__VEX()
25867 delta = dis_STMXCSR(vbi, pfx, delta, True/*isAvx*/); in dis_ESC_0F__VEX()
25871 if (haveNo66noF2noF3(pfx) in dis_ESC_0F__VEX()
25872 && 0==getVexL(pfx)/*LZ*/ in dis_ESC_0F__VEX()
25873 && 0==getRexW(pfx) /* be paranoid -- Intel docs don't require this */ in dis_ESC_0F__VEX()
25876 delta = dis_LDMXCSR(vbi, pfx, delta, True/*isAvx*/); in dis_ESC_0F__VEX()
25884 if (haveF2no66noF3(pfx)) { in dis_ESC_0F__VEX()
25886 delta = dis_AVX128_cmp_V_E_to_G( uses_vvvv, vbi, pfx, delta, in dis_ESC_0F__VEX()
25894 if (haveF3no66noF2(pfx)) { in dis_ESC_0F__VEX()
25896 delta = dis_AVX128_cmp_V_E_to_G( uses_vvvv, vbi, pfx, delta, in dis_ESC_0F__VEX()
25904 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25906 delta = dis_AVX128_cmp_V_E_to_G( uses_vvvv, vbi, pfx, delta, in dis_ESC_0F__VEX()
25914 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25916 delta = dis_AVX256_cmp_V_E_to_G( uses_vvvv, vbi, pfx, delta, in dis_ESC_0F__VEX()
25923 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25925 delta = dis_AVX128_cmp_V_E_to_G( uses_vvvv, vbi, pfx, delta, in dis_ESC_0F__VEX()
25933 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
25935 delta = dis_AVX256_cmp_V_E_to_G( uses_vvvv, vbi, pfx, delta, in dis_ESC_0F__VEX()
25944 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25946 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
25947 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F__VEX()
25954 getIReg32(eregOfRexRM(pfx,modrm))) ); in dis_ESC_0F__VEX()
25957 nameIReg32( eregOfRexRM(pfx, modrm) ), nameXMMReg(rG) ); in dis_ESC_0F__VEX()
25959 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F__VEX()
25978 if (have66noF2noF3(pfx) in dis_ESC_0F__VEX()
25979 && 0==getVexL(pfx)/*128*/ && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F__VEX()
25981 delta = dis_PEXTRW_128_EregOnly_toG( vbi, pfx, delta, in dis_ESC_0F__VEX()
25991 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
25996 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
25997 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F__VEX()
26000 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
26007 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F__VEX()
26021 if (haveNo66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26026 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
26027 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F__VEX()
26030 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
26037 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F__VEX()
26051 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26056 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
26057 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F__VEX()
26060 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
26067 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F__VEX()
26081 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26086 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
26087 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F__VEX()
26090 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
26097 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F__VEX()
26113 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26115 uses_vvvv, vbi, pfx, delta, in dis_ESC_0F__VEX()
26120 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26122 uses_vvvv, vbi, pfx, delta, in dis_ESC_0F__VEX()
26127 if (haveF2no66noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26129 uses_vvvv, vbi, pfx, delta, in dis_ESC_0F__VEX()
26134 if (haveF2no66noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26136 uses_vvvv, vbi, pfx, delta, in dis_ESC_0F__VEX()
26144 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26145 delta = dis_AVX128_shiftV_byE( vbi, pfx, delta, in dis_ESC_0F__VEX()
26152 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26153 delta = dis_AVX256_shiftV_byE( vbi, pfx, delta, in dis_ESC_0F__VEX()
26163 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26164 delta = dis_AVX128_shiftV_byE( vbi, pfx, delta, in dis_ESC_0F__VEX()
26170 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26171 delta = dis_AVX256_shiftV_byE( vbi, pfx, delta, in dis_ESC_0F__VEX()
26180 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26181 delta = dis_AVX128_shiftV_byE( vbi, pfx, delta, in dis_ESC_0F__VEX()
26187 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26188 delta = dis_AVX256_shiftV_byE( vbi, pfx, delta, in dis_ESC_0F__VEX()
26198 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26200 uses_vvvv, vbi, pfx, delta, "vpaddq", Iop_Add64x2 ); in dis_ESC_0F__VEX()
26205 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26207 uses_vvvv, vbi, pfx, delta, "vpaddq", Iop_Add64x4 ); in dis_ESC_0F__VEX()
26214 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26216 uses_vvvv, vbi, pfx, delta, "vpmullw", Iop_Mul16x8 ); in dis_ESC_0F__VEX()
26220 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26222 uses_vvvv, vbi, pfx, delta, "vpmullw", Iop_Mul16x16 ); in dis_ESC_0F__VEX()
26232 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/ in dis_ESC_0F__VEX()
26233 && 0==getRexW(pfx)/*this might be redundant, dunno*/) { in dis_ESC_0F__VEX()
26235 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
26240 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
26251 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26252 delta = dis_PMOVMSKB_128( vbi, pfx, delta, True/*isAvx*/ ); in dis_ESC_0F__VEX()
26256 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26257 delta = dis_PMOVMSKB_256( vbi, pfx, delta ); in dis_ESC_0F__VEX()
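
dis_PMOVMSKB_128 and dis_PMOVMSKB_256 (26252, 26257) collapse the per-byte sign bits of the source vector into a general-purpose register. A stand-alone sketch of the 128-bit case, as an assumed model rather than the helper's actual IR:

   /* Bit i of the result is the most significant bit of source byte i. */
   #include <stdint.h>

   static uint32_t pmovmskb_128 ( const uint8_t src[16] )
   {
      uint32_t r = 0;
      for (int i = 0; i < 16; i++)
         r |= (uint32_t)(src[i] >> 7) << i;
      return r;
   }
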
26264 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26266 uses_vvvv, vbi, pfx, delta, "vpsubusb", Iop_QSub8Ux16 ); in dis_ESC_0F__VEX()
26270 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26272 uses_vvvv, vbi, pfx, delta, "vpsubusb", Iop_QSub8Ux32 ); in dis_ESC_0F__VEX()
26279 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26281 uses_vvvv, vbi, pfx, delta, "vpsubusw", Iop_QSub16Ux8 ); in dis_ESC_0F__VEX()
26285 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26287 uses_vvvv, vbi, pfx, delta, "vpsubusw", Iop_QSub16Ux16 ); in dis_ESC_0F__VEX()
26294 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26296 uses_vvvv, vbi, pfx, delta, "vpminub", Iop_Min8Ux16 ); in dis_ESC_0F__VEX()
26300 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26302 uses_vvvv, vbi, pfx, delta, "vpminub", Iop_Min8Ux32 ); in dis_ESC_0F__VEX()
26310 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26312 uses_vvvv, vbi, pfx, delta, "vpand", Iop_AndV128 ); in dis_ESC_0F__VEX()
26317 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26319 uses_vvvv, vbi, pfx, delta, "vpand", Iop_AndV256 ); in dis_ESC_0F__VEX()
26326 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26328 uses_vvvv, vbi, pfx, delta, "vpaddusb", Iop_QAdd8Ux16 ); in dis_ESC_0F__VEX()
26332 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26334 uses_vvvv, vbi, pfx, delta, "vpaddusb", Iop_QAdd8Ux32 ); in dis_ESC_0F__VEX()
26341 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26343 uses_vvvv, vbi, pfx, delta, "vpaddusw", Iop_QAdd16Ux8 ); in dis_ESC_0F__VEX()
26347 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26349 uses_vvvv, vbi, pfx, delta, "vpaddusw", Iop_QAdd16Ux16 ); in dis_ESC_0F__VEX()
26356 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26358 uses_vvvv, vbi, pfx, delta, "vpmaxub", Iop_Max8Ux16 ); in dis_ESC_0F__VEX()
26362 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26364 uses_vvvv, vbi, pfx, delta, "vpmaxub", Iop_Max8Ux32 ); in dis_ESC_0F__VEX()
26372 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26374 uses_vvvv, vbi, pfx, delta, "vpandn", Iop_AndV128, in dis_ESC_0F__VEX()
26380 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26382 uses_vvvv, vbi, pfx, delta, "vpandn", Iop_AndV256, in dis_ESC_0F__VEX()
26390 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26392 uses_vvvv, vbi, pfx, delta, "vpavgb", Iop_Avg8Ux16 ); in dis_ESC_0F__VEX()
26396 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26398 uses_vvvv, vbi, pfx, delta, "vpavgb", Iop_Avg8Ux32 ); in dis_ESC_0F__VEX()
26405 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26406 delta = dis_AVX128_shiftV_byE( vbi, pfx, delta, in dis_ESC_0F__VEX()
26412 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26413 delta = dis_AVX256_shiftV_byE( vbi, pfx, delta, in dis_ESC_0F__VEX()
26422 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26423 delta = dis_AVX128_shiftV_byE( vbi, pfx, delta, in dis_ESC_0F__VEX()
26429 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26430 delta = dis_AVX256_shiftV_byE( vbi, pfx, delta, in dis_ESC_0F__VEX()
26439 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26441 uses_vvvv, vbi, pfx, delta, "vpavgw", Iop_Avg16Ux8 ); in dis_ESC_0F__VEX()
26445 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26447 uses_vvvv, vbi, pfx, delta, "vpavgw", Iop_Avg16Ux16 ); in dis_ESC_0F__VEX()
26454 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26456 uses_vvvv, vbi, pfx, delta, "vpmulhuw", Iop_MulHi16Ux8 ); in dis_ESC_0F__VEX()
26460 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26462 uses_vvvv, vbi, pfx, delta, "vpmulhuw", Iop_MulHi16Ux16 ); in dis_ESC_0F__VEX()
26469 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26471 uses_vvvv, vbi, pfx, delta, "vpmulhw", Iop_MulHi16Sx8 ); in dis_ESC_0F__VEX()
26475 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26477 uses_vvvv, vbi, pfx, delta, "vpmulhw", Iop_MulHi16Sx16 ); in dis_ESC_0F__VEX()
26484 if (haveF3no66noF2(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26485 delta = dis_CVTDQ2PD_128(vbi, pfx, delta, True/*isAvx*/); in dis_ESC_0F__VEX()
26489 if (haveF3no66noF2(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26490 delta = dis_CVTDQ2PD_256(vbi, pfx, delta); in dis_ESC_0F__VEX()
26494 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26495 delta = dis_CVTxPD2DQ_128(vbi, pfx, delta, True/*isAvx*/, in dis_ESC_0F__VEX()
26500 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26501 delta = dis_CVTxPD2DQ_256(vbi, pfx, delta, True/*r2zero*/); in dis_ESC_0F__VEX()
26505 if (haveF2no66noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26506 delta = dis_CVTxPD2DQ_128(vbi, pfx, delta, True/*isAvx*/, in dis_ESC_0F__VEX()
26511 if (haveF2no66noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26512 delta = dis_CVTxPD2DQ_256(vbi, pfx, delta, False/*!r2zero*/); in dis_ESC_0F__VEX()
26519 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26521 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
26523 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
26533 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26535 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F__VEX()
26537 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
26550 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26552 uses_vvvv, vbi, pfx, delta, "vpsubsb", Iop_QSub8Sx16 ); in dis_ESC_0F__VEX()
26556 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26558 uses_vvvv, vbi, pfx, delta, "vpsubsb", Iop_QSub8Sx32 ); in dis_ESC_0F__VEX()
26565 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26567 uses_vvvv, vbi, pfx, delta, "vpsubsw", Iop_QSub16Sx8 ); in dis_ESC_0F__VEX()
26571 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26573 uses_vvvv, vbi, pfx, delta, "vpsubsw", Iop_QSub16Sx16 ); in dis_ESC_0F__VEX()
26581 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26583 uses_vvvv, vbi, pfx, delta, "vpminsw", Iop_Min16Sx8 ); in dis_ESC_0F__VEX()
26588 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26590 uses_vvvv, vbi, pfx, delta, "vpminsw", Iop_Min16Sx16 ); in dis_ESC_0F__VEX()
26598 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26600 uses_vvvv, vbi, pfx, delta, "vpor", Iop_OrV128 ); in dis_ESC_0F__VEX()
26605 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26607 uses_vvvv, vbi, pfx, delta, "vpor", Iop_OrV256 ); in dis_ESC_0F__VEX()
26614 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26616 uses_vvvv, vbi, pfx, delta, "vpaddsb", Iop_QAdd8Sx16 ); in dis_ESC_0F__VEX()
26620 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26622 uses_vvvv, vbi, pfx, delta, "vpaddsb", Iop_QAdd8Sx32 ); in dis_ESC_0F__VEX()
26629 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26631 uses_vvvv, vbi, pfx, delta, "vpaddsw", Iop_QAdd16Sx8 ); in dis_ESC_0F__VEX()
26635 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26637 uses_vvvv, vbi, pfx, delta, "vpaddsw", Iop_QAdd16Sx16 ); in dis_ESC_0F__VEX()
26645 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26647 uses_vvvv, vbi, pfx, delta, "vpmaxsw", Iop_Max16Sx8 ); in dis_ESC_0F__VEX()
26652 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26654 uses_vvvv, vbi, pfx, delta, "vpmaxsw", Iop_Max16Sx16 ); in dis_ESC_0F__VEX()
26662 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26664 uses_vvvv, vbi, pfx, delta, "vpxor", Iop_XorV128 ); in dis_ESC_0F__VEX()
26669 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26671 uses_vvvv, vbi, pfx, delta, "vpxor", Iop_XorV256 ); in dis_ESC_0F__VEX()
26678 if (haveF2no66noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26680 UInt rD = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
26683 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
26691 if (haveF2no66noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26693 UInt rD = gregOfRexRM(pfx, modrm); in dis_ESC_0F__VEX()
26696 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F__VEX()
26707 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26708 delta = dis_AVX128_shiftV_byE( vbi, pfx, delta, in dis_ESC_0F__VEX()
26715 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26716 delta = dis_AVX256_shiftV_byE( vbi, pfx, delta, in dis_ESC_0F__VEX()
26726 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26727 delta = dis_AVX128_shiftV_byE( vbi, pfx, delta, in dis_ESC_0F__VEX()
26733 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26734 delta = dis_AVX256_shiftV_byE( vbi, pfx, delta, in dis_ESC_0F__VEX()
26743 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26744 delta = dis_AVX128_shiftV_byE( vbi, pfx, delta, in dis_ESC_0F__VEX()
26750 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26751 delta = dis_AVX256_shiftV_byE( vbi, pfx, delta, in dis_ESC_0F__VEX()
26760 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26762 uses_vvvv, vbi, pfx, delta, in dis_ESC_0F__VEX()
26767 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26769 uses_vvvv, vbi, pfx, delta, in dis_ESC_0F__VEX()
26777 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26779 uses_vvvv, vbi, pfx, delta, in dis_ESC_0F__VEX()
26784 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26786 uses_vvvv, vbi, pfx, delta, in dis_ESC_0F__VEX()
26794 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26796 uses_vvvv, vbi, pfx, delta, in dis_ESC_0F__VEX()
26801 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26803 uses_vvvv, vbi, pfx, delta, in dis_ESC_0F__VEX()
26811 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/ in dis_ESC_0F__VEX()
26813 delta = dis_MASKMOVDQU( vbi, pfx, delta, True/*isAvx*/ ); in dis_ESC_0F__VEX()
26821 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26823 uses_vvvv, vbi, pfx, delta, "vpsubb", Iop_Sub8x16 ); in dis_ESC_0F__VEX()
26828 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26830 uses_vvvv, vbi, pfx, delta, "vpsubb", Iop_Sub8x32 ); in dis_ESC_0F__VEX()
26838 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26840 uses_vvvv, vbi, pfx, delta, "vpsubw", Iop_Sub16x8 ); in dis_ESC_0F__VEX()
26845 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26847 uses_vvvv, vbi, pfx, delta, "vpsubw", Iop_Sub16x16 ); in dis_ESC_0F__VEX()
26855 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26857 uses_vvvv, vbi, pfx, delta, "vpsubd", Iop_Sub32x4 ); in dis_ESC_0F__VEX()
26862 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26864 uses_vvvv, vbi, pfx, delta, "vpsubd", Iop_Sub32x8 ); in dis_ESC_0F__VEX()
26872 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26874 uses_vvvv, vbi, pfx, delta, "vpsubq", Iop_Sub64x2 ); in dis_ESC_0F__VEX()
26879 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26881 uses_vvvv, vbi, pfx, delta, "vpsubq", Iop_Sub64x4 ); in dis_ESC_0F__VEX()
26889 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26891 uses_vvvv, vbi, pfx, delta, "vpaddb", Iop_Add8x16 ); in dis_ESC_0F__VEX()
26896 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26898 uses_vvvv, vbi, pfx, delta, "vpaddb", Iop_Add8x32 ); in dis_ESC_0F__VEX()
26906 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26908 uses_vvvv, vbi, pfx, delta, "vpaddw", Iop_Add16x8 ); in dis_ESC_0F__VEX()
26913 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26915 uses_vvvv, vbi, pfx, delta, "vpaddw", Iop_Add16x16 ); in dis_ESC_0F__VEX()
26923 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F__VEX()
26925 uses_vvvv, vbi, pfx, delta, "vpaddd", Iop_Add32x4 ); in dis_ESC_0F__VEX()
26930 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F__VEX()
26932 uses_vvvv, vbi, pfx, delta, "vpaddd", Iop_Add32x8 ); in dis_ESC_0F__VEX()
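
Every dis_ESC_0F__VEX() match above follows the same gating idiom: a handler fires only when the SIMD prefix class (66, F2, F3, or none) and VEX.L (128 vs 256), and sometimes REX.W, match the encoding, and only then is ModRM read and the register path (eregOfRexRM) or memory path (disAMode) taken. A self-contained illustration of the prefix/L selection for one opcode, 0F 54 (the vandps/vandpd group at 24451-24472), with plain ints standing in for the Prefix helper tests:

   /* Hypothetical stand-alone dispatcher, not VEX library code: the
    * prefix class plus VEX.L picks exactly one variant of 0F 54. */
   #include <stdio.h>

   static const char* variant_0F54 ( int has66, int hasF2, int hasF3, int vexL )
   {
      if (hasF2 || hasF3) return "invalid";                  /* no F2/F3 form */
      if (has66) return vexL ? "vandpd ymm" : "vandpd xmm";  /* 66   => ..pd  */
      return vexL ? "vandps ymm" : "vandps xmm";             /* none => ..ps  */
   }

   int main ( void )
   {
      printf("%s\n", variant_0F54(1, 0, 0, 0));   /* vandpd xmm */
      printf("%s\n", variant_0F54(0, 0, 0, 1));   /* vandps ymm */
      return 0;
   }
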
27031 const VexAbiInfo* vbi, Prefix pfx, Long delta, in dis_SHIFTX() argument
27036 Int size = getRexW(pfx) ? 8 : 4; in dis_SHIFTX()
27042 assign( amt, getIRegV(size,pfx) ); in dis_SHIFTX()
27044 assign( src, getIRegE(size,pfx,rm) ); in dis_SHIFTX()
27045 DIP("%s %s,%s,%s\n", opname, nameIRegV(size,pfx), in dis_SHIFTX()
27046 nameIRegE(size,pfx,rm), nameIRegG(size,pfx,rm)); in dis_SHIFTX()
27049 IRTemp addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_SHIFTX()
27051 DIP("%s %s,%s,%s\n", opname, nameIRegV(size,pfx), dis_buf, in dis_SHIFTX()
27052 nameIRegG(size,pfx,rm)); in dis_SHIFTX()
27056 putIRegG( size, pfx, rm, in dis_SHIFTX()
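
The dis_SHIFTX() matches (27031-27056) show the BMI2 SARX/SHLX/SHRX operand routing: the width comes from REX.W, the shift amount from the VEX.vvvv register (getIRegV), the value to shift from the r/m operand (getIRegE or a memory load after disAMode), and the result goes to the reg field (putIRegG). A stand-alone model of the semantics being built, assuming the usual masking of the count and no flag updates (64-bit width shown):

   /* Hypothetical model of shlx/shrx/sarx: the count is masked to
    * width-1 and EFLAGS are untouched.  The arithmetic branch assumes
    * the compiler shifts negative signed values arithmetically, as
    * mainstream compilers do. */
   #include <stdint.h>

   enum shiftx_op { SHLX, SHRX, SARX };

   static uint64_t shiftx64 ( enum shiftx_op op, uint64_t src, uint64_t amt )
   {
      unsigned count = (unsigned)(amt & 63);
      switch (op) {
         case SHLX: return src << count;
         case SHRX: return src >> count;                      /* logical    */
         case SARX: return (uint64_t)((int64_t)src >> count); /* arithmetic */
      }
      return 0; /* not reached */
   }
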
27066 static Long dis_FMA ( const VexAbiInfo* vbi, Prefix pfx, Long delta, UChar opc ) in dis_FMA() argument
27069 UInt rG = gregOfRexRM(pfx, modrm); in dis_FMA()
27070 UInt rV = getVexNvvvv(pfx); in dis_FMA()
27072 IRType ty = getRexW(pfx) ? Ity_F64 : Ity_F32; in dis_FMA()
27073 IRType vty = scalar ? ty : getVexL(pfx) ? Ity_V256 : Ity_V128; in dis_FMA()
27150 UInt rE = eregOfRexRM(pfx, modrm); in dis_FMA()
27167 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_FMA()
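
In dis_FMA() the element type is picked by REX.W and the vector type by VEX.L, except that scalar forms ignore L (lines 27072-27073). Restated as a small stand-alone helper, with illustrative type names rather than the file's IRType values:

   /* Hypothetical restatement of the FMA type selection: W chooses the
    * element width, L the vector width, and scalar forms use the element
    * type directly. */
   typedef enum { TY_F32, TY_F64, TY_V128, TY_V256 } FmaTy;

   static FmaTy fma_op_type ( int rexW, int vexL, int scalar )
   {
      FmaTy elem = rexW ? TY_F64 : TY_F32;
      if (scalar) return elem;
      return vexL ? TY_V256 : TY_V128;
   }
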
27255 Prefix pfx, Long delta, in dis_VMASKMOV() argument
27263 UInt rG = gregOfRexRM(pfx,modrm); in dis_VMASKMOV()
27264 UInt rV = getVexNvvvv(pfx); in dis_VMASKMOV()
27266 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_VMASKMOV()
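
dis_VMASKMOV() goes straight to disAMode() (27266), consistent with VMASKMOVPS/PD always taking a memory operand, and draws the mask from VEX.vvvv (27264). The per-lane behaviour it has to express, sketched stand-alone for a 128-bit, 32-bit-element load (fault suppression for unselected lanes is not modelled):

   /* Hypothetical model of a VMASKMOVPS load: a lane is read only when
    * the sign bit of the matching mask lane is set; other lanes are
    * zeroed. */
   #include <stdint.h>

   static void vmaskmov_load_32x4 ( uint32_t dst[4],
                                    const uint32_t mask[4],
                                    const uint32_t mem[4] )
   {
      for (int i = 0; i < 4; i++)
         dst[i] = (mask[i] & 0x80000000u) ? mem[i] : 0;
   }
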
27325 Prefix pfx, Long delta, in dis_VGATHER() argument
27333 UInt rG = gregOfRexRM(pfx,modrm); in dis_VGATHER()
27334 UInt rV = getVexNvvvv(pfx); in dis_VGATHER()
27339 addr = disAVSIBMode ( &alen, vbi, pfx, delta, dis_buf, &rI, in dis_VGATHER()
27391 addr_expr = handleAddrOverrides(vbi, pfx, addr_expr); in dis_VGATHER()
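
dis_VGATHER() is the one helper above that decodes a VSIB memory operand (disAVSIBMode, 27339) and routes the computed address through handleAddrOverrides (27391). The per-lane behaviour being modelled, sketched stand-alone for a 4-lane dword gather: a lane is fetched from base + index*scale when its mask sign bit is set, left untouched otherwise, and a completed gather leaves the mask all zero.

   /* Hypothetical model of a VPGATHERDD-style gather (faulting and
    * partial completion are not modelled). */
   #include <stdint.h>

   static void gather_32x4 ( uint32_t dst[4], uint32_t mask[4],
                             const uint8_t* base,
                             const int32_t index[4], int scale )
   {
      for (int i = 0; i < 4; i++) {
         if (mask[i] & 0x80000000u) {
            const uint32_t* p =
               (const uint32_t*)(base + (int64_t)index[i] * (int64_t)scale);
            dst[i] = *p;                    /* unselected lanes keep dst[i] */
         }
         mask[i] = 0;                       /* mask ends up all zero        */
      }
   }
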
27428 Prefix pfx, Int sz, Long deltaIN in dis_ESC_0F38__VEX() argument
27444 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
27446 uses_vvvv, vbi, pfx, delta, "vpshufb", math_PSHUFB_XMM ); in dis_ESC_0F38__VEX()
27451 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
27453 uses_vvvv, vbi, pfx, delta, "vpshufb", math_PSHUFB_YMM ); in dis_ESC_0F38__VEX()
27464 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
27465 delta = dis_PHADD_128( vbi, pfx, delta, True/*isAvx*/, opc ); in dis_ESC_0F38__VEX()
27472 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
27473 delta = dis_PHADD_256( vbi, pfx, delta, opc ); in dis_ESC_0F38__VEX()
27481 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
27483 uses_vvvv, vbi, pfx, delta, "vpmaddubsw", in dis_ESC_0F38__VEX()
27488 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
27490 uses_vvvv, vbi, pfx, delta, "vpmaddubsw", in dis_ESC_0F38__VEX()
27502 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
27503 delta = dis_PHADD_128( vbi, pfx, delta, True/*isAvx*/, opc ); in dis_ESC_0F38__VEX()
27510 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
27511 delta = dis_PHADD_256( vbi, pfx, delta, opc ); in dis_ESC_0F38__VEX()
27523 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
27531 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F38__VEX()
27532 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F38__VEX()
27544 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F38__VEX()
27550 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__VEX()
27573 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
27582 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F38__VEX()
27583 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F38__VEX()
27595 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F38__VEX()
27601 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__VEX()
27631 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
27637 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F38__VEX()
27638 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F38__VEX()
27643 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F38__VEX()
27649 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__VEX()
27670 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
27676 UInt rG = gregOfRexRM(pfx,modrm); in dis_ESC_0F38__VEX()
27677 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F38__VEX()
27682 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F38__VEX()
27688 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__VEX()
27716 if (have66noF2noF3(pfx) in dis_ESC_0F38__VEX()
27717 && 0==getVexL(pfx)/*128*/ && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F38__VEX()
27719 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
27720 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F38__VEX()
27723 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
27729 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__VEX()
27743 if (have66noF2noF3(pfx) in dis_ESC_0F38__VEX()
27744 && 1==getVexL(pfx)/*256*/ && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F38__VEX()
27746 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
27747 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F38__VEX()
27750 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
27756 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__VEX()
27773 if (have66noF2noF3(pfx) in dis_ESC_0F38__VEX()
27774 && 0==getVexL(pfx)/*128*/ && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F38__VEX()
27776 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
27777 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F38__VEX()
27780 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
27786 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__VEX()
27800 if (have66noF2noF3(pfx) in dis_ESC_0F38__VEX()
27801 && 1==getVexL(pfx)/*256*/ && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F38__VEX()
27803 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
27804 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F38__VEX()
27807 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
27813 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__VEX()
27830 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
27831 delta = dis_xTESTy_128( vbi, pfx, delta, True/*isAvx*/, 32 ); in dis_ESC_0F38__VEX()
27835 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
27836 delta = dis_xTESTy_256( vbi, pfx, delta, 32 ); in dis_ESC_0F38__VEX()
27843 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
27844 delta = dis_xTESTy_128( vbi, pfx, delta, True/*isAvx*/, 64 ); in dis_ESC_0F38__VEX()
27848 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
27849 delta = dis_xTESTy_256( vbi, pfx, delta, 64 ); in dis_ESC_0F38__VEX()
27856 if (have66noF2noF3(pfx) in dis_ESC_0F38__VEX()
27857 && 1==getVexL(pfx)/*256*/ && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F38__VEX()
27859 uses_vvvv, vbi, pfx, delta, "vpermps", math_VPERMD ); in dis_ESC_0F38__VEX()
27866 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
27867 delta = dis_xTESTy_128( vbi, pfx, delta, True/*isAvx*/, 0 ); in dis_ESC_0F38__VEX()
27871 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
27872 delta = dis_xTESTy_256( vbi, pfx, delta, 0 ); in dis_ESC_0F38__VEX()
27879 if (have66noF2noF3(pfx) in dis_ESC_0F38__VEX()
27880 && 0==getVexL(pfx)/*128*/ in dis_ESC_0F38__VEX()
27883 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
27884 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__VEX()
27896 if (have66noF2noF3(pfx) in dis_ESC_0F38__VEX()
27897 && 1==getVexL(pfx)/*256*/ in dis_ESC_0F38__VEX()
27900 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
27901 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__VEX()
27914 if (have66noF2noF3(pfx) in dis_ESC_0F38__VEX()
27915 && 0==getVexL(pfx)/*128*/ in dis_ESC_0F38__VEX()
27918 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
27919 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
27931 if (have66noF2noF3(pfx) in dis_ESC_0F38__VEX()
27932 && 1==getVexL(pfx)/*256*/ in dis_ESC_0F38__VEX()
27935 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
27936 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
27952 if (have66noF2noF3(pfx) in dis_ESC_0F38__VEX()
27953 && 1==getVexL(pfx)/*256*/ in dis_ESC_0F38__VEX()
27956 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
27957 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__VEX()
27968 if (have66noF2noF3(pfx) in dis_ESC_0F38__VEX()
27969 && 1==getVexL(pfx)/*256*/ in dis_ESC_0F38__VEX()
27972 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
27973 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
27987 if (have66noF2noF3(pfx) in dis_ESC_0F38__VEX()
27988 && 1==getVexL(pfx)/*256*/ in dis_ESC_0F38__VEX()
27991 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
27992 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__VEX()
28004 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
28006 uses_vvvv, vbi, pfx, delta, in dis_ESC_0F38__VEX()
28011 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
28013 uses_vvvv, vbi, pfx, delta, in dis_ESC_0F38__VEX()
28021 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
28023 uses_vvvv, vbi, pfx, delta, in dis_ESC_0F38__VEX()
28028 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
28030 uses_vvvv, vbi, pfx, delta, in dis_ESC_0F38__VEX()
28038 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
28040 uses_vvvv, vbi, pfx, delta, in dis_ESC_0F38__VEX()
28045 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
28047 uses_vvvv, vbi, pfx, delta, in dis_ESC_0F38__VEX()
28056 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
28057 delta = dis_PMOVxXBW_128( vbi, pfx, delta, in dis_ESC_0F38__VEX()
28063 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
28064 delta = dis_PMOVxXBW_256( vbi, pfx, delta, False/*!xIsZ*/ ); in dis_ESC_0F38__VEX()
28072 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
28073 delta = dis_PMOVxXBD_128( vbi, pfx, delta, in dis_ESC_0F38__VEX()
28079 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
28080 delta = dis_PMOVxXBD_256( vbi, pfx, delta, False/*!xIsZ*/ ); in dis_ESC_0F38__VEX()
28088 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
28089 delta = dis_PMOVSXBQ_128( vbi, pfx, delta, True/*isAvx*/ ); in dis_ESC_0F38__VEX()
28094 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
28095 delta = dis_PMOVSXBQ_256( vbi, pfx, delta ); in dis_ESC_0F38__VEX()
28102 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
28103 delta = dis_PMOVxXWD_128( vbi, pfx, delta, in dis_ESC_0F38__VEX()
28108 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
28109 delta = dis_PMOVxXWD_256( vbi, pfx, delta, False/*!xIsZ*/ ); in dis_ESC_0F38__VEX()
28116 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
28117 delta = dis_PMOVSXWQ_128( vbi, pfx, delta, True/*isAvx*/ ); in dis_ESC_0F38__VEX()
28121 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
28122 delta = dis_PMOVSXWQ_256( vbi, pfx, delta ); in dis_ESC_0F38__VEX()
28129 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
28130 delta = dis_PMOVxXDQ_128( vbi, pfx, delta, in dis_ESC_0F38__VEX()
28135 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
28136 delta = dis_PMOVxXDQ_256( vbi, pfx, delta, False/*!xIsZ*/ ); in dis_ESC_0F38__VEX()
28143 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
28145 uses_vvvv, vbi, pfx, delta, in dis_ESC_0F38__VEX()
28150 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
28152 uses_vvvv, vbi, pfx, delta, in dis_ESC_0F38__VEX()
28161 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
28163 uses_vvvv, vbi, pfx, delta, "vpcmpeqq", Iop_CmpEQ64x2 ); in dis_ESC_0F38__VEX()
28168 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
28170 uses_vvvv, vbi, pfx, delta, "vpcmpeqq", Iop_CmpEQ64x4 ); in dis_ESC_0F38__VEX()
28177 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/ in dis_ESC_0F38__VEX()
28180 UInt rD = gregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
28182 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__VEX()
28191 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/ in dis_ESC_0F38__VEX()
28194 UInt rD = gregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
28196 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__VEX()
28209 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
28211 uses_vvvv, vbi, pfx, delta, "vpackusdw", in dis_ESC_0F38__VEX()
28218 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
28220 uses_vvvv, vbi, pfx, delta, "vpackusdw", in dis_ESC_0F38__VEX()
28228 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/ in dis_ESC_0F38__VEX()
28229 && 0==getRexW(pfx)/*W0*/ in dis_ESC_0F38__VEX()
28231 delta = dis_VMASKMOV( uses_vvvv, vbi, pfx, delta, "vmaskmovps", in dis_ESC_0F38__VEX()
28236 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/ in dis_ESC_0F38__VEX()
28237 && 0==getRexW(pfx)/*W0*/ in dis_ESC_0F38__VEX()
28239 delta = dis_VMASKMOV( uses_vvvv, vbi, pfx, delta, "vmaskmovps", in dis_ESC_0F38__VEX()
28247 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/ in dis_ESC_0F38__VEX()
28248 && 0==getRexW(pfx)/*W0*/ in dis_ESC_0F38__VEX()
28250 delta = dis_VMASKMOV( uses_vvvv, vbi, pfx, delta, "vmaskmovpd", in dis_ESC_0F38__VEX()
28255 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/ in dis_ESC_0F38__VEX()
28256 && 0==getRexW(pfx)/*W0*/ in dis_ESC_0F38__VEX()
28258 delta = dis_VMASKMOV( uses_vvvv, vbi, pfx, delta, "vmaskmovpd", in dis_ESC_0F38__VEX()
28266 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/ in dis_ESC_0F38__VEX()
28267 && 0==getRexW(pfx)/*W0*/ in dis_ESC_0F38__VEX()
28269 delta = dis_VMASKMOV( uses_vvvv, vbi, pfx, delta, "vmaskmovps", in dis_ESC_0F38__VEX()
28274 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/ in dis_ESC_0F38__VEX()
28275 && 0==getRexW(pfx)/*W0*/ in dis_ESC_0F38__VEX()
28277 delta = dis_VMASKMOV( uses_vvvv, vbi, pfx, delta, "vmaskmovps", in dis_ESC_0F38__VEX()
28285 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/ in dis_ESC_0F38__VEX()
28286 && 0==getRexW(pfx)/*W0*/ in dis_ESC_0F38__VEX()
28288 delta = dis_VMASKMOV( uses_vvvv, vbi, pfx, delta, "vmaskmovpd", in dis_ESC_0F38__VEX()
28293 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/ in dis_ESC_0F38__VEX()
28294 && 0==getRexW(pfx)/*W0*/ in dis_ESC_0F38__VEX()
28296 delta = dis_VMASKMOV( uses_vvvv, vbi, pfx, delta, "vmaskmovpd", in dis_ESC_0F38__VEX()
28305 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
28306 delta = dis_PMOVxXBW_128( vbi, pfx, delta, in dis_ESC_0F38__VEX()
28312 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
28313 delta = dis_PMOVxXBW_256( vbi, pfx, delta, True/*xIsZ*/ ); in dis_ESC_0F38__VEX()
28321 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
28322 delta = dis_PMOVxXBD_128( vbi, pfx, delta, in dis_ESC_0F38__VEX()
28328 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
28329 delta = dis_PMOVxXBD_256( vbi, pfx, delta, True/*xIsZ*/ ); in dis_ESC_0F38__VEX()
28337 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
28338 delta = dis_PMOVZXBQ_128( vbi, pfx, delta, True/*isAvx*/ ); in dis_ESC_0F38__VEX()
28343 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
28344 delta = dis_PMOVZXBQ_256( vbi, pfx, delta ); in dis_ESC_0F38__VEX()
28352 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
28353 delta = dis_PMOVxXWD_128( vbi, pfx, delta, in dis_ESC_0F38__VEX()
28359 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
28360 delta = dis_PMOVxXWD_256( vbi, pfx, delta, True/*xIsZ*/ ); in dis_ESC_0F38__VEX()
28367 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
28368 delta = dis_PMOVZXWQ_128( vbi, pfx, delta, True/*isAvx*/ ); in dis_ESC_0F38__VEX()
28372 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
28373 delta = dis_PMOVZXWQ_256( vbi, pfx, delta ); in dis_ESC_0F38__VEX()
28380 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
28381 delta = dis_PMOVxXDQ_128( vbi, pfx, delta, in dis_ESC_0F38__VEX()
28386 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
28387 delta = dis_PMOVxXDQ_256( vbi, pfx, delta, True/*xIsZ*/ ); in dis_ESC_0F38__VEX()
28394 if (have66noF2noF3(pfx) in dis_ESC_0F38__VEX()
28395 && 1==getVexL(pfx)/*256*/ && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F38__VEX()
28397 uses_vvvv, vbi, pfx, delta, "vpermd", math_VPERMD ); in dis_ESC_0F38__VEX()
28405 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
28407 uses_vvvv, vbi, pfx, delta, "vpcmpgtq", Iop_CmpGT64Sx2 ); in dis_ESC_0F38__VEX()
28412 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
28414 uses_vvvv, vbi, pfx, delta, "vpcmpgtq", Iop_CmpGT64Sx4 ); in dis_ESC_0F38__VEX()
28422 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
28424 uses_vvvv, vbi, pfx, delta, "vpminsb", Iop_Min8Sx16 ); in dis_ESC_0F38__VEX()
28429 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
28431 uses_vvvv, vbi, pfx, delta, "vpminsb", Iop_Min8Sx32 ); in dis_ESC_0F38__VEX()
28439 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
28441 uses_vvvv, vbi, pfx, delta, "vpminsd", Iop_Min32Sx4 ); in dis_ESC_0F38__VEX()
28446 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
28448 uses_vvvv, vbi, pfx, delta, "vpminsd", Iop_Min32Sx8 ); in dis_ESC_0F38__VEX()
28456 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
28458 uses_vvvv, vbi, pfx, delta, "vpminuw", Iop_Min16Ux8 ); in dis_ESC_0F38__VEX()
28463 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
28465 uses_vvvv, vbi, pfx, delta, "vpminuw", Iop_Min16Ux16 ); in dis_ESC_0F38__VEX()
28473 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
28475 uses_vvvv, vbi, pfx, delta, "vpminud", Iop_Min32Ux4 ); in dis_ESC_0F38__VEX()
28480 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
28482 uses_vvvv, vbi, pfx, delta, "vpminud", Iop_Min32Ux8 ); in dis_ESC_0F38__VEX()
28490 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
28492 uses_vvvv, vbi, pfx, delta, "vpmaxsb", Iop_Max8Sx16 ); in dis_ESC_0F38__VEX()
28497 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
28499 uses_vvvv, vbi, pfx, delta, "vpmaxsb", Iop_Max8Sx32 ); in dis_ESC_0F38__VEX()
28507 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
28509 uses_vvvv, vbi, pfx, delta, "vpmaxsd", Iop_Max32Sx4 ); in dis_ESC_0F38__VEX()
28514 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
28516 uses_vvvv, vbi, pfx, delta, "vpmaxsd", Iop_Max32Sx8 ); in dis_ESC_0F38__VEX()
28524 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
28526 uses_vvvv, vbi, pfx, delta, "vpmaxuw", Iop_Max16Ux8 ); in dis_ESC_0F38__VEX()
28531 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
28533 uses_vvvv, vbi, pfx, delta, "vpmaxuw", Iop_Max16Ux16 ); in dis_ESC_0F38__VEX()
28541 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
28543 uses_vvvv, vbi, pfx, delta, "vpmaxud", Iop_Max32Ux4 ); in dis_ESC_0F38__VEX()
28548 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
28550 uses_vvvv, vbi, pfx, delta, "vpmaxud", Iop_Max32Ux8 ); in dis_ESC_0F38__VEX()
28558 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
28560 uses_vvvv, vbi, pfx, delta, "vpmulld", Iop_Mul32x4 ); in dis_ESC_0F38__VEX()
28565 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F38__VEX()
28567 uses_vvvv, vbi, pfx, delta, "vpmulld", Iop_Mul32x8 ); in dis_ESC_0F38__VEX()
28574 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
28575 delta = dis_PHMINPOSUW_128( vbi, pfx, delta, True/*isAvx*/ ); in dis_ESC_0F38__VEX()
28583 if (have66noF2noF3(pfx) && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F38__VEX()
28584 delta = dis_AVX_var_shiftV_byE( vbi, pfx, delta, "vpsrlvd", in dis_ESC_0F38__VEX()
28585 Iop_Shr32, 1==getVexL(pfx) ); in dis_ESC_0F38__VEX()
28591 if (have66noF2noF3(pfx) && 1==getRexW(pfx)/*W1*/) { in dis_ESC_0F38__VEX()
28592 delta = dis_AVX_var_shiftV_byE( vbi, pfx, delta, "vpsrlvq", in dis_ESC_0F38__VEX()
28593 Iop_Shr64, 1==getVexL(pfx) ); in dis_ESC_0F38__VEX()
28602 if (have66noF2noF3(pfx) && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F38__VEX()
28603 delta = dis_AVX_var_shiftV_byE( vbi, pfx, delta, "vpsravd", in dis_ESC_0F38__VEX()
28604 Iop_Sar32, 1==getVexL(pfx) ); in dis_ESC_0F38__VEX()
28613 if (have66noF2noF3(pfx) && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F38__VEX()
28614 delta = dis_AVX_var_shiftV_byE( vbi, pfx, delta, "vpsllvd", in dis_ESC_0F38__VEX()
28615 Iop_Shl32, 1==getVexL(pfx) ); in dis_ESC_0F38__VEX()
28621 if (have66noF2noF3(pfx) && 1==getRexW(pfx)/*W1*/) { in dis_ESC_0F38__VEX()
28622 delta = dis_AVX_var_shiftV_byE( vbi, pfx, delta, "vpsllvq", in dis_ESC_0F38__VEX()
28623 Iop_Shl64, 1==getVexL(pfx) ); in dis_ESC_0F38__VEX()
28631 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/ in dis_ESC_0F38__VEX()
28632 && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F38__VEX()
28634 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
28637 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
28642 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__VEX()
28654 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/ in dis_ESC_0F38__VEX()
28655 && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F38__VEX()
28657 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
28660 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
28665 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__VEX()
28681 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/ in dis_ESC_0F38__VEX()
28682 && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F38__VEX()
28684 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
28687 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
28692 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__VEX()
28702 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/ in dis_ESC_0F38__VEX()
28703 && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F38__VEX()
28705 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
28708 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
28713 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__VEX()
28727 if (have66noF2noF3(pfx) in dis_ESC_0F38__VEX()
28728 && 1==getVexL(pfx)/*256*/ in dis_ESC_0F38__VEX()
28731 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
28732 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__VEX()
28744 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/ in dis_ESC_0F38__VEX()
28745 && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F38__VEX()
28747 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
28750 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
28755 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__VEX()
28771 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/ in dis_ESC_0F38__VEX()
28772 && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F38__VEX()
28774 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
28777 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
28782 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__VEX()
28802 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/ in dis_ESC_0F38__VEX()
28803 && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F38__VEX()
28805 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
28808 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
28813 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__VEX()
28827 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/ in dis_ESC_0F38__VEX()
28828 && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F38__VEX()
28830 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
28833 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F38__VEX()
28838 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__VEX()
28856 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/ in dis_ESC_0F38__VEX()
28857 && 0==getRexW(pfx)/*W0*/ && !epartIsReg(getUChar(delta))) { in dis_ESC_0F38__VEX()
28858 delta = dis_VMASKMOV( uses_vvvv, vbi, pfx, delta, "vpmaskmovd", in dis_ESC_0F38__VEX()
28863 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/ in dis_ESC_0F38__VEX()
28864 && 0==getRexW(pfx)/*W0*/ && !epartIsReg(getUChar(delta))) { in dis_ESC_0F38__VEX()
28865 delta = dis_VMASKMOV( uses_vvvv, vbi, pfx, delta, "vpmaskmovd", in dis_ESC_0F38__VEX()
28870 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/ in dis_ESC_0F38__VEX()
28871 && 1==getRexW(pfx)/*W1*/ && !epartIsReg(getUChar(delta))) { in dis_ESC_0F38__VEX()
28872 delta = dis_VMASKMOV( uses_vvvv, vbi, pfx, delta, "vpmaskmovq", in dis_ESC_0F38__VEX()
28877 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/ in dis_ESC_0F38__VEX()
28878 && 1==getRexW(pfx)/*W1*/ && !epartIsReg(getUChar(delta))) { in dis_ESC_0F38__VEX()
28879 delta = dis_VMASKMOV( uses_vvvv, vbi, pfx, delta, "vpmaskmovq", in dis_ESC_0F38__VEX()
28887 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/ in dis_ESC_0F38__VEX()
28888 && 0==getRexW(pfx)/*W0*/ && !epartIsReg(getUChar(delta))) { in dis_ESC_0F38__VEX()
28889 delta = dis_VMASKMOV( uses_vvvv, vbi, pfx, delta, "vpmaskmovd", in dis_ESC_0F38__VEX()
28894 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/ in dis_ESC_0F38__VEX()
28895 && 0==getRexW(pfx)/*W0*/ && !epartIsReg(getUChar(delta))) { in dis_ESC_0F38__VEX()
28896 delta = dis_VMASKMOV( uses_vvvv, vbi, pfx, delta, "vpmaskmovd", in dis_ESC_0F38__VEX()
28901 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/ in dis_ESC_0F38__VEX()
28902 && 1==getRexW(pfx)/*W1*/ && !epartIsReg(getUChar(delta))) { in dis_ESC_0F38__VEX()
28903 delta = dis_VMASKMOV( uses_vvvv, vbi, pfx, delta, "vpmaskmovq", in dis_ESC_0F38__VEX()
28908 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/ in dis_ESC_0F38__VEX()
28909 && 1==getRexW(pfx)/*W1*/ && !epartIsReg(getUChar(delta))) { in dis_ESC_0F38__VEX()
28910 delta = dis_VMASKMOV( uses_vvvv, vbi, pfx, delta, "vpmaskmovq", in dis_ESC_0F38__VEX()
28918 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/ in dis_ESC_0F38__VEX()
28919 && 0 == getRexW(pfx)/*W0*/ && !epartIsReg(getUChar(delta))) { in dis_ESC_0F38__VEX()
28921 delta = dis_VGATHER( uses_vvvv, vbi, pfx, delta, "vpgatherdd", in dis_ESC_0F38__VEX()
28927 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/ in dis_ESC_0F38__VEX()
28928 && 0 == getRexW(pfx)/*W0*/ && !epartIsReg(getUChar(delta))) { in dis_ESC_0F38__VEX()
28930 delta = dis_VGATHER( uses_vvvv, vbi, pfx, delta, "vpgatherdd", in dis_ESC_0F38__VEX()
28936 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/ in dis_ESC_0F38__VEX()
28937 && 1 == getRexW(pfx)/*W1*/ && !epartIsReg(getUChar(delta))) { in dis_ESC_0F38__VEX()
28939 delta = dis_VGATHER( uses_vvvv, vbi, pfx, delta, "vpgatherdq", in dis_ESC_0F38__VEX()
28945 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/ in dis_ESC_0F38__VEX()
28946 && 1 == getRexW(pfx)/*W1*/ && !epartIsReg(getUChar(delta))) { in dis_ESC_0F38__VEX()
28948 delta = dis_VGATHER( uses_vvvv, vbi, pfx, delta, "vpgatherdq", in dis_ESC_0F38__VEX()
28957 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/ in dis_ESC_0F38__VEX()
28958 && 0 == getRexW(pfx)/*W0*/ && !epartIsReg(getUChar(delta))) { in dis_ESC_0F38__VEX()
28960 delta = dis_VGATHER( uses_vvvv, vbi, pfx, delta, "vpgatherqd", in dis_ESC_0F38__VEX()
28966 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/ in dis_ESC_0F38__VEX()
28967 && 0 == getRexW(pfx)/*W0*/ && !epartIsReg(getUChar(delta))) { in dis_ESC_0F38__VEX()
28969 delta = dis_VGATHER( uses_vvvv, vbi, pfx, delta, "vpgatherqd", in dis_ESC_0F38__VEX()
28975 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/ in dis_ESC_0F38__VEX()
28976 && 1 == getRexW(pfx)/*W1*/ && !epartIsReg(getUChar(delta))) { in dis_ESC_0F38__VEX()
28978 delta = dis_VGATHER( uses_vvvv, vbi, pfx, delta, "vpgatherqq", in dis_ESC_0F38__VEX()
28984 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/ in dis_ESC_0F38__VEX()
28985 && 1 == getRexW(pfx)/*W1*/ && !epartIsReg(getUChar(delta))) { in dis_ESC_0F38__VEX()
28987 delta = dis_VGATHER( uses_vvvv, vbi, pfx, delta, "vpgatherqq", in dis_ESC_0F38__VEX()
28996 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/ in dis_ESC_0F38__VEX()
28997 && 0 == getRexW(pfx)/*W0*/ && !epartIsReg(getUChar(delta))) { in dis_ESC_0F38__VEX()
28999 delta = dis_VGATHER( uses_vvvv, vbi, pfx, delta, "vgatherdps", in dis_ESC_0F38__VEX()
29005 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/ in dis_ESC_0F38__VEX()
29006 && 0 == getRexW(pfx)/*W0*/ && !epartIsReg(getUChar(delta))) { in dis_ESC_0F38__VEX()
29008 delta = dis_VGATHER( uses_vvvv, vbi, pfx, delta, "vgatherdps", in dis_ESC_0F38__VEX()
29014 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/ in dis_ESC_0F38__VEX()
29015 && 1 == getRexW(pfx)/*W1*/ && !epartIsReg(getUChar(delta))) { in dis_ESC_0F38__VEX()
29017 delta = dis_VGATHER( uses_vvvv, vbi, pfx, delta, "vgatherdpd", in dis_ESC_0F38__VEX()
29023 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/ in dis_ESC_0F38__VEX()
29024 && 1 == getRexW(pfx)/*W1*/ && !epartIsReg(getUChar(delta))) { in dis_ESC_0F38__VEX()
29026 delta = dis_VGATHER( uses_vvvv, vbi, pfx, delta, "vgatherdpd", in dis_ESC_0F38__VEX()
29035 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/ in dis_ESC_0F38__VEX()
29036 && 0 == getRexW(pfx)/*W0*/ && !epartIsReg(getUChar(delta))) { in dis_ESC_0F38__VEX()
29038 delta = dis_VGATHER( uses_vvvv, vbi, pfx, delta, "vgatherqps", in dis_ESC_0F38__VEX()
29044 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/ in dis_ESC_0F38__VEX()
29045 && 0 == getRexW(pfx)/*W0*/ && !epartIsReg(getUChar(delta))) { in dis_ESC_0F38__VEX()
29047 delta = dis_VGATHER( uses_vvvv, vbi, pfx, delta, "vgatherqps", in dis_ESC_0F38__VEX()
29053 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/ in dis_ESC_0F38__VEX()
29054 && 1 == getRexW(pfx)/*W1*/ && !epartIsReg(getUChar(delta))) { in dis_ESC_0F38__VEX()
29056 delta = dis_VGATHER( uses_vvvv, vbi, pfx, delta, "vgatherqpd", in dis_ESC_0F38__VEX()
29062 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/ in dis_ESC_0F38__VEX()
29063 && 1 == getRexW(pfx)/*W1*/ && !epartIsReg(getUChar(delta))) { in dis_ESC_0F38__VEX()
29065 delta = dis_VGATHER( uses_vvvv, vbi, pfx, delta, "vgatherqpd", in dis_ESC_0F38__VEX()
29171 if (have66noF2noF3(pfx)) { in dis_ESC_0F38__VEX()
29172 delta = dis_FMA( vbi, pfx, delta, opc ); in dis_ESC_0F38__VEX()
29188 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F38__VEX()
29189 delta = dis_AESx( vbi, pfx, delta, True/*isAvx*/, opc ); in dis_ESC_0F38__VEX()
29198 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*LZ*/ && !haveREX(pfx)) { in dis_ESC_0F38__VEX()
29199 Int size = getRexW(pfx) ? 8 : 4; in dis_ESC_0F38__VEX()
29206 assign( src1, getIRegV(size,pfx) ); in dis_ESC_0F38__VEX()
29208 assign( src2, getIRegE(size,pfx,rm) ); in dis_ESC_0F38__VEX()
29209 DIP("andn %s,%s,%s\n", nameIRegE(size,pfx,rm), in dis_ESC_0F38__VEX()
29210 nameIRegV(size,pfx), nameIRegG(size,pfx,rm)); in dis_ESC_0F38__VEX()
29213 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__VEX()
29215 DIP("andn %s,%s,%s\n", dis_buf, nameIRegV(size,pfx), in dis_ESC_0F38__VEX()
29216 nameIRegG(size,pfx,rm)); in dis_ESC_0F38__VEX()
29223 putIRegG( size, pfx, rm, mkexpr(dst) ); in dis_ESC_0F38__VEX()
29237 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*LZ*/ in dis_ESC_0F38__VEX()
29238 && !haveREX(pfx) && gregLO3ofRM(getUChar(delta)) == 3) { in dis_ESC_0F38__VEX()
29239 Int size = getRexW(pfx) ? 8 : 4; in dis_ESC_0F38__VEX()
29246 assign( src, getIRegE(size,pfx,rm) ); in dis_ESC_0F38__VEX()
29247 DIP("blsi %s,%s\n", nameIRegE(size,pfx,rm), in dis_ESC_0F38__VEX()
29248 nameIRegV(size,pfx)); in dis_ESC_0F38__VEX()
29251 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__VEX()
29253 DIP("blsi %s,%s\n", dis_buf, nameIRegV(size,pfx)); in dis_ESC_0F38__VEX()
29260 putIRegV( size, pfx, mkexpr(dst) ); in dis_ESC_0F38__VEX()
29271 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*LZ*/ in dis_ESC_0F38__VEX()
29272 && !haveREX(pfx) && gregLO3ofRM(getUChar(delta)) == 2) { in dis_ESC_0F38__VEX()
29273 Int size = getRexW(pfx) ? 8 : 4; in dis_ESC_0F38__VEX()
29280 assign( src, getIRegE(size,pfx,rm) ); in dis_ESC_0F38__VEX()
29281 DIP("blsmsk %s,%s\n", nameIRegE(size,pfx,rm), in dis_ESC_0F38__VEX()
29282 nameIRegV(size,pfx)); in dis_ESC_0F38__VEX()
29285 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__VEX()
29287 DIP("blsmsk %s,%s\n", dis_buf, nameIRegV(size,pfx)); in dis_ESC_0F38__VEX()
29294 putIRegV( size, pfx, mkexpr(dst) ); in dis_ESC_0F38__VEX()
29305 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*LZ*/ in dis_ESC_0F38__VEX()
29306 && !haveREX(pfx) && gregLO3ofRM(getUChar(delta)) == 1) { in dis_ESC_0F38__VEX()
29307 Int size = getRexW(pfx) ? 8 : 4; in dis_ESC_0F38__VEX()
29314 assign( src, getIRegE(size,pfx,rm) ); in dis_ESC_0F38__VEX()
29315 DIP("blsr %s,%s\n", nameIRegE(size,pfx,rm), in dis_ESC_0F38__VEX()
29316 nameIRegV(size,pfx)); in dis_ESC_0F38__VEX()
29319 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__VEX()
29321 DIP("blsr %s,%s\n", dis_buf, nameIRegV(size,pfx)); in dis_ESC_0F38__VEX()
29328 putIRegV( size, pfx, mkexpr(dst) ); in dis_ESC_0F38__VEX()
29342 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*LZ*/ && !haveREX(pfx)) { in dis_ESC_0F38__VEX()
29343 Int size = getRexW(pfx) ? 8 : 4; in dis_ESC_0F38__VEX()
29352 assign( src2, getIRegV(size,pfx) ); in dis_ESC_0F38__VEX()
29354 assign( src1, getIRegE(size,pfx,rm) ); in dis_ESC_0F38__VEX()
29355 DIP("bzhi %s,%s,%s\n", nameIRegV(size,pfx), in dis_ESC_0F38__VEX()
29356 nameIRegE(size,pfx,rm), nameIRegG(size,pfx,rm)); in dis_ESC_0F38__VEX()
29359 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__VEX()
29361 DIP("bzhi %s,%s,%s\n", nameIRegV(size,pfx), dis_buf, in dis_ESC_0F38__VEX()
29362 nameIRegG(size,pfx,rm)); in dis_ESC_0F38__VEX()
29397 putIRegG( size, pfx, rm, mkexpr(dst) ); in dis_ESC_0F38__VEX()
29408 if (haveF2no66noF3(pfx) && 0==getVexL(pfx)/*LZ*/ && !haveREX(pfx)) { in dis_ESC_0F38__VEX()
29409 Int size = getRexW(pfx) ? 8 : 4; in dis_ESC_0F38__VEX()
29415 assign( src, getIRegV(size,pfx) ); in dis_ESC_0F38__VEX()
29417 assign( mask, getIRegE(size,pfx,rm) ); in dis_ESC_0F38__VEX()
29418 DIP("pdep %s,%s,%s\n", nameIRegE(size,pfx,rm), in dis_ESC_0F38__VEX()
29419 nameIRegV(size,pfx), nameIRegG(size,pfx,rm)); in dis_ESC_0F38__VEX()
29422 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__VEX()
29424 DIP("pdep %s,%s,%s\n", dis_buf, nameIRegV(size,pfx), in dis_ESC_0F38__VEX()
29425 nameIRegG(size,pfx,rm)); in dis_ESC_0F38__VEX()
29431 putIRegG( size, pfx, rm, in dis_ESC_0F38__VEX()
29441 if (haveF3no66noF2(pfx) && 0==getVexL(pfx)/*LZ*/ && !haveREX(pfx)) { in dis_ESC_0F38__VEX()
29442 Int size = getRexW(pfx) ? 8 : 4; in dis_ESC_0F38__VEX()
29448 assign( src, getIRegV(size,pfx) ); in dis_ESC_0F38__VEX()
29450 assign( mask, getIRegE(size,pfx,rm) ); in dis_ESC_0F38__VEX()
29451 DIP("pext %s,%s,%s\n", nameIRegE(size,pfx,rm), in dis_ESC_0F38__VEX()
29452 nameIRegV(size,pfx), nameIRegG(size,pfx,rm)); in dis_ESC_0F38__VEX()
29455 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__VEX()
29457 DIP("pext %s,%s,%s\n", dis_buf, nameIRegV(size,pfx), in dis_ESC_0F38__VEX()
29458 nameIRegG(size,pfx,rm)); in dis_ESC_0F38__VEX()
29468 putIRegG( size, pfx, rm, in dis_ESC_0F38__VEX()
29481 if (haveF2no66noF3(pfx) && 0==getVexL(pfx)/*LZ*/ && !haveREX(pfx)) { in dis_ESC_0F38__VEX()
29482 Int size = getRexW(pfx) ? 8 : 4; in dis_ESC_0F38__VEX()
29491 assign( src2, getIRegE(size,pfx,rm) ); in dis_ESC_0F38__VEX()
29492 DIP("mulx %s,%s,%s\n", nameIRegE(size,pfx,rm), in dis_ESC_0F38__VEX()
29493 nameIRegV(size,pfx), nameIRegG(size,pfx,rm)); in dis_ESC_0F38__VEX()
29496 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__VEX()
29498 DIP("mulx %s,%s,%s\n", dis_buf, nameIRegV(size,pfx), in dis_ESC_0F38__VEX()
29499 nameIRegG(size,pfx,rm)); in dis_ESC_0F38__VEX()
29505 putIRegV( size, pfx, in dis_ESC_0F38__VEX()
29507 putIRegG( size, pfx, rm, in dis_ESC_0F38__VEX()
29519 if (haveF3no66noF2(pfx) && 0==getVexL(pfx)/*LZ*/ && !haveREX(pfx)) { in dis_ESC_0F38__VEX()
29520 delta = dis_SHIFTX( uses_vvvv, vbi, pfx, delta, "sarx", Iop_Sar8 ); in dis_ESC_0F38__VEX()
29525 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*LZ*/ && !haveREX(pfx)) { in dis_ESC_0F38__VEX()
29526 delta = dis_SHIFTX( uses_vvvv, vbi, pfx, delta, "shlx", Iop_Shl8 ); in dis_ESC_0F38__VEX()
29531 if (haveF2no66noF3(pfx) && 0==getVexL(pfx)/*LZ*/ && !haveREX(pfx)) { in dis_ESC_0F38__VEX()
29532 delta = dis_SHIFTX( uses_vvvv, vbi, pfx, delta, "shrx", Iop_Shr8 ); in dis_ESC_0F38__VEX()
29537 if (haveNo66noF2noF3(pfx) && 0==getVexL(pfx)/*LZ*/ && !haveREX(pfx)) { in dis_ESC_0F38__VEX()
29538 Int size = getRexW(pfx) ? 8 : 4; in dis_ESC_0F38__VEX()
29548 assign( src2, getIRegV(size,pfx) ); in dis_ESC_0F38__VEX()
29550 assign( src1, getIRegE(size,pfx,rm) ); in dis_ESC_0F38__VEX()
29551 DIP("bextr %s,%s,%s\n", nameIRegV(size,pfx), in dis_ESC_0F38__VEX()
29552 nameIRegE(size,pfx,rm), nameIRegG(size,pfx,rm)); in dis_ESC_0F38__VEX()
29555 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F38__VEX()
29557 DIP("bextr %s,%s,%s\n", nameIRegV(size,pfx), dis_buf, in dis_ESC_0F38__VEX()
29558 nameIRegG(size,pfx,rm)); in dis_ESC_0F38__VEX()
29605 putIRegG( size, pfx, rm, mkexpr(dst) ); in dis_ESC_0F38__VEX()
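Every case in dis_ESC_0F38__VEX() above follows the same guard shape: a mandatory-prefix predicate such as have66noF2noF3(pfx), combined with getVexL(pfx) (0 = 128-bit, 1 = 256-bit) and getRexW(pfx) (W0 vs W1), selects exactly one encoding form before any operand is decoded, and anything else falls through to decode_failure. The following is a minimal standalone sketch of that selection logic only; the PFX_* values and helper bodies here are made-up stand-ins that merely mirror the predicates quoted in this listing, not the real definitions.

   #include <stdio.h>

   /* Hypothetical stand-ins for the Prefix bits referenced in the listing. */
   typedef unsigned int Prefix;
   #define PFX_66   (1u << 0)
   #define PFX_F2   (1u << 1)
   #define PFX_F3   (1u << 2)
   #define PFX_VEXL (1u << 3)
   #define PFX_REXW (1u << 4)

   static int have66noF2noF3(Prefix pfx) {
      return (pfx & (PFX_66|PFX_F2|PFX_F3)) == PFX_66;
   }
   static int getVexL(Prefix pfx) { return (pfx & PFX_VEXL) ? 1 : 0; }
   static int getRexW(Prefix pfx) { return (pfx & PFX_REXW) ? 1 : 0; }

   /* Choose a form the way the guards in the listing do. */
   static const char* pick_form(Prefix pfx) {
      if (have66noF2noF3(pfx) && 0 == getVexL(pfx) && 0 == getRexW(pfx))
         return "128-bit, W0";
      if (have66noF2noF3(pfx) && 1 == getVexL(pfx) && 0 == getRexW(pfx))
         return "256-bit, W0";
      return "no valid form -> decode_failure";
   }

   int main(void) {
      printf("%s\n", pick_form(PFX_66));             /* 128-bit, W0      */
      printf("%s\n", pick_form(PFX_66 | PFX_VEXL));  /* 256-bit, W0      */
      printf("%s\n", pick_form(PFX_66 | PFX_F3));    /* decode_failure   */
      return 0;
   }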
29662 Prefix pfx, Int sz, Long deltaIN in dis_ESC_0F3A__VEX() argument
29679 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/ in dis_ESC_0F3A__VEX()
29680 && 1==getRexW(pfx)/*W1*/) { in dis_ESC_0F3A__VEX()
29683 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
29687 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
29694 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
29718 if (have66noF2noF3(pfx) in dis_ESC_0F3A__VEX()
29719 && 0==getVexL(pfx)/*128*/ && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F3A__VEX()
29722 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
29723 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F3A__VEX()
29730 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
29737 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
29758 if (have66noF2noF3(pfx) in dis_ESC_0F3A__VEX()
29759 && 1==getVexL(pfx)/*256*/ && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F3A__VEX()
29762 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
29763 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F3A__VEX()
29770 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
29777 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
29802 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F3A__VEX()
29805 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
29808 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
29815 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
29832 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F3A__VEX()
29835 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
29838 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
29845 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
29860 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F3A__VEX()
29863 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
29866 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
29873 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
29893 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F3A__VEX()
29896 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
29899 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
29906 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
29930 if (have66noF2noF3(pfx) in dis_ESC_0F3A__VEX()
29931 && 1==getVexL(pfx)/*256*/ && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F3A__VEX()
29934 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
29935 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F3A__VEX()
29943 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
29951 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
29977 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F3A__VEX()
29979 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
29991 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
29998 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
30025 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F3A__VEX()
30027 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30043 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30050 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
30082 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F3A__VEX()
30084 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30094 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30101 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
30126 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F3A__VEX()
30128 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30140 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30147 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
30178 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F3A__VEX()
30180 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30181 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F3A__VEX()
30188 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30198 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
30233 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F3A__VEX()
30236 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30237 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F3A__VEX()
30242 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30249 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
30264 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F3A__VEX()
30267 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30268 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F3A__VEX()
30273 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30280 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
30298 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F3A__VEX()
30301 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30302 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F3A__VEX()
30307 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30314 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
30329 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F3A__VEX()
30332 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30333 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F3A__VEX()
30338 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30345 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
30363 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F3A__VEX()
30366 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30367 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F3A__VEX()
30372 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30379 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
30394 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F3A__VEX()
30397 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30398 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F3A__VEX()
30405 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30412 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
30433 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F3A__VEX()
30435 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30436 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F3A__VEX()
30444 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30451 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
30466 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F3A__VEX()
30468 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30469 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F3A__VEX()
30479 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30486 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
30507 if (have66noF2noF3(pfx) in dis_ESC_0F3A__VEX()
30508 && 0==getVexL(pfx)/*128*/ && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F3A__VEX()
30509 delta = dis_PEXTRB_128_GtoE( vbi, pfx, delta, False/*!isAvx*/ ); in dis_ESC_0F3A__VEX()
30517 if (have66noF2noF3(pfx) in dis_ESC_0F3A__VEX()
30518 && 0==getVexL(pfx)/*128*/ && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F3A__VEX()
30519 delta = dis_PEXTRW( vbi, pfx, delta, True/*isAvx*/ ); in dis_ESC_0F3A__VEX()
30527 if (have66noF2noF3(pfx) in dis_ESC_0F3A__VEX()
30528 && 0==getVexL(pfx)/*128*/ && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F3A__VEX()
30529 delta = dis_PEXTRD( vbi, pfx, delta, True/*isAvx*/ ); in dis_ESC_0F3A__VEX()
30533 if (have66noF2noF3(pfx) in dis_ESC_0F3A__VEX()
30534 && 0==getVexL(pfx)/*128*/ && 1==getRexW(pfx)/*W1*/) { in dis_ESC_0F3A__VEX()
30535 delta = dis_PEXTRQ( vbi, pfx, delta, True/*isAvx*/ ); in dis_ESC_0F3A__VEX()
30542 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F3A__VEX()
30543 delta = dis_EXTRACTPS( vbi, pfx, delta, True/*isAvx*/ ); in dis_ESC_0F3A__VEX()
30552 if (have66noF2noF3(pfx) in dis_ESC_0F3A__VEX()
30553 && 1==getVexL(pfx)/*256*/ && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F3A__VEX()
30556 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30557 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F3A__VEX()
30560 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30567 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
30587 if (have66noF2noF3(pfx) in dis_ESC_0F3A__VEX()
30588 && 1==getVexL(pfx)/*256*/ && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F3A__VEX()
30591 UInt rS = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30594 UInt rD = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30602 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
30618 if (have66noF2noF3(pfx) in dis_ESC_0F3A__VEX()
30619 && 0==getVexL(pfx)/*128*/ && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F3A__VEX()
30621 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30622 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F3A__VEX()
30627 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F3A__VEX()
30634 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
30654 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F3A__VEX()
30656 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30657 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F3A__VEX()
30663 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30674 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
30693 if (have66noF2noF3(pfx) in dis_ESC_0F3A__VEX()
30694 && 0==getVexL(pfx)/*128*/ && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F3A__VEX()
30696 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30697 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F3A__VEX()
30702 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F3A__VEX()
30709 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
30725 if (have66noF2noF3(pfx) in dis_ESC_0F3A__VEX()
30726 && 0==getVexL(pfx)/*128*/ && 1==getRexW(pfx)/*W1*/) { in dis_ESC_0F3A__VEX()
30728 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30729 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F3A__VEX()
30734 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F3A__VEX()
30741 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
30762 if (have66noF2noF3(pfx) in dis_ESC_0F3A__VEX()
30763 && 1==getVexL(pfx)/*256*/ && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F3A__VEX()
30766 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30767 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F3A__VEX()
30770 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30777 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
30797 if (have66noF2noF3(pfx) in dis_ESC_0F3A__VEX()
30798 && 1==getVexL(pfx)/*256*/ && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F3A__VEX()
30801 UInt rS = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30804 UInt rD = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30812 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
30828 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F3A__VEX()
30830 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30831 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F3A__VEX()
30835 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F3A__VEX()
30842 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
30858 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F3A__VEX()
30860 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30861 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F3A__VEX()
30865 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F3A__VEX()
30872 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
30896 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F3A__VEX()
30898 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30899 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F3A__VEX()
30903 UInt rE = eregOfRexRM(pfx,modrm); in dis_ESC_0F3A__VEX()
30910 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
30930 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F3A__VEX()
30935 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30936 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F3A__VEX()
30941 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30949 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, in dis_ESC_0F3A__VEX()
30965 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F3A__VEX()
30970 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30971 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F3A__VEX()
30978 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
30986 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, in dis_ESC_0F3A__VEX()
31012 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F3A__VEX()
31017 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
31018 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F3A__VEX()
31023 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
31030 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, in dis_ESC_0F3A__VEX()
31047 if (have66noF2noF3(pfx) in dis_ESC_0F3A__VEX()
31048 && 1==getVexL(pfx)/*256*/ && 0==getRexW(pfx)/*W0*/) { in dis_ESC_0F3A__VEX()
31051 UInt rG = gregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
31052 UInt rV = getVexNvvvv(pfx); in dis_ESC_0F3A__VEX()
31060 UInt rE = eregOfRexRM(pfx, modrm); in dis_ESC_0F3A__VEX()
31068 addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 ); in dis_ESC_0F3A__VEX()
31095 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F3A__VEX()
31096 delta = dis_VBLENDV_128 ( vbi, pfx, delta, in dis_ESC_0F3A__VEX()
31104 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F3A__VEX()
31105 delta = dis_VBLENDV_256 ( vbi, pfx, delta, in dis_ESC_0F3A__VEX()
31116 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F3A__VEX()
31117 delta = dis_VBLENDV_128 ( vbi, pfx, delta, in dis_ESC_0F3A__VEX()
31125 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F3A__VEX()
31126 delta = dis_VBLENDV_256 ( vbi, pfx, delta, in dis_ESC_0F3A__VEX()
31137 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F3A__VEX()
31138 delta = dis_VBLENDV_128 ( vbi, pfx, delta, in dis_ESC_0F3A__VEX()
31146 if (have66noF2noF3(pfx) && 1==getVexL(pfx)/*256*/) { in dis_ESC_0F3A__VEX()
31147 delta = dis_VBLENDV_256 ( vbi, pfx, delta, in dis_ESC_0F3A__VEX()
31165 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F3A__VEX()
31167 delta = dis_PCMPxSTRx( vbi, pfx, delta, True/*isAvx*/, opc ); in dis_ESC_0F3A__VEX()
31175 if (have66noF2noF3(pfx) && 0==getVexL(pfx)/*128*/) { in dis_ESC_0F3A__VEX()
31176 delta = dis_AESKEYGENASSIST( vbi, pfx, delta, True/*isAvx*/ ); in dis_ESC_0F3A__VEX()
31184 if (haveF2no66noF3(pfx) && 0==getVexL(pfx)/*LZ*/ && !haveREX(pfx)) { in dis_ESC_0F3A__VEX()
31185 Int size = getRexW(pfx) ? 8 : 4; in dis_ESC_0F3A__VEX()
31193 assign( src, getIRegE(size,pfx,rm) ); in dis_ESC_0F3A__VEX()
31194 DIP("rorx %d,%s,%s\n", imm8, nameIRegE(size,pfx,rm), in dis_ESC_0F3A__VEX()
31195 nameIRegG(size,pfx,rm)); in dis_ESC_0F3A__VEX()
31198 addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 ); in dis_ESC_0F3A__VEX()
31201 DIP("rorx %d,%s,%s\n", imm8, dis_buf, nameIRegG(size,pfx,rm)); in dis_ESC_0F3A__VEX()
31207 putIRegG( size, pfx, rm, in dis_ESC_0F3A__VEX()
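In the dis_ESC_0F3A__VEX() cases above, register numbers come from gregOfRexRM(pfx, modrm) and eregOfRexRM(pfx, modrm), and disAMode() is passed a trailing-byte count of 1 because every 0F3A form carries an imm8 after the memory operand. The sketch below shows only the ModRM field extraction under the assumption that those helpers follow the standard layout (reg = bits 5..3 widened by R, rm = bits 2..0 widened by B); the _sketch names and globals are illustrative, not the real API.

   #include <stdio.h>

   typedef unsigned int  UInt;
   typedef unsigned char UChar;

   /* Assumed equivalents of getRexR/getRexB: 1 if the (possibly
      VEX-inverted) R/B extension bit is in effect, else 0. */
   static UInt rexR, rexB;

   /* reg field of ModRM, widened to 4 bits by REX.R / VEX.~R. */
   static UInt gregOfRexRM_sketch(UChar modrm) {
      return ((modrm >> 3) & 7) | (rexR << 3);
   }
   /* rm field of ModRM, widened to 4 bits by REX.B / VEX.~B
      (meaningful as a register number only when mod == 3). */
   static UInt eregOfRexRM_sketch(UChar modrm) {
      return (modrm & 7) | (rexB << 3);
   }

   int main(void) {
      UChar modrm = 0xC8;            /* mod=3, reg=1, rm=0 */
      rexR = 1; rexB = 0;
      printf("rG = %u, rE = %u\n",   /* prints rG = 9, rE = 0 */
             gregOfRexRM_sketch(modrm), eregOfRexRM_sketch(modrm));
      return 0;
   }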
31271 Prefix pfx = PFX_EMPTY; local
31376 case 0x66: pfx |= PFX_66; break;
31377 case 0x67: pfx |= PFX_ASO; break;
31378 case 0xF2: pfx |= PFX_F2; break;
31379 case 0xF3: pfx |= PFX_F3; break;
31380 case 0xF0: pfx |= PFX_LOCK; *expect_CAS = True; break;
31381 case 0x2E: pfx |= PFX_CS; break;
31382 case 0x3E: pfx |= PFX_DS; break;
31383 case 0x26: pfx |= PFX_ES; break;
31384 case 0x64: pfx |= PFX_FS; break;
31385 case 0x65: pfx |= PFX_GS; break;
31386 case 0x36: pfx |= PFX_SS; break;
31388 pfx |= PFX_REX;
31389 if (pre & (1<<3)) pfx |= PFX_REXW;
31390 if (pre & (1<<2)) pfx |= PFX_REXR;
31391 if (pre & (1<<1)) pfx |= PFX_REXX;
31392 if (pre & (1<<0)) pfx |= PFX_REXB;
31412 pfx |= PFX_VEX;
31414 /* R */ pfx |= (vex1 & (1<<7)) ? 0 : PFX_REXR;
31415 /* X */ pfx |= (vex1 & (1<<6)) ? 0 : PFX_REXX;
31416 /* B */ pfx |= (vex1 & (1<<5)) ? 0 : PFX_REXB;
31426 /* W */ pfx |= (vex2 & (1<<7)) ? PFX_REXW : 0;
31427 /* ~v3 */ pfx |= (vex2 & (1<<6)) ? 0 : PFX_VEXnV3;
31428 /* ~v2 */ pfx |= (vex2 & (1<<5)) ? 0 : PFX_VEXnV2;
31429 /* ~v1 */ pfx |= (vex2 & (1<<4)) ? 0 : PFX_VEXnV1;
31430 /* ~v0 */ pfx |= (vex2 & (1<<3)) ? 0 : PFX_VEXnV0;
31431 /* L */ pfx |= (vex2 & (1<<2)) ? PFX_VEXL : 0;
31435 case 1: pfx |= PFX_66; break;
31436 case 2: pfx |= PFX_F3; break;
31437 case 3: pfx |= PFX_F2; break;
31445 pfx |= PFX_VEX;
31447 /* R */ pfx |= (vex1 & (1<<7)) ? 0 : PFX_REXR;
31448 /* ~v3 */ pfx |= (vex1 & (1<<6)) ? 0 : PFX_VEXnV3;
31449 /* ~v2 */ pfx |= (vex1 & (1<<5)) ? 0 : PFX_VEXnV2;
31450 /* ~v1 */ pfx |= (vex1 & (1<<4)) ? 0 : PFX_VEXnV1;
31451 /* ~v0 */ pfx |= (vex1 & (1<<3)) ? 0 : PFX_VEXnV0;
31452 /* L */ pfx |= (vex1 & (1<<2)) ? PFX_VEXL : 0;
31456 case 1: pfx |= PFX_66; break;
31457 case 2: pfx |= PFX_F3; break;
31458 case 3: pfx |= PFX_F2; break;
31465 if ((pfx & PFX_VEX) && (pfx & PFX_REX))
31471 if (pfx & PFX_F2) n++;
31472 if (pfx & PFX_F3) n++;
31477 if (pfx & PFX_CS) n++;
31478 if (pfx & PFX_DS) n++;
31479 if (pfx & PFX_ES) n++;
31480 if (pfx & PFX_FS) n++;
31481 if (pfx & PFX_GS) n++;
31482 if (pfx & PFX_SS) n++;
31488 if ((pfx & PFX_FS) && !vbi->guest_amd64_assume_fs_is_const)
31492 if ((pfx & PFX_GS) && !vbi->guest_amd64_assume_gs_is_const)
31497 if (pfx & PFX_66) sz = 2;
31498 if ((pfx & PFX_REX) && (pfx & PFX_REXW)) sz = 8;
31503 if (haveLOCK(pfx)) {
31514 if (!(pfx & PFX_VEX)) {
31532 if (!(pfx & PFX_VEX)) {
31541 archinfo, vbi, pfx, sz, delta );
31546 archinfo, vbi, pfx, sz, delta );
31551 archinfo, vbi, pfx, sz, delta );
31556 archinfo, vbi, pfx, sz, delta );
31572 archinfo, vbi, pfx, sz, delta );
31578 archinfo, vbi, pfx, sz, delta );
31584 archinfo, vbi, pfx, sz, delta );
31596 if (getVexNvvvv(pfx) != 0)
31688 if (have66orF2orF3(pfx)) goto decode_failure;
31813 haveREX(pfx) ? 1 : 0, getRexW(pfx), getRexR(pfx),
31814 getRexX(pfx), getRexB(pfx));
31816 haveVEX(pfx) ? 1 : 0, getVexL(pfx),
31817 getVexNvvvv(pfx),
31823 have66(pfx) ? 1 : 0, haveF2(pfx) ? 1 : 0,
31824 haveF3(pfx) ? 1 : 0);
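The final block above accumulates legacy, REX and VEX prefixes into the single Prefix word: the 2-byte (0xC5) and 3-byte (0xC4) VEX forms store R/X/B and vvvv inverted, W and L directly, and a 2-bit pp field that stands in for the 66/F3/F2 prefixes. A minimal self-contained sketch of the 2-byte decode follows; the PFX_* values are made up for the demo, and only the bit-twiddling mirrors the lines quoted above.

   #include <stdio.h>

   typedef unsigned int Prefix;
   /* Hypothetical flag values; only the decode logic mirrors the listing. */
   #define PFX_VEX    (1u << 0)
   #define PFX_REXR   (1u << 1)
   #define PFX_VEXL   (1u << 2)
   #define PFX_66     (1u << 3)
   #define PFX_F3     (1u << 4)
   #define PFX_F2     (1u << 5)
   #define PFX_VEXnV0 (1u << 6)
   #define PFX_VEXnV1 (1u << 7)
   #define PFX_VEXnV2 (1u << 8)
   #define PFX_VEXnV3 (1u << 9)

   /* Decode the payload byte of a 2-byte (0xC5) VEX prefix:
      bit 7 = ~R, bits 6..3 = ~vvvv, bit 2 = L, bits 1..0 = pp. */
   static Prefix decodeVex2(unsigned char vex1) {
      Prefix pfx = PFX_VEX;
      pfx |= (vex1 & (1<<7)) ? 0 : PFX_REXR;    /* R is stored inverted  */
      pfx |= (vex1 & (1<<6)) ? 0 : PFX_VEXnV3;  /* so is each vvvv bit   */
      pfx |= (vex1 & (1<<5)) ? 0 : PFX_VEXnV2;
      pfx |= (vex1 & (1<<4)) ? 0 : PFX_VEXnV1;
      pfx |= (vex1 & (1<<3)) ? 0 : PFX_VEXnV0;
      pfx |= (vex1 & (1<<2)) ? PFX_VEXL : 0;    /* L: 0 = 128, 1 = 256   */
      switch (vex1 & 3) {                       /* pp: implied SIMD pfx  */
         case 1: pfx |= PFX_66; break;
         case 2: pfx |= PFX_F3; break;
         case 3: pfx |= PFX_F2; break;
      }
      return pfx;
   }

   int main(void) {
      /* 0xFD: stored ~R = 1 (R extension clear), ~vvvv = 1111 (vvvv
         unused), L = 1 (256-bit), pp = 01 (implied 66 prefix). */
      Prefix p = decodeVex2(0xFD);
      printf("VEXL=%d 66=%d\n", (p & PFX_VEXL) ? 1 : 0, (p & PFX_66) ? 1 : 0);
      return 0;
   }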