Lines Matching refs:unop

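All of the hits below go through the small IR constructor shown at line 243 (this appears to be Valgrind's VEX amd64 front end, guest_amd64_toIR.c). For orientation, here is a minimal sketch of how such a wrapper and one of its widening callers (cf. the widenUto64 hits at lines 1732-1735) are typically written. The wrapper body, the explicit tyenv parameter, the includes, and the vpanic fallback are assumptions inferred from the listed signatures, not text copied from the file.

   /* Sketch under the assumptions stated above; not the file's verbatim code. */
   #include "libvex_ir.h"    /* IRExpr, IROp, IRExpr_Unop, typeOfIRExpr (assumed include) */
   #include "main_util.h"    /* vpanic, VEX-internal (assumed include) */

   /* Build a unary IR expression node; presumably a one-line wrapper. */
   static IRExpr* unop ( IROp op, IRExpr* a )
   {
      return IRExpr_Unop(op, a);
   }

   /* Representative caller, mirroring the widenUto64 hits in the listing:
      zero-extend an integer expression of any narrower type to Ity_I64.
      The real helper probably reads the type from a file-local IRSB; an
      explicit tyenv parameter is used here to keep the sketch self-contained. */
   static IRExpr* widenUto64_sketch ( IRTypeEnv* tyenv, IRExpr* e )
   {
      switch (typeOfIRExpr(tyenv, e)) {
         case Ity_I64: return e;
         case Ity_I32: return unop(Iop_32Uto64, e);
         case Ity_I16: return unop(Iop_16Uto64, e);
         case Ity_I8:  return unop(Iop_8Uto64,  e);
         case Ity_I1:  return unop(Iop_1Uto64,  e);
         default:      vpanic("widenUto64_sketch");
      }
   }

The same pattern accounts for the remaining hits: narrowing (Iop_64to32, Iop_32to16), reinterpretation (Iop_ReinterpF64asI64), and vector-lane splitting (Iop_V128HIto64 and friends) are all built by passing a different IROp to the same constructor.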
243 static IRExpr* unop ( IROp op, IRExpr* a )  in unop()  function
329 return unop(signd ? Iop_8Sto32 : Iop_8Uto32, src); in doScalarWidening()
332 return unop(signd ? Iop_8Sto16 : Iop_8Uto16, src); in doScalarWidening()
335 return unop(signd ? Iop_16Sto32 : Iop_16Uto32, src); in doScalarWidening()
338 return unop(Iop_8Uto64, src); in doScalarWidening()
341 return unop(Iop_8Sto64, src); in doScalarWidening()
344 return unop(Iop_16Uto64, src); in doScalarWidening()
347 return unop(Iop_16Sto64, src); in doScalarWidening()
1020 case 4: return unop(Iop_64to32, IRExpr_Get( OFFB_RAX, Ity_I64 )); in getIRegRAX()
1035 stmt( IRStmt_Put( OFFB_RAX, unop(Iop_32Uto64,e) )); in putIRegRAX()
1068 case 4: return unop(Iop_64to32, IRExpr_Get( OFFB_RDX, Ity_I64 )); in getIRegRDX()
1081 case 4: stmt( IRStmt_Put( OFFB_RDX, unop(Iop_32Uto64,e) )); in putIRegRDX()
1119 return unop(Iop_64to32, in getIReg32()
1128 unop(Iop_32Uto64,e) ) ); in putIReg32()
1151 unop(Iop_16Uto64,e) ) ); in putIReg16()
1194 return unop(Iop_64to32, in getIRegRexB()
1219 sz==4 ? unop(Iop_32Uto64,e) : e in putIRegRexB()
1275 return unop(Iop_64to32, in getIRegG()
1289 e = unop(Iop_32Uto64,e); in putIRegG()
1307 return unop(Iop_64to32, in getIRegV()
1321 e = unop(Iop_32Uto64,e); in putIRegV()
1354 return unop(Iop_64to32, in getIRegE()
1368 e = unop(Iop_32Uto64,e); in putIRegE()
1598 return unop(Iop_64to1, in mkAnd1()
1600 unop(Iop_1Uto64,x), in mkAnd1()
1601 unop(Iop_1Uto64,y))); in mkAnd1()
1685 return unop(Iop_64to1, call); in mk_amd64g_calculate_condition()
1732 case Ity_I32: return unop(Iop_32Uto64, e); in widenUto64()
1733 case Ity_I16: return unop(Iop_16Uto64, e); in widenUto64()
1734 case Ity_I8: return unop(Iop_8Uto64, e); in widenUto64()
1735 case Ity_I1: return unop(Iop_1Uto64, e); in widenUto64()
1745 case Ity_I32: return unop(Iop_32Sto64, e); in widenSto64()
1746 case Ity_I16: return unop(Iop_16Sto64, e); in widenSto64()
1747 case Ity_I8: return unop(Iop_8Sto64, e); in widenSto64()
1760 return unop(Iop_32to16, e); in narrowTo()
1762 return unop(Iop_32to8, e); in narrowTo()
1764 return unop(Iop_64to32, e); in narrowTo()
1766 return unop(Iop_64to16, e); in narrowTo()
1768 return unop(Iop_64to8, e); in narrowTo()
2362 virtual = unop(Iop_32Uto64, unop(Iop_64to32, virtual)); in handleAddrOverrides()
3350 putIReg64( R_RAX, unop(Iop_128to64,mkexpr(dst128)) ); in codegen_div()
3351 putIReg64( R_RDX, unop(Iop_128HIto64,mkexpr(dst128)) ); in codegen_div()
3363 putIRegRAX( 4, unop(Iop_64to32,mkexpr(dst64)) ); in codegen_div()
3364 putIRegRDX( 4, unop(Iop_64HIto32,mkexpr(dst64)) ); in codegen_div()
3369 assign( src64, unop(widen3264, in codegen_div()
3373 assign( dst64, binop(op, mkexpr(src64), unop(widen1632,mkexpr(t))) ); in codegen_div()
3374 putIRegRAX( 2, unop(Iop_32to16,unop(Iop_64to32,mkexpr(dst64))) ); in codegen_div()
3375 putIRegRDX( 2, unop(Iop_32to16,unop(Iop_64HIto32,mkexpr(dst64))) ); in codegen_div()
3382 assign( src64, unop(widen3264, in codegen_div()
3383 unop(widen1632, getIRegRAX(2))) ); in codegen_div()
3386 unop(widen1632, unop(widen816, mkexpr(t)))) ); in codegen_div()
3387 putIRegRAX( 1, unop(Iop_16to8, in codegen_div()
3388 unop(Iop_32to16, in codegen_div()
3389 unop(Iop_64to32,mkexpr(dst64)))) ); in codegen_div()
3390 putIRegAH( unop(Iop_16to8, in codegen_div()
3391 unop(Iop_32to16, in codegen_div()
3392 unop(Iop_64HIto32,mkexpr(dst64)))) ); in codegen_div()
3938 assign( resHi, unop(Iop_128HIto64,mkexpr(res128))); in codegen_mulL_A_D()
3939 assign( resLo, unop(Iop_128to64,mkexpr(res128))); in codegen_mulL_A_D()
3952 assign( resHi, unop(Iop_64HIto32,mkexpr(res64))); in codegen_mulL_A_D()
3953 assign( resLo, unop(Iop_64to32,mkexpr(res64))); in codegen_mulL_A_D()
3966 assign( resHi, unop(Iop_32HIto16,mkexpr(res32))); in codegen_mulL_A_D()
3967 assign( resLo, unop(Iop_32to16,mkexpr(res32))); in codegen_mulL_A_D()
3980 assign( resHi, unop(Iop_16HIto8,mkexpr(res16))); in codegen_mulL_A_D()
3981 assign( resLo, unop(Iop_16to8,mkexpr(res16))); in codegen_mulL_A_D()
4033 unop(mkSizedOp(ty,Iop_Not8), in dis_Grp3()
4119 assign(dst1, unop(mkSizedOp(ty,Iop_Not8), mkexpr(t1))); in dis_Grp3()
4491 assign( td, unop(Iop_32Uto64, getIReg32(R_RDI)) ); in dis_MOVS()
4492 assign( ts, unop(Iop_32Uto64, getIReg32(R_RSI)) ); in dis_MOVS()
4503 incd = unop(Iop_32Uto64, unop(Iop_64to32, incd)); in dis_MOVS()
4504 incs = unop(Iop_32Uto64, unop(Iop_64to32, incs)); in dis_MOVS()
4518 assign( ts, unop(Iop_32Uto64, getIReg32(R_RSI)) ); in dis_LODS()
4526 incs = unop(Iop_32Uto64, unop(Iop_64to32, incs)); in dis_LODS()
4541 assign( td, unop(Iop_32Uto64, getIReg32(R_RDI)) ); in dis_STOS()
4549 incd = unop(Iop_32Uto64, unop(Iop_64to32, incd)); in dis_STOS()
4564 assign( td, unop(Iop_32Uto64, getIReg32(R_RDI)) ); in dis_CMPS()
4565 assign( ts, unop(Iop_32Uto64, getIReg32(R_RSI)) ); in dis_CMPS()
4580 incd = unop(Iop_32Uto64, unop(Iop_64to32, incd)); in dis_CMPS()
4581 incs = unop(Iop_32Uto64, unop(Iop_64to32, incs)); in dis_CMPS()
4599 assign( td, unop(Iop_32Uto64, getIReg32(R_RDI)) ); in dis_SCAS()
4609 incd = unop(Iop_32Uto64, unop(Iop_64to32, incd)); in dis_SCAS()
4882 unop(Iop_Clz64, mkexpr(src64x)) in gen_LZCNT()
4910 unop(Iop_Ctz64, mkexpr(src64)) in gen_TZCNT()
4975 return unop(Iop_64to32, IRExpr_Get( OFFB_FPROUND, Ity_I64 )); in get_fpround()
4981 stmt( IRStmt_Put( OFFB_FPROUND, unop(Iop_32Uto64,e) ) ); in put_fpround()
5131 put_ftop( binop(Iop_Sub32, get_ftop(), unop(Iop_1Uto32,mkexpr(cond))) ); in maybe_fp_push()
5176 assign(i64, unop(Iop_ReinterpF64asI64, mkexpr(d64)) ); in math_IS_TRIG_ARG_FINITE_AND_IN_RANGE()
5180 binop(Iop_Shr32, unop(Iop_64HIto32, mkexpr(i64)), mkU8(20)), in math_IS_TRIG_ARG_FINITE_AND_IN_RANGE()
5194 unop(Iop_32to16, in get_FPU_sw()
5199 binop(Iop_And32, unop(Iop_64to32, get_C3210()), in get_FPU_sw()
5276 unop(Iop_F32toF64, loadLE(Ity_F32,mkexpr(addr))) in fp_do_op_mem_ST_0()
5301 unop(Iop_F32toF64, loadLE(Ity_F32,mkexpr(addr))), in fp_do_oprev_mem_ST_0()
5361 unop( Iop_32Uto64, in fp_do_ucomi_ST0_STi()
5380 unop(Iop_32Uto64, in x87ishly_qnarrow_32_to_16()
5383 unop(Iop_32to16, mkexpr(t32)), in x87ishly_qnarrow_32_to_16()
5428 unop( Iop_32Uto64, in dis_FPU()
5433 unop(Iop_F32toF64, in dis_FPU()
5446 unop( Iop_32Uto64, in dis_FPU()
5451 unop(Iop_F32toF64, in dis_FPU()
5499 unop(Iop_32Uto64, in dis_FPU()
5514 unop(Iop_32Uto64, in dis_FPU()
5561 put_ST(0, unop(Iop_F32toF64, in dis_FPU()
5621 assign(ew, unop(Iop_64to32,mkexpr(w64)) ); in dis_FPU()
5654 unop( Iop_16Uto64, in dis_FPU()
5660 put_fpround( unop(Iop_64to32, mkexpr(t64)) ); in dis_FPU()
5661 assign( ew, unop(Iop_64HIto32, mkexpr(t64) ) ); in dis_FPU()
5725 unop( Iop_64to16, in dis_FPU()
5729 mkIRExprVec_1( unop(Iop_32Uto64, get_fpround()) ) in dis_FPU()
5768 put_ST_UNCHECKED(0, unop(Iop_NegF64, get_ST(0))); in dis_FPU()
5773 put_ST_UNCHECKED(0, unop(Iop_AbsF64, get_ST(0))); in dis_FPU()
5783 = mkIRExprVec_2( unop(Iop_8Uto64, get_ST_TAG(0)), in dis_FPU()
5784 unop(Iop_ReinterpF64asI64, in dis_FPU()
5884 unop(Iop_1Uto64, mkexpr(argOK)), in dis_FPU()
5908 assign( argI, unop(Iop_ReinterpF64asI64, mkexpr(argF))); in dis_FPU()
5925 assign( sigF, unop(Iop_ReinterpI64asF64, mkexpr(sigI)) ); in dis_FPU()
5926 assign( expF, unop(Iop_ReinterpI64asF64, mkexpr(expI)) ); in dis_FPU()
5949 unop(Iop_32Uto64, in dis_FPU()
5976 unop(Iop_32Uto64, in dis_FPU()
6025 unop(Iop_1Uto64, mkexpr(argOK)), in dis_FPU()
6063 unop(Iop_1Uto64, mkexpr(argOK)), in dis_FPU()
6122 unop(Iop_I32StoF64, in dis_FPU()
6130 unop(Iop_I32StoF64, in dis_FPU()
6187 unop(Iop_32Uto64, in dis_FPU()
6220 put_ST(0, unop(Iop_I32StoF64, in dis_FPU()
6267 put_ST(0, unop(Iop_ReinterpI64asF64, mkexpr(val))); in dis_FPU()
6279 unop(Iop_ReinterpF64asI64, get_ST(0)) ); in dis_FPU()
6422 unop(Iop_32Uto64, in dis_FPU()
6594 assign(ew, unop(Iop_64to32,mkexpr(w64)) ); in dis_FPU()
6726 unop(Iop_32Uto64, in dis_FPU()
6740 unop(Iop_32Uto64, in dis_FPU()
6805 unop(Iop_I32StoF64, in dis_FPU()
6806 unop(Iop_16Sto32, in dis_FPU()
6814 unop(Iop_I32StoF64, in dis_FPU()
6815 unop(Iop_16Sto32, in dis_FPU()
6844 unop(Iop_32Uto64, in dis_FPU()
6894 put_ST(0, unop(Iop_I32StoF64, in dis_FPU()
6895 unop(Iop_16Sto32, in dis_FPU()
6963 unop(Iop_32to16, in dis_FPU()
6969 unop(Iop_64to32, get_C3210()), in dis_FPU()
7163 argG = unop(Iop_Not64, argG); in dis_MMXop_regmem_to_reg()
7242 assign( amt8, unop(Iop_64to8, mkexpr(amt)) ); in dis_MMX_shiftG_byE()
7415 unop(Iop_64to32, getMMXReg(gregLO3ofRM(modrm)) ) ); in dis_MMX()
7423 unop(Iop_64to32, getMMXReg(gregLO3ofRM(modrm)) ) ); in dis_MMX()
7714 unop(Iop_Not64, mkexpr(mask)))) ); in dis_MMX()
7897 binop(Iop_Shl64, unop(Iop_16Uto64, mkexpr(esrc)), in dis_SHLRD_Gv_Ev()
7913 unop(Iop_16Uto64, mkexpr(esrc)), in dis_SHLRD_Gv_Ev()
8066 unop(Iop_64to8, in dis_bt_G_E()
8093 unop(Iop_Not8, mkexpr(t_mask))) ); in dis_bt_G_E()
8115 unop(Iop_8Uto64, mkexpr(t_fetched)), in dis_bt_G_E()
8237 fwds ? unop(Iop_Ctz64, mkexpr(src64)) in dis_bs_E_G()
8240 unop(Iop_Clz64, mkexpr(src64))), in dis_bs_E_G()
8247 assign( dst, unop(Iop_64to16, mkexpr(dst64)) ); in dis_bs_E_G()
8250 assign( dst, unop(Iop_64to32, mkexpr(dst64)) ); in dis_bs_E_G()
8789 = invertG ? unop(Iop_NotV128, getXMMReg(gregOfRexRM(pfx,rm))) in dis_SSE_E_to_G_all_wrk()
8869 assign( epart, unop( Iop_32UtoV128, in dis_SSE_E_to_G_lo32()
8905 assign( epart, unop( Iop_64UtoV128, in dis_SSE_E_to_G_lo64()
8936 : unop(op, src); in dis_SSE_E_to_G_unary_all()
8947 : unop(op, src); in dis_SSE_E_to_G_unary_all()
8981 putXMMReg( gregOfRexRM(pfx,rm), unop(op, mkexpr(oldG1)) ); in dis_SSE_E_to_G_unary_lo32()
8992 putXMMReg( gregOfRexRM(pfx,rm), unop(op, mkexpr(oldG1)) ); in dis_SSE_E_to_G_unary_lo32()
9025 putXMMReg( gregOfRexRM(pfx,rm), unop(op, mkexpr(oldG1)) ); in dis_SSE_E_to_G_unary_lo64()
9036 putXMMReg( gregOfRexRM(pfx,rm), unop(op, mkexpr(oldG1)) ); in dis_SSE_E_to_G_unary_lo64()
9261 ? unop( Iop_64UtoV128, loadLE(Ity_I64, mkexpr(addr))) in dis_SSE_cmp_E_to_G()
9263 unop( Iop_32UtoV128, loadLE(Ity_I32, mkexpr(addr))) in dis_SSE_cmp_E_to_G()
9275 unop(Iop_NotV128, mkexpr(plain)) ); in dis_SSE_cmp_E_to_G()
9322 assign( amt8, unop(Iop_64to8, mkexpr(amt)) ); in dis_SSE_shiftG_byE()
9426 unop( Iop_64to32, in get_sse_roundingmode()
9436 unop(Iop_32Uto64,sseround) ) ); in put_sse_roundingmode()
9448 assign( hi64, unop(Iop_V128HIto64, mkexpr(t128)) ); in breakupV128to32s()
9449 assign( lo64, unop(Iop_V128to64, mkexpr(t128)) ); in breakupV128to32s()
9460 assign( *t0, unop(Iop_64to32, mkexpr(lo64)) ); in breakupV128to32s()
9461 assign( *t1, unop(Iop_64HIto32, mkexpr(lo64)) ); in breakupV128to32s()
9462 assign( *t2, unop(Iop_64to32, mkexpr(hi64)) ); in breakupV128to32s()
9463 assign( *t3, unop(Iop_64HIto32, mkexpr(hi64)) ); in breakupV128to32s()
9487 assign( hi32, unop(Iop_64HIto32, mkexpr(t64)) ); in breakup64to16s()
9488 assign( lo32, unop(Iop_64to32, mkexpr(t64)) ); in breakup64to16s()
9499 assign( *t0, unop(Iop_32to16, mkexpr(lo32)) ); in breakup64to16s()
9500 assign( *t1, unop(Iop_32HIto16, mkexpr(lo32)) ); in breakup64to16s()
9501 assign( *t2, unop(Iop_32to16, mkexpr(hi32)) ); in breakup64to16s()
9502 assign( *t3, unop(Iop_32HIto16, mkexpr(hi32)) ); in breakup64to16s()
9532 assign( *t0, unop(Iop_V256to64_0, mkexpr(t256)) ); in breakupV256to64s()
9533 assign( *t1, unop(Iop_V256to64_1, mkexpr(t256)) ); in breakupV256to64s()
9534 assign( *t2, unop(Iop_V256to64_2, mkexpr(t256)) ); in breakupV256to64s()
9535 assign( *t3, unop(Iop_V256to64_3, mkexpr(t256)) ); in breakupV256to64s()
9548 assign(*t1, unop(Iop_V256toV128_1, mkexpr(t256))); in breakupV256toV128s()
9549 assign(*t0, unop(Iop_V256toV128_0, mkexpr(t256))); in breakupV256toV128s()
9578 assign( *t0, unop(Iop_V128to64, mkexpr(t128)) ); in breakupV128to64s()
9579 assign( *t1, unop(Iop_V128HIto64, mkexpr(t128)) ); in breakupV128to64s()
9750 assign( posMask, unop(Iop_Not64, mkexpr(negMask)) ); in math_PABS_MMX()
9766 assign(aaHi, unop(Iop_V128HIto64, mkexpr(aa))); in math_PABS_XMM()
9767 assign(aaLo, unop(Iop_V128to64, mkexpr(aa))); in math_PABS_XMM()
9834 assign( dHi, unop(Iop_V128HIto64, mkexpr(dV)) ); in math_PALIGNR_XMM()
9835 assign( dLo, unop(Iop_V128to64, mkexpr(dV)) ); in math_PALIGNR_XMM()
9836 assign( sHi, unop(Iop_V128HIto64, mkexpr(sV)) ); in math_PALIGNR_XMM()
9837 assign( sLo, unop(Iop_V128to64, mkexpr(sV)) ); in math_PALIGNR_XMM()
10085 unop( Iop_32Uto64, in dis_COMISD()
10128 unop( Iop_32Uto64, in dis_COMISS()
10130 unop(Iop_F32toF64,mkexpr(argL)), in dis_COMISS()
10131 unop(Iop_F32toF64,mkexpr(argR)))), in dis_COMISS()
10239 assign( hi64, unop(Iop_V128HIto64, mkexpr(sV)) ); in math_PSRLDQ()
10240 assign( lo64, unop(Iop_V128to64, mkexpr(sV)) ); in math_PSRLDQ()
10286 assign( hi64, unop(Iop_V128HIto64, mkexpr(sV)) ); in math_PSLLDQ()
10287 assign( lo64, unop(Iop_V128to64, mkexpr(sV)) ); in math_PSLLDQ()
10406 unop(Iop_F32toF64, mkexpr(f32lo))) ); in dis_CVTxSS2SI()
10412 unop(Iop_F32toF64, mkexpr(f32lo))) ); in dis_CVTxSS2SI()
10446 putXMMRegLane64F( rG, 1, unop(Iop_F32toF64, mkexpr(f32hi)) ); in dis_CVTPS2PD_128()
10447 putXMMRegLane64F( rG, 0, unop(Iop_F32toF64, mkexpr(f32lo)) ); in dis_CVTPS2PD_128()
10487 putYMMRegLane64F( rG, 3, unop(Iop_F32toF64, mkexpr(f32_3)) ); in dis_CVTPS2PD_256()
10488 putYMMRegLane64F( rG, 2, unop(Iop_F32toF64, mkexpr(f32_2)) ); in dis_CVTPS2PD_256()
10489 putYMMRegLane64F( rG, 1, unop(Iop_F32toF64, mkexpr(f32_1)) ); in dis_CVTPS2PD_256()
10490 putYMMRegLane64F( rG, 0, unop(Iop_F32toF64, mkexpr(f32_0)) ); in dis_CVTPS2PD_256()
10522 assign( t0, unop(Iop_ReinterpI64asF64, in dis_CVTPD2PS_128()
10523 unop(Iop_V128to64, mkexpr(argV))) ); in dis_CVTPD2PS_128()
10524 assign( t1, unop(Iop_ReinterpI64asF64, in dis_CVTPD2PS_128()
10525 unop(Iop_V128HIto64, mkexpr(argV))) ); in dis_CVTPD2PS_128()
10575 unop( Iop_F32toF64, \ in dis_CVTxPS2DQ_128()
10576 unop( Iop_ReinterpI32asF32, mkexpr(_t))) ) in dis_CVTxPS2DQ_128()
10625 unop( Iop_F32toF64, \ in dis_CVTxPS2DQ_256()
10626 unop( Iop_ReinterpI32asF32, mkexpr(_t))) ) in dis_CVTxPS2DQ_256()
10676 assign( t0, unop(Iop_ReinterpI64asF64, in dis_CVTxPD2DQ_128()
10677 unop(Iop_V128to64, mkexpr(argV))) ); in dis_CVTxPD2DQ_128()
10678 assign( t1, unop(Iop_ReinterpI64asF64, in dis_CVTxPD2DQ_128()
10679 unop(Iop_V128HIto64, mkexpr(argV))) ); in dis_CVTxPD2DQ_128()
10737 unop( Iop_ReinterpI64asF64, \ in dis_CVTxPD2DQ_256()
10786 unop(Iop_I32StoF64,mkexpr(_t))) in dis_CVTDQ2PS_128()
10836 unop(Iop_I32StoF64,mkexpr(_t))) in dis_CVTDQ2PS_256()
10862 assign(t1, unop(Iop_16Uto32, unop(Iop_GetMSBs8x16, mkexpr(t0)))); in dis_PMOVMSKB_128()
10884 assign(t2, unop(Iop_GetMSBs8x16, mkexpr(t0))); in dis_PMOVMSKB_256()
10885 assign(t3, unop(Iop_GetMSBs8x16, mkexpr(t1))); in dis_PMOVMSKB_256()
10917 assign( d1, unop(Iop_V128HIto64, mkexpr(dV)) ); in math_UNPCKxPD_128()
10918 assign( d0, unop(Iop_V128to64, mkexpr(dV)) ); in math_UNPCKxPD_128()
10919 assign( s1, unop(Iop_V128HIto64, mkexpr(sV)) ); in math_UNPCKxPD_128()
10920 assign( s0, unop(Iop_V128to64, mkexpr(sV)) ); in math_UNPCKxPD_128()
11013 assign( d1, unop(Iop_V128HIto64, mkexpr(dV)) ); in math_SHUFPD_128()
11014 assign( d0, unop(Iop_V128to64, mkexpr(dV)) ); in math_SHUFPD_128()
11015 assign( s1, unop(Iop_V128HIto64, mkexpr(sV)) ); in math_SHUFPD_128()
11016 assign( s0, unop(Iop_V128to64, mkexpr(sV)) ); in math_SHUFPD_128()
11064 unop( Iop_NotV128, mkexpr(imm8_mask) ) ) ) ); in math_BLENDPD_128()
11097 unop( Iop_NotV128, mkexpr(imm8_mask) ) ) ) ); in math_BLENDPS_128()
11134 unop( Iop_NotV128, mkexpr(imm16_mask) ) ) ) ); in math_PBLENDW_128()
11251 assign( a1, unop(Iop_V128HIto64, mkexpr(addV) )); in math_ADDSUBPD_128()
11252 assign( s0, unop(Iop_V128to64, mkexpr(subV) )); in math_ADDSUBPD_128()
11362 assign( sVmut, unop(xIsH ? Iop_V128HIto64 : Iop_V128to64, mkexpr(sV)) ); in dis_PSHUFxW_128()
11363 assign( sVcon, unop(xIsH ? Iop_V128to64 : Iop_V128HIto64, mkexpr(sV)) ); in dis_PSHUFxW_128()
11452 case 0: assign(d16, unop(Iop_32to16, mkexpr(s0))); break; in dis_PEXTRW_128_EregOnly_toG()
11453 case 1: assign(d16, unop(Iop_32HIto16, mkexpr(s0))); break; in dis_PEXTRW_128_EregOnly_toG()
11454 case 2: assign(d16, unop(Iop_32to16, mkexpr(s1))); break; in dis_PEXTRW_128_EregOnly_toG()
11455 case 3: assign(d16, unop(Iop_32HIto16, mkexpr(s1))); break; in dis_PEXTRW_128_EregOnly_toG()
11456 case 4: assign(d16, unop(Iop_32to16, mkexpr(s2))); break; in dis_PEXTRW_128_EregOnly_toG()
11457 case 5: assign(d16, unop(Iop_32HIto16, mkexpr(s2))); break; in dis_PEXTRW_128_EregOnly_toG()
11458 case 6: assign(d16, unop(Iop_32to16, mkexpr(s3))); break; in dis_PEXTRW_128_EregOnly_toG()
11459 case 7: assign(d16, unop(Iop_32HIto16, mkexpr(s3))); break; in dis_PEXTRW_128_EregOnly_toG()
11462 putIReg32(rG, unop(Iop_16Uto32, mkexpr(d16))); in dis_PEXTRW_128_EregOnly_toG()
11490 unop(Iop_I32StoF64, unop(Iop_64to32, mkexpr(arg64))) in dis_CVTDQ2PD_128()
11494 unop(Iop_I32StoF64, unop(Iop_64HIto32, mkexpr(arg64))) in dis_CVTDQ2PD_128()
11522 unop(Iop_64to32, in dis_STMXCSR()
11526 mkIRExprVec_1( unop(Iop_32Uto64,get_sse_roundingmode()) ) in dis_STMXCSR()
11564 unop(Iop_32Uto64, in dis_LDMXCSR()
11571 put_sse_roundingmode( unop(Iop_64to32, mkexpr(t64)) ); in dis_LDMXCSR()
11572 assign( ew, unop(Iop_64HIto32, mkexpr(t64) ) ); in dis_LDMXCSR()
11579 binop(Iop_CmpNE64, unop(Iop_32Uto64,mkexpr(ew)), mkU64(0)), in dis_LDMXCSR()
11748 unop(Iop_32Uto64, getIRegRDX(4)), mkU8(32)), in dis_XSAVE()
11749 unop(Iop_32Uto64, getIRegRAX(4))), in dis_XSAVE()
11760 unop(Iop_64to8, mkexpr(rfbm)), in dis_XSAVE()
12003 unop(Iop_32Uto64, getIRegRDX(4)), mkU8(32)), in dis_XRSTOR()
12004 unop(Iop_32Uto64, getIRegRAX(4))), in dis_XRSTOR()
12087 unop(Iop_16Uto64, mkexpr(u16)), in math_PINSRW_128()
12174 unop(Iop_NotV128, mkexpr(mask)))) ); in dis_MASKMOVDQU()
12870 unop(Iop_I32StoF64, in dis_ESC_0F__SSE2()
12871 unop(Iop_64to32, mkexpr(arg64)) )) ); in dis_ESC_0F__SSE2()
12877 unop(Iop_I32StoF64, in dis_ESC_0F__SSE2()
12878 unop(Iop_64HIto32, mkexpr(arg64)) )) ); in dis_ESC_0F__SSE2()
12907 unop(Iop_I32StoF64, mkexpr(arg32)) ) ); in dis_ESC_0F__SSE2()
12952 unop(Iop_I32StoF64, mkexpr(arg32)) in dis_ESC_0F__SSE2()
13008 unop(Iop_I32StoF64, unop(Iop_64to32, mkexpr(arg64)) ) in dis_ESC_0F__SSE2()
13013 unop(Iop_I32StoF64, unop(Iop_64HIto32, mkexpr(arg64)) ) in dis_ESC_0F__SSE2()
13086 unop( Iop_F32toF64, mkexpr(f32hi) ) ), in dis_ESC_0F__SSE2()
13089 unop( Iop_F32toF64, mkexpr(f32lo) ) ) in dis_ESC_0F__SSE2()
13424 unop( Iop_F32toF64, mkexpr(f32lo) ) ); in dis_ESC_0F__SSE2()
13730 unop( Iop_32UtoV128, getIReg32(eregOfRexRM(pfx,modrm)) ) in dis_ESC_0F__SSE2()
13737 unop( Iop_64UtoV128, getIReg64(eregOfRexRM(pfx,modrm)) ) in dis_ESC_0F__SSE2()
13748 ? unop( Iop_32UtoV128,loadLE(Ity_I32, mkexpr(addr)) ) in dis_ESC_0F__SSE2()
13749 : unop( Iop_64UtoV128,loadLE(Ity_I64, mkexpr(addr)) ) in dis_ESC_0F__SSE2()
14323 putIReg64(gregOfRexRM(pfx,modrm), unop(Iop_16Uto64, mkexpr(t5))); in dis_ESC_0F__SSE2()
14325 putIReg32(gregOfRexRM(pfx,modrm), unop(Iop_16Uto32, mkexpr(t5))); in dis_ESC_0F__SSE2()
14468 unop(Iop_64UtoV128, getMMXReg( eregLO3ofRM(modrm) )) ); in dis_ESC_0F__SSE2()
14532 assign(t1, unop(Iop_8Uto32, unop(Iop_GetMSBs8x8, mkexpr(t0)))); in dis_ESC_0F__SSE2()
14921 assign( t0, unop(Iop_64to32, mkexpr(dV)) ); in dis_ESC_0F__SSE2()
14922 assign( t1, unop(Iop_64to32, mkexpr(sV)) ); in dis_ESC_0F__SSE2()
15111 assign ( d0, unop(Iop_V128to64, mkexpr(sV)) ); in dis_MOVDDUP_128()
15475 assign( dHi, unop(Iop_V128HIto64, mkexpr(dV)) ); in math_PSHUFB_XMM()
15476 assign( dLo, unop(Iop_V128to64, mkexpr(dV)) ); in math_PSHUFB_XMM()
15477 assign( sHi, unop(Iop_V128HIto64, mkexpr(sV)) ); in math_PSHUFB_XMM()
15478 assign( sLo, unop(Iop_V128to64, mkexpr(sV)) ); in math_PSHUFB_XMM()
15491 unop(Iop_Not64, binop(Iop_SarN8x8,mkexpr(sHi),mkU8(7)))); in math_PSHUFB_XMM()
15510 unop(Iop_Not64,mkexpr(maskBit3hi))) )); in math_PSHUFB_XMM()
15518 unop(Iop_Not64, binop(Iop_SarN8x8,mkexpr(sLo),mkU8(7)))); in math_PSHUFB_XMM()
15537 unop(Iop_Not64,mkexpr(maskBit3lo))) )); in math_PSHUFB_XMM()
15613 assign( dHi, unop(Iop_V128HIto64, mkexpr(dV)) ); in dis_PHADD_128()
15614 assign( dLo, unop(Iop_V128to64, mkexpr(dV)) ); in dis_PHADD_128()
15615 assign( sHi, unop(Iop_V128HIto64, mkexpr(sV)) ); in dis_PHADD_128()
15616 assign( sLo, unop(Iop_V128to64, mkexpr(sV)) ); in dis_PHADD_128()
15828 unop(Iop_Not64, binop(Iop_SarN8x8, mkexpr(sV), mkU8(7))) in dis_ESC_0F38__SupSSE3()
16043 assign( dHi, unop(Iop_V128HIto64, mkexpr(dV)) ); in dis_ESC_0F38__SupSSE3()
16044 assign( dLo, unop(Iop_V128to64, mkexpr(dV)) ); in dis_ESC_0F38__SupSSE3()
16045 assign( sHi, unop(Iop_V128HIto64, mkexpr(sV)) ); in dis_ESC_0F38__SupSSE3()
16046 assign( sLo, unop(Iop_V128to64, mkexpr(sV)) ); in dis_ESC_0F38__SupSSE3()
16127 assign( dHi, unop(Iop_V128HIto64, mkexpr(dV)) ); in dis_ESC_0F38__SupSSE3()
16128 assign( dLo, unop(Iop_V128to64, mkexpr(dV)) ); in dis_ESC_0F38__SupSSE3()
16129 assign( sHi, unop(Iop_V128HIto64, mkexpr(sV)) ); in dis_ESC_0F38__SupSSE3()
16130 assign( sLo, unop(Iop_V128to64, mkexpr(sV)) ); in dis_ESC_0F38__SupSSE3()
16447 unop(Iop_1Uto64, in dis_ESC_0F__SSE4()
16498 unop(Iop_1Uto64, in dis_ESC_0F__SSE4()
16502 unop(Iop_1Uto64, in dis_ESC_0F__SSE4()
16559 unop(Iop_1Uto64, in dis_ESC_0F__SSE4()
16563 unop(Iop_1Uto64, in dis_ESC_0F__SSE4()
16613 assign(notmask, unop(Iop_NotV128, mkexpr(mask))); in math_PBLENDVB_128()
16642 assign(notmask, unop(Iop_NotV256, mkexpr(mask))); in math_PBLENDVB_256()
16752 unop(Iop_V128to64, in finish_xTESTy()
16760 unop(Iop_V128to64, in finish_xTESTy()
16773 unop(Iop_Not64, in finish_xTESTy()
16777 unop(Iop_Not64, in finish_xTESTy()
16798 unop(Iop_Not64, in finish_xTESTy()
16805 unop(Iop_Not64, in finish_xTESTy()
16920 mkexpr(vecE), unop(Iop_NotV256, mkexpr(vecG)))); in dis_xTESTy_256()
16959 unop( Iop_64UtoV128, loadLE( Ity_I64, mkexpr(addr) ) ) ); in dis_PMOVxXBW_128()
17043 unop( Iop_64UtoV128, loadLE( Ity_I64, mkexpr(addr) ) ) ); in dis_PMOVxXWD_128()
17126 unop( Iop_16Sto64, in dis_PMOVSXWQ_128()
17127 unop( Iop_32HIto16, mkexpr(srcBytes) ) ), in dis_PMOVSXWQ_128()
17128 unop( Iop_16Sto64, in dis_PMOVSXWQ_128()
17129 unop( Iop_32to16, mkexpr(srcBytes) ) ) ) ); in dis_PMOVSXWQ_128()
17160 unop( Iop_16Sto64, mkexpr(s3) ), in dis_PMOVSXWQ_256()
17161 unop( Iop_16Sto64, mkexpr(s2) ) ), in dis_PMOVSXWQ_256()
17163 unop( Iop_16Sto64, mkexpr(s1) ), in dis_PMOVSXWQ_256()
17164 unop( Iop_16Sto64, mkexpr(s0) ) ) ) ); in dis_PMOVSXWQ_256()
17188 unop( Iop_32UtoV128, loadLE( Ity_I32, mkexpr(addr) ) ) ); in dis_PMOVZXWQ_128()
17223 unop( Iop_64UtoV128, loadLE( Ity_I64, mkexpr(addr) ) ) ); in dis_PMOVZXWQ_256()
17264 assign( srcI64, unop(Iop_V128to64, mkexpr(srcVec)) ); in dis_PMOVxXDQ_128()
17270 assign( srcVec, unop( Iop_64UtoV128, mkexpr(srcI64)) ); in dis_PMOVxXDQ_128()
17280 unop( Iop_32Sto64, in dis_PMOVxXDQ_128()
17281 unop( Iop_64HIto32, mkexpr(srcI64) ) ), in dis_PMOVxXDQ_128()
17282 unop( Iop_32Sto64, in dis_PMOVxXDQ_128()
17283 unop( Iop_64to32, mkexpr(srcI64) ) ) ); in dis_PMOVxXDQ_128()
17331 unop( Iop_32Sto64, mkexpr(s3) ), in dis_PMOVxXDQ_256()
17332 unop( Iop_32Sto64, mkexpr(s2) ) ), in dis_PMOVxXDQ_256()
17334 unop( Iop_32Sto64, mkexpr(s1) ), in dis_PMOVxXDQ_256()
17335 unop( Iop_32Sto64, mkexpr(s0) ) ) ); in dis_PMOVxXDQ_256()
17364 unop( Iop_32UtoV128, loadLE( Ity_I32, mkexpr(addr) ) ) ); in dis_PMOVxXBD_128()
17406 unop( Iop_64UtoV128, loadLE( Ity_I64, mkexpr(addr) ) ) ); in dis_PMOVxXBD_256()
17459 unop( Iop_8Sto64, in dis_PMOVSXBQ_128()
17460 unop( Iop_16HIto8, mkexpr(srcBytes) ) ), in dis_PMOVSXBQ_128()
17461 unop( Iop_8Sto64, in dis_PMOVSXBQ_128()
17462 unop( Iop_16to8, mkexpr(srcBytes) ) ) ) ); in dis_PMOVSXBQ_128()
17492 unop( Iop_8Sto64, in dis_PMOVSXBQ_256()
17493 unop( Iop_16HIto8, in dis_PMOVSXBQ_256()
17494 unop( Iop_32HIto16, in dis_PMOVSXBQ_256()
17496 unop( Iop_8Sto64, in dis_PMOVSXBQ_256()
17497 unop( Iop_16to8, in dis_PMOVSXBQ_256()
17498 unop( Iop_32HIto16, in dis_PMOVSXBQ_256()
17501 unop( Iop_8Sto64, in dis_PMOVSXBQ_256()
17502 unop( Iop_16HIto8, in dis_PMOVSXBQ_256()
17503 unop( Iop_32to16, in dis_PMOVSXBQ_256()
17505 unop( Iop_8Sto64, in dis_PMOVSXBQ_256()
17506 unop( Iop_16to8, in dis_PMOVSXBQ_256()
17507 unop( Iop_32to16, in dis_PMOVSXBQ_256()
17532 unop( Iop_32UtoV128, in dis_PMOVZXBQ_128()
17533 unop( Iop_16Uto32, loadLE( Ity_I16, mkexpr(addr) )))); in dis_PMOVZXBQ_128()
17570 unop( Iop_32UtoV128, loadLE( Ity_I32, mkexpr(addr) ))); in dis_PMOVZXBQ_256()
17623 assign( sHi, unop(Iop_V128HIto64, mkexpr(sV)) ); in dis_PHMINPOSUW_128()
17624 assign( sLo, unop(Iop_V128to64, mkexpr(sV)) ); in dis_PHMINPOSUW_128()
17632 (rG, unop(Iop_64UtoV128, mkexpr(dLo))); in dis_PHMINPOSUW_128()
18297 putIRegG(4, pfx, modrm, unop(Iop_64to32, mkexpr(valG1))); in dis_ESC_0F38__SSE4()
18352 case 0: assign(d16, unop(Iop_32to16, mkexpr(t0))); break; in dis_PEXTRW()
18353 case 1: assign(d16, unop(Iop_32HIto16, mkexpr(t0))); break; in dis_PEXTRW()
18354 case 2: assign(d16, unop(Iop_32to16, mkexpr(t1))); break; in dis_PEXTRW()
18355 case 3: assign(d16, unop(Iop_32HIto16, mkexpr(t1))); break; in dis_PEXTRW()
18356 case 4: assign(d16, unop(Iop_32to16, mkexpr(t2))); break; in dis_PEXTRW()
18357 case 5: assign(d16, unop(Iop_32HIto16, mkexpr(t2))); break; in dis_PEXTRW()
18358 case 6: assign(d16, unop(Iop_32to16, mkexpr(t3))); break; in dis_PEXTRW()
18359 case 7: assign(d16, unop(Iop_32HIto16, mkexpr(t3))); break; in dis_PEXTRW()
18365 putIReg32( rE, unop(Iop_16Uto32, mkexpr(d16)) ); in dis_PEXTRW()
18456 case 0: assign( src_qword, unop(Iop_V128to64, mkexpr(xmm_vec)) ); in dis_PEXTRQ()
18458 case 1: assign( src_qword, unop(Iop_V128HIto64, mkexpr(xmm_vec)) ); in dis_PEXTRQ()
18481 return unop(Iop_64to32, unop(Iop_Ctz64, unop(Iop_32Uto64, exp))); in math_CTZ32()
18500 assign(zmaskL, unop(Iop_16Uto32, in dis_PCMPISTRI_3A()
18501 unop(Iop_GetMSBs8x16, in dis_PCMPISTRI_3A()
18504 assign(zmaskR, unop(Iop_16Uto32, in dis_PCMPISTRI_3A()
18505 unop(Iop_GetMSBs8x16, in dis_PCMPISTRI_3A()
18516 IRExpr *ctzL = unop(Iop_32to8, math_CTZ32(mkexpr(zmaskL))); in dis_PCMPISTRI_3A()
18533 IRExpr *ctzR = unop(Iop_32to8, math_CTZ32(mkexpr(zmaskR))); in dis_PCMPISTRI_3A()
18544 IRExpr *boolResII = unop(Iop_16Uto32, in dis_PCMPISTRI_3A()
18545 unop(Iop_GetMSBs8x16, in dis_PCMPISTRI_3A()
18556 IRExpr *intRes1_b = unop(Iop_Not32, binop(Iop_Or32, in dis_PCMPISTRI_3A()
18760 unop(Iop_8Uto64, mkexpr(u8)), in math_PINSRB_128()
18890 unop( Iop_32Uto64, in dis_PEXTRB_128_GtoE()
18897 storeLE( mkexpr(addr), unop(Iop_32to8, mkexpr(shr_lane) ) ); in dis_PEXTRB_128_GtoE()
19000 assign( sHi, unop(Iop_V128HIto64, mkexpr(src_masked)) ); in math_MPSADBW_128()
19001 assign( sLo, unop(Iop_V128to64, mkexpr(src_masked)) ); in math_MPSADBW_128()
19005 assign( dHi, unop(Iop_V128HIto64, mkexpr(dst_masked)) ); in math_MPSADBW_128()
19006 assign( dLo, unop(Iop_V128to64, mkexpr(dst_masked)) ); in math_MPSADBW_128()
19084 assign(t0, unop((imm8&1)? Iop_V128HIto64 : Iop_V128to64, in math_PCLMULQDQ()
19086 assign(t1, unop((imm8&16) ? Iop_V128HIto64 : Iop_V128to64, in math_PCLMULQDQ()
19477 assign( new8, unop(Iop_32to8, getIReg32(rE)) ); in dis_ESC_0F3A__SSE4()
20124 unop(Iop_32Sto64, in dis_ESC_NONE()
20134 unop(Iop_32Sto64, in dis_ESC_NONE()
20403 ? unop(Iop_64to32, mkexpr(addr)) in dis_ESC_NONE()
20484 putIRegRAX( 8, unop(Iop_32Sto64, getIRegRAX(4)) ); in dis_ESC_NONE()
20489 putIRegRAX( 4, unop(Iop_16Sto32, getIRegRAX(2)) ); in dis_ESC_NONE()
20494 putIRegRAX( 2, unop(Iop_8Sto16, getIRegRAX(1)) ); in dis_ESC_NONE()
20567 storeLE( mkexpr(t1), unop(Iop_32to16, in dis_ESC_NONE()
20568 unop(Iop_64to32,mkexpr(t5))) ); in dis_ESC_NONE()
20606 unop(Iop_64to1, in dis_ESC_NONE()
20618 unop(Iop_64to1, in dis_ESC_NONE()
20630 unop(Iop_64to1, in dis_ESC_NONE()
21126 unop(Iop_64to32, getIReg64(R_RCX)), in dis_ESC_NONE()
21169 unop(Iop_32Uto64, getIReg32(R_RCX)), in dis_ESC_NONE()
21206 assign(t1, unop(Iop_16Uto64, getIRegRDX(2))); in dis_ESC_NONE()
21213 assign(t1, unop(Iop_16Uto64, getIRegRDX(2))); in dis_ESC_NONE()
21255 assign( t1, unop(Iop_16Uto64, getIRegRDX(2)) ); in dis_ESC_NONE()
21262 assign( t1, unop(Iop_16Uto64, getIRegRDX(2)) ); in dis_ESC_NONE()
21716 putIRegRDX(4, unop(Iop_64HIto32, mkexpr(val))); in dis_ESC_0F()
21717 putIRegRAX(4, unop(Iop_64to32, mkexpr(val))); in dis_ESC_0F()
21838 assign( t1, unop(Iop_1Uto8,mk_amd64g_calculate_condition(opc-0x90)) ); in dis_ESC_0F()
22212 assign( expdHi, sz==4 ? unop(Iop_64to32, mkexpr(expdHi64)) in dis_ESC_0F()
22214 assign( expdLo, sz==4 ? unop(Iop_64to32, mkexpr(expdLo64)) in dis_ESC_0F()
22256 sz == 4 ? unop(Iop_32Uto64, mkexpr(oldHi)) in dis_ESC_0F()
22262 sz == 4 ? unop(Iop_32Uto64, mkexpr(oldLo)) in dis_ESC_0F()
22276 unop(Iop_1Uto64, mkexpr(success)), mkU64(1)), in dis_ESC_0F()
22634 assign(tSL, invertLeftArg ? unop(Iop_NotV128, getXMMReg(rSL)) in dis_VEX_NDS_128_AnySimdPfx_0F_WIG()
22739 assign( amt8, unop(Iop_64to8, mkexpr(amt)) ); in dis_AVX128_shiftV_byE()
22813 assign( amt8, unop(Iop_64to8, mkexpr(amt)) ); in dis_AVX256_shiftV_byE()
22945 unop(size == 32 ? Iop_32to8 : Iop_64to8, in dis_AVX_var_shiftV_byE()
23114 assign( epart, unop( Iop_64UtoV128, in dis_AVX128_E_V_to_G_lo64()
23168 putYMMRegLoAndZU( rG, unop(op, mkexpr(arg)) ); in dis_AVX128_E_V_to_G_lo64_unary()
23215 putYMMRegLoAndZU( rG, unop(op, mkexpr(arg)) ); in dis_AVX128_E_V_to_G_lo32_unary()
23251 assign( epart, unop( Iop_32UtoV128, in dis_AVX128_E_V_to_G_lo32()
23323 : sz == 8 ? unop( Iop_64UtoV128, loadLE(Ity_I64, mkexpr(addr))) in dis_AVX128_cmp_V_E_to_G()
23324 : /*sz==4*/ unop( Iop_32UtoV128, loadLE(Ity_I32, mkexpr(addr)))); in dis_AVX128_cmp_V_E_to_G()
23337 putYMMRegLoAndZU( rG, unop(Iop_NotV128, mkexpr(plain)) ); in dis_AVX128_cmp_V_E_to_G()
23371 unop(Iop_NotV128, mkexpr(plain)), in dis_AVX128_cmp_V_E_to_G()
23452 putYMMReg( rG, unop(Iop_NotV256, mkexpr(plain)) ); in dis_AVX256_cmp_V_E_to_G()
23524 : unop(op, mkexpr(arg)); in dis_AVX128_E_to_G_unary_all()
23553 assign(tSL, invertLeftArg ? unop(Iop_NotV256, getYMMReg(rSL)) in dis_VEX_NDS_256_AnySimdPfx_0F_WIG()
23700 putYMMReg( rG, unop(op, mkexpr(arg)) ); in dis_AVX256_E_to_G_unary_all()
23734 unop(Iop_ReinterpF64asI64, unop(Iop_I32StoF64, mkexpr(s3))), in dis_CVTDQ2PD_256()
23735 unop(Iop_ReinterpF64asI64, unop(Iop_I32StoF64, mkexpr(s2))), in dis_CVTDQ2PD_256()
23736 unop(Iop_ReinterpF64asI64, unop(Iop_I32StoF64, mkexpr(s1))), in dis_CVTDQ2PD_256()
23737 unop(Iop_ReinterpF64asI64, unop(Iop_I32StoF64, mkexpr(s0))) in dis_CVTDQ2PD_256()
23771 unop(Iop_ReinterpI64asF64, mkexpr(_t)) ) in dis_CVTPD2PS_256()
24611 unop(Iop_I32StoF64, mkexpr(arg32))); in dis_ESC_0F__VEX()
24697 unop(Iop_I32StoF64, mkexpr(arg32)) ) ); in dis_ESC_0F__VEX()
25181 unop( Iop_F32toF64, mkexpr(f32lo)) ); in dis_ESC_0F__VEX()
25663 unop( Iop_32UtoV128, getIReg32(eregOfRexRM(pfx,modrm)) ) in dis_ESC_0F__VEX()
25672 unop( Iop_32UtoV128,loadLE(Ity_I32, mkexpr(addr))) in dis_ESC_0F__VEX()
25688 unop( Iop_64UtoV128, getIReg64(eregOfRexRM(pfx,modrm)) ) in dis_ESC_0F__VEX()
25697 unop( Iop_64UtoV128,loadLE(Ity_I64, mkexpr(addr))) in dis_ESC_0F__VEX()
26414 assign( new16, unop(Iop_32to16, in dis_ESC_0F__VEX()
27452 = IRExpr_ITE( unop(Iop_64to1, in math_PERMILPD_VAR_128()
27456 = IRExpr_ITE( unop(Iop_64to1, in math_PERMILPD_VAR_128()
27666 assign(tem, unop(ops[i / 2 + j], mkexpr(vX))); in dis_FMA()
27667 x[i] = unop(Iop_64to32, mkexpr(tem)); in dis_FMA()
27668 x[i + 1] = unop(Iop_64HIto32, mkexpr(tem)); in dis_FMA()
27670 assign(tem, unop(ops[i / 2 + j], mkexpr(vY))); in dis_FMA()
27671 y[i] = unop(Iop_64to32, mkexpr(tem)); in dis_FMA()
27672 y[i + 1] = unop(Iop_64HIto32, mkexpr(tem)); in dis_FMA()
27674 assign(tem, unop(ops[i / 2 + j], mkexpr(vZ))); in dis_FMA()
27675 z[i] = unop(Iop_64to32, mkexpr(tem)); in dis_FMA()
27676 z[i + 1] = unop(Iop_64HIto32, mkexpr(tem)); in dis_FMA()
27682 x[i] = unop(ops[i + j], mkexpr(vX)); in dis_FMA()
27683 y[i] = unop(ops[i + j], mkexpr(vY)); in dis_FMA()
27684 z[i] = unop(ops[i + j], mkexpr(vZ)); in dis_FMA()
27691 x[i] = unop(op, x[i]); in dis_FMA()
27692 y[i] = unop(op, y[i]); in dis_FMA()
27693 z[i] = unop(op, z[i]); in dis_FMA()
27697 z[i] = unop(ty == Ity_F64 ? Iop_NegF64 : Iop_NegF32, z[i]); in dis_FMA()
27701 x[i] = unop(ty == Ity_F64 ? Iop_NegF64 : Iop_NegF32, x[i]); in dis_FMA()
27844 : unop(Iop_32Sto64, getYMMRegLane32( rI, i )); in dis_VGATHER()
29214 assign(t8, unop(Iop_32to8, getXMMRegLane32(rE, 0))); in dis_ESC_0F38__VEX()
29241 assign(t8, unop(Iop_32to8, getXMMRegLane32(rE, 0))); in dis_ESC_0F38__VEX()
29272 assign(t16, unop(Iop_32to16, getXMMRegLane32(rE, 0))); in dis_ESC_0F38__VEX()
29297 assign(t16, unop(Iop_32to16, getXMMRegLane32(rE, 0))); in dis_ESC_0F38__VEX()
29682 unop( mkSizedOp(ty,Iop_Not8), mkexpr(src1) ), in dis_ESC_0F38__VEX()
29829 unop(Iop_8Uto32, mkexpr(start)), in dis_ESC_0F38__VEX()
29967 unop(size == 8 ? Iop_128to64 : Iop_64to32, mkexpr(res)) ); in dis_ESC_0F38__VEX()
29969 unop(size == 8 ? Iop_128HIto64 : Iop_64HIto32, in dis_ESC_0F38__VEX()
30024 assign( start, unop( Iop_16to8, mkexpr(stle) ) ); in dis_ESC_0F38__VEX()
30025 assign( len, unop( Iop_16HIto8, mkexpr(stle) ) ); in dis_ESC_0F38__VEX()
30041 unop(Iop_8Uto32, mkexpr(start)), in dis_ESC_0F38__VEX()
30042 unop(Iop_8Uto32, mkexpr(len))), in dis_ESC_0F38__VEX()
30058 unop(Iop_8Uto32, mkexpr(start)), in dis_ESC_0F38__VEX()
30344 assign(s1, unop(Iop_V128HIto64, mkexpr(sV))); in dis_ESC_0F3A__VEX()
30345 assign(s0, unop(Iop_V128to64, mkexpr(sV))); in dis_ESC_0F3A__VEX()
30476 unop(Iop_ReinterpI32asF32, mkexpr(s))) in dis_ESC_0F3A__VEX()
30527 unop(Iop_ReinterpI32asF32, mkexpr(s))) in dis_ESC_0F3A__VEX()
30579 unop(Iop_ReinterpI64asF64, mkexpr(s))) in dis_ESC_0F3A__VEX()
30624 unop(Iop_ReinterpI64asF64, mkexpr(s))) in dis_ESC_0F3A__VEX()
31090 assign( src_u8, unop(Iop_32to8, getIReg32( rE )) ); in dis_ESC_0F3A__VEX()