Lines Matching refs:mce (cross-reference hits for the MCEnv* instrumentation environment threaded through Memcheck's IR instrumenter; each entry shows the original source line number, the matching code, and the enclosing function)

550 static IRExpr* expr2vbits ( struct _MCEnv* mce, IRExpr* e );
610 static IRTemp findShadowTmp ( MCEnv* mce, IRTemp orig ) in findShadowTmp() argument
612 tl_assert(orig < mce->n_originalTmps); in findShadowTmp()
613 if (mce->tmpMap[orig] == IRTemp_INVALID) { in findShadowTmp()
614 mce->tmpMap[orig] in findShadowTmp()
615 = newIRTemp(mce->bb->tyenv, in findShadowTmp()
616 shadowType(mce->bb->tyenv->types[orig])); in findShadowTmp()
618 return mce->tmpMap[orig]; in findShadowTmp()
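
The fragments above reassemble into the complete lazy shadow-temp lookup; the elided lines are only braces. Original temps occupy indices [0, n_originalTmps) in the type environment, and each one's shadow is created on first use by appending a fresh temp of the shadow type. A reconstruction, for readability:

    static IRTemp findShadowTmp ( MCEnv* mce, IRTemp orig )
    {
       tl_assert(orig < mce->n_originalTmps);
       if (mce->tmpMap[orig] == IRTemp_INVALID) {
          /* first use: append a fresh temp of the shadow type */
          mce->tmpMap[orig]
             = newIRTemp(mce->bb->tyenv,
                         shadowType(mce->bb->tyenv->types[orig]));
       }
       return mce->tmpMap[orig];
    }

Because shadows are always appended after the originals, an index below n_originalTmps denotes an original temp and anything at or above it denotes a shadow; that is precisely the test isOriginalAtom and isShadowAtom apply below.
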
627 static void newShadowTmp ( MCEnv* mce, IRTemp orig ) in newShadowTmp() argument
629 tl_assert(orig < mce->n_originalTmps); in newShadowTmp()
630 mce->tmpMap[orig] in newShadowTmp()
631 = newIRTemp(mce->bb->tyenv, in newShadowTmp()
632 shadowType(mce->bb->tyenv->types[orig])); in newShadowTmp()
651 static Bool isOriginalAtom ( MCEnv* mce, IRAtom* a1 ) in isOriginalAtom() argument
655 if (a1->tag == Iex_RdTmp && a1->Iex.RdTmp.tmp < mce->n_originalTmps) in isOriginalAtom()
662 static Bool isShadowAtom ( MCEnv* mce, IRAtom* a1 ) in isShadowAtom() argument
666 if (a1->tag == Iex_RdTmp && a1->Iex.RdTmp.tmp >= mce->n_originalTmps) in isShadowAtom()
748 static IRAtom* assignNew ( MCEnv* mce, IRType ty, IRExpr* e ) { in assignNew() argument
749 IRTemp t = newIRTemp(mce->bb->tyenv, ty); in assignNew()
750 assign(mce->bb, t, e); in assignNew()
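
assignNew is the basic flattening helper used throughout: bind expression e to a fresh temp of type ty and return an atom naming that temp, since IR operands must be atoms (temps or constants). The return statement is elided in the listing; presumably it wraps the temp with IRExpr_RdTmp, the constructor used elsewhere in this file (line 2172). A sketch under that assumption:

    static IRAtom* assignNew ( MCEnv* mce, IRType ty, IRExpr* e ) {
       IRTemp t = newIRTemp(mce->bb->tyenv, ty);   /* fresh temp of type ty */
       assign(mce->bb, t, e);                      /* emit: t = e           */
       return IRExpr_RdTmp(t);                     /* assumed final line    */
    }
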
761 static IRAtom* mkDifD8 ( MCEnv* mce, IRAtom* a1, IRAtom* a2 ) { in mkDifD8() argument
762 tl_assert(isShadowAtom(mce,a1)); in mkDifD8()
763 tl_assert(isShadowAtom(mce,a2)); in mkDifD8()
764 return assignNew(mce, Ity_I8, binop(Iop_And8, a1, a2)); in mkDifD8()
767 static IRAtom* mkDifD16 ( MCEnv* mce, IRAtom* a1, IRAtom* a2 ) { in mkDifD16() argument
768 tl_assert(isShadowAtom(mce,a1)); in mkDifD16()
769 tl_assert(isShadowAtom(mce,a2)); in mkDifD16()
770 return assignNew(mce, Ity_I16, binop(Iop_And16, a1, a2)); in mkDifD16()
773 static IRAtom* mkDifD32 ( MCEnv* mce, IRAtom* a1, IRAtom* a2 ) { in mkDifD32() argument
774 tl_assert(isShadowAtom(mce,a1)); in mkDifD32()
775 tl_assert(isShadowAtom(mce,a2)); in mkDifD32()
776 return assignNew(mce, Ity_I32, binop(Iop_And32, a1, a2)); in mkDifD32()
779 static IRAtom* mkDifD64 ( MCEnv* mce, IRAtom* a1, IRAtom* a2 ) { in mkDifD64() argument
780 tl_assert(isShadowAtom(mce,a1)); in mkDifD64()
781 tl_assert(isShadowAtom(mce,a2)); in mkDifD64()
782 return assignNew(mce, Ity_I64, binop(Iop_And64, a1, a2)); in mkDifD64()
785 static IRAtom* mkDifDV128 ( MCEnv* mce, IRAtom* a1, IRAtom* a2 ) { in mkDifDV128() argument
786 tl_assert(isShadowAtom(mce,a1)); in mkDifDV128()
787 tl_assert(isShadowAtom(mce,a2)); in mkDifDV128()
788 return assignNew(mce, Ity_V128, binop(Iop_AndV128, a1, a2)); in mkDifDV128()
793 static IRAtom* mkUifU8 ( MCEnv* mce, IRAtom* a1, IRAtom* a2 ) { in mkUifU8() argument
794 tl_assert(isShadowAtom(mce,a1)); in mkUifU8()
795 tl_assert(isShadowAtom(mce,a2)); in mkUifU8()
796 return assignNew(mce, Ity_I8, binop(Iop_Or8, a1, a2)); in mkUifU8()
799 static IRAtom* mkUifU16 ( MCEnv* mce, IRAtom* a1, IRAtom* a2 ) { in mkUifU16() argument
800 tl_assert(isShadowAtom(mce,a1)); in mkUifU16()
801 tl_assert(isShadowAtom(mce,a2)); in mkUifU16()
802 return assignNew(mce, Ity_I16, binop(Iop_Or16, a1, a2)); in mkUifU16()
805 static IRAtom* mkUifU32 ( MCEnv* mce, IRAtom* a1, IRAtom* a2 ) { in mkUifU32() argument
806 tl_assert(isShadowAtom(mce,a1)); in mkUifU32()
807 tl_assert(isShadowAtom(mce,a2)); in mkUifU32()
808 return assignNew(mce, Ity_I32, binop(Iop_Or32, a1, a2)); in mkUifU32()
811 static IRAtom* mkUifU64 ( MCEnv* mce, IRAtom* a1, IRAtom* a2 ) { in mkUifU64() argument
812 tl_assert(isShadowAtom(mce,a1)); in mkUifU64()
813 tl_assert(isShadowAtom(mce,a2)); in mkUifU64()
814 return assignNew(mce, Ity_I64, binop(Iop_Or64, a1, a2)); in mkUifU64()
817 static IRAtom* mkUifUV128 ( MCEnv* mce, IRAtom* a1, IRAtom* a2 ) { in mkUifUV128() argument
818 tl_assert(isShadowAtom(mce,a1)); in mkUifUV128()
819 tl_assert(isShadowAtom(mce,a2)); in mkUifUV128()
820 return assignNew(mce, Ity_V128, binop(Iop_OrV128, a1, a2)); in mkUifUV128()
823 static IRAtom* mkUifU ( MCEnv* mce, IRType vty, IRAtom* a1, IRAtom* a2 ) { in mkUifU() argument
825 case Ity_I8: return mkUifU8(mce, a1, a2); in mkUifU()
826 case Ity_I16: return mkUifU16(mce, a1, a2); in mkUifU()
827 case Ity_I32: return mkUifU32(mce, a1, a2); in mkUifU()
828 case Ity_I64: return mkUifU64(mce, a1, a2); in mkUifU()
829 case Ity_V128: return mkUifUV128(mce, a1, a2); in mkUifU()
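
The mkDifD* and mkUifU* families are the two primitive combiners of shadow ("V") bits, where a V bit of 1 means undefined. DifD ("Defined if either Defined") is a bitwise And of the shadows; UifU ("Undefined if either Undefined") is a bitwise Or, as the And8/Or8 through AndV128/OrV128 fragments show. A minimal runnable model of what the emitted IR computes, on plain 32-bit words (names illustrative, not from the source):

    #include <stdint.h>

    /* Shadow convention: a V bit of 1 means "undefined". */
    static uint32_t difd32 ( uint32_t v1, uint32_t v2 )
    {
       return v1 & v2;   /* defined wherever either operand bit is defined */
    }

    static uint32_t uifu32 ( uint32_t v1, uint32_t v2 )
    {
       return v1 | v2;   /* undefined wherever either operand bit is undefined */
    }
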
838 static IRAtom* mkLeft8 ( MCEnv* mce, IRAtom* a1 ) { in mkLeft8() argument
839 tl_assert(isShadowAtom(mce,a1)); in mkLeft8()
841 return assignNew(mce, Ity_I8, in mkLeft8()
843 assignNew(mce, Ity_I8, in mkLeft8()
848 static IRAtom* mkLeft16 ( MCEnv* mce, IRAtom* a1 ) { in mkLeft16() argument
849 tl_assert(isShadowAtom(mce,a1)); in mkLeft16()
851 return assignNew(mce, Ity_I16, in mkLeft16()
853 assignNew(mce, Ity_I16, in mkLeft16()
858 static IRAtom* mkLeft32 ( MCEnv* mce, IRAtom* a1 ) { in mkLeft32() argument
859 tl_assert(isShadowAtom(mce,a1)); in mkLeft32()
861 return assignNew(mce, Ity_I32, in mkLeft32()
863 assignNew(mce, Ity_I32, in mkLeft32()
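
The mkLeft8/16/32 helpers smear undefinedness leftwards: Memcheck's Left operation is vbits | -vbits, which marks every bit at or above the least significant undefined bit as undefined. It models carry propagation in Add/Sub (see the mkLeft(mkUifU(...)) uses in expr2vbits_Binop below). The operator structure is elided in the listing, so treat this runnable model as a sketch of the documented semantics rather than a transcription:

    #include <stdint.h>

    /* Left(v) = v | -v: all bits at or above the lowest undefined bit
       become undefined, approximating where a carry could reach. */
    static uint32_t left32 ( uint32_t v )
    {
       return v | (0u - v);
    }
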
873 static IRAtom* mkImproveAND8 ( MCEnv* mce, IRAtom* data, IRAtom* vbits ) in mkImproveAND8() argument
875 tl_assert(isOriginalAtom(mce, data)); in mkImproveAND8()
876 tl_assert(isShadowAtom(mce, vbits)); in mkImproveAND8()
878 return assignNew(mce, Ity_I8, binop(Iop_Or8, data, vbits)); in mkImproveAND8()
881 static IRAtom* mkImproveAND16 ( MCEnv* mce, IRAtom* data, IRAtom* vbits ) in mkImproveAND16() argument
883 tl_assert(isOriginalAtom(mce, data)); in mkImproveAND16()
884 tl_assert(isShadowAtom(mce, vbits)); in mkImproveAND16()
886 return assignNew(mce, Ity_I16, binop(Iop_Or16, data, vbits)); in mkImproveAND16()
889 static IRAtom* mkImproveAND32 ( MCEnv* mce, IRAtom* data, IRAtom* vbits ) in mkImproveAND32() argument
891 tl_assert(isOriginalAtom(mce, data)); in mkImproveAND32()
892 tl_assert(isShadowAtom(mce, vbits)); in mkImproveAND32()
894 return assignNew(mce, Ity_I32, binop(Iop_Or32, data, vbits)); in mkImproveAND32()
897 static IRAtom* mkImproveAND64 ( MCEnv* mce, IRAtom* data, IRAtom* vbits ) in mkImproveAND64() argument
899 tl_assert(isOriginalAtom(mce, data)); in mkImproveAND64()
900 tl_assert(isShadowAtom(mce, vbits)); in mkImproveAND64()
902 return assignNew(mce, Ity_I64, binop(Iop_Or64, data, vbits)); in mkImproveAND64()
905 static IRAtom* mkImproveANDV128 ( MCEnv* mce, IRAtom* data, IRAtom* vbits ) in mkImproveANDV128() argument
907 tl_assert(isOriginalAtom(mce, data)); in mkImproveANDV128()
908 tl_assert(isShadowAtom(mce, vbits)); in mkImproveANDV128()
910 return assignNew(mce, Ity_V128, binop(Iop_OrV128, data, vbits)); in mkImproveANDV128()
916 static IRAtom* mkImproveOR8 ( MCEnv* mce, IRAtom* data, IRAtom* vbits ) in mkImproveOR8() argument
918 tl_assert(isOriginalAtom(mce, data)); in mkImproveOR8()
919 tl_assert(isShadowAtom(mce, vbits)); in mkImproveOR8()
922 mce, Ity_I8, in mkImproveOR8()
924 assignNew(mce, Ity_I8, unop(Iop_Not8, data)), in mkImproveOR8()
928 static IRAtom* mkImproveOR16 ( MCEnv* mce, IRAtom* data, IRAtom* vbits ) in mkImproveOR16() argument
930 tl_assert(isOriginalAtom(mce, data)); in mkImproveOR16()
931 tl_assert(isShadowAtom(mce, vbits)); in mkImproveOR16()
934 mce, Ity_I16, in mkImproveOR16()
936 assignNew(mce, Ity_I16, unop(Iop_Not16, data)), in mkImproveOR16()
940 static IRAtom* mkImproveOR32 ( MCEnv* mce, IRAtom* data, IRAtom* vbits ) in mkImproveOR32() argument
942 tl_assert(isOriginalAtom(mce, data)); in mkImproveOR32()
943 tl_assert(isShadowAtom(mce, vbits)); in mkImproveOR32()
946 mce, Ity_I32, in mkImproveOR32()
948 assignNew(mce, Ity_I32, unop(Iop_Not32, data)), in mkImproveOR32()
952 static IRAtom* mkImproveOR64 ( MCEnv* mce, IRAtom* data, IRAtom* vbits ) in mkImproveOR64() argument
954 tl_assert(isOriginalAtom(mce, data)); in mkImproveOR64()
955 tl_assert(isShadowAtom(mce, vbits)); in mkImproveOR64()
958 mce, Ity_I64, in mkImproveOR64()
960 assignNew(mce, Ity_I64, unop(Iop_Not64, data)), in mkImproveOR64()
964 static IRAtom* mkImproveORV128 ( MCEnv* mce, IRAtom* data, IRAtom* vbits ) in mkImproveORV128() argument
966 tl_assert(isOriginalAtom(mce, data)); in mkImproveORV128()
967 tl_assert(isShadowAtom(mce, vbits)); in mkImproveORV128()
970 mce, Ity_V128, in mkImproveORV128()
972 assignNew(mce, Ity_V128, unop(Iop_NotV128, data)), in mkImproveORV128()
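
Plain UifU is too pessimistic for And and Or: for And, an operand bit that is a defined zero forces the result bit to a defined zero no matter what the other operand holds, and for Or a defined one does the same. The improvement terms above encode exactly that, visibly Or8(data, vbits) for And (zero precisely at defined-zero bits) and Or8(Not8(data), vbits) for Or (zero precisely at defined-one bits). A runnable model (names illustrative):

    #include <stdint.h>

    /* 0 exactly where the operand bit is a defined 0 (rescues And). */
    static uint32_t improve_and32 ( uint32_t data, uint32_t vbits )
    {
       return data | vbits;
    }

    /* 0 exactly where the operand bit is a defined 1 (rescues Or). */
    static uint32_t improve_or32 ( uint32_t data, uint32_t vbits )
    {
       return ~data | vbits;
    }

How these terms combine with UifU appears in expr2vbits_Binop further down (the difd/uifu/improve nest at lines 1936-1940).
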
978 static IRAtom* mkPCastTo( MCEnv* mce, IRType dst_ty, IRAtom* vbits ) in mkPCastTo() argument
984 tl_assert(isShadowAtom(mce,vbits)); in mkPCastTo()
985 ty = typeOfIRExpr(mce->bb->tyenv, vbits); in mkPCastTo()
992 tmp1 = assignNew(mce, Ity_I1, binop(Iop_CmpNE8, vbits, mkU8(0))); in mkPCastTo()
995 tmp1 = assignNew(mce, Ity_I1, binop(Iop_CmpNE16, vbits, mkU16(0))); in mkPCastTo()
998 tmp1 = assignNew(mce, Ity_I1, binop(Iop_CmpNE32, vbits, mkU32(0))); in mkPCastTo()
1001 tmp1 = assignNew(mce, Ity_I1, binop(Iop_CmpNE64, vbits, mkU64(0))); in mkPCastTo()
1012 return assignNew(mce, Ity_I8, unop(Iop_1Sto8, tmp1)); in mkPCastTo()
1014 return assignNew(mce, Ity_I16, unop(Iop_1Sto16, tmp1)); in mkPCastTo()
1016 return assignNew(mce, Ity_I32, unop(Iop_1Sto32, tmp1)); in mkPCastTo()
1018 return assignNew(mce, Ity_I64, unop(Iop_1Sto64, tmp1)); in mkPCastTo()
1020 tmp1 = assignNew(mce, Ity_I64, unop(Iop_1Sto64, tmp1)); in mkPCastTo()
1021 tmp1 = assignNew(mce, Ity_V128, binop(Iop_64HLtoV128, tmp1, tmp1)); in mkPCastTo()
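
mkPCastTo is the pessimistic cast between shadow types: compare the source vbits against zero (the CmpNE8/16/32/64 arms), then sign-extend that single bit to the destination width (the 1Sto8/16/32/64 arms); for V128 the 64-bit result is duplicated into both halves with 64HLtoV128. So any undefined bit in the source makes every bit of the result undefined, and a fully defined source stays fully defined. A runnable model for a 32-to-64-bit cast:

    #include <stdint.h>

    /* Pessimistic cast: one undefined bit poisons the whole result. */
    static uint64_t pcast_32_to_64 ( uint32_t vbits )
    {
       return (vbits != 0) ? ~0ULL : 0ULL;   /* CmpNE32 then 1Sto64 */
    }
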
1039 static void setHelperAnns ( MCEnv* mce, IRDirty* di ) { in setHelperAnns() argument
1042 di->fxState[0].offset = mce->layout->offset_SP; in setHelperAnns()
1043 di->fxState[0].size = mce->layout->sizeof_SP; in setHelperAnns()
1045 di->fxState[1].offset = mce->layout->offset_IP; in setHelperAnns()
1046 di->fxState[1].size = mce->layout->sizeof_IP; in setHelperAnns()
1060 static void complainIfUndefined ( MCEnv* mce, IRAtom* atom ) in complainIfUndefined() argument
1072 tl_assert(isOriginalAtom(mce, atom)); in complainIfUndefined()
1073 vatom = expr2vbits( mce, atom ); in complainIfUndefined()
1074 tl_assert(isShadowAtom(mce, vatom)); in complainIfUndefined()
1077 ty = typeOfIRExpr(mce->bb->tyenv, vatom); in complainIfUndefined()
1082 cond = mkPCastTo( mce, Ity_I1, vatom ); in complainIfUndefined()
1116 setHelperAnns( mce, di ); in complainIfUndefined()
1117 stmt( mce->bb, IRStmt_Dirty(di)); in complainIfUndefined()
1126 newShadowTmp(mce, atom->Iex.RdTmp.tmp); in complainIfUndefined()
1127 assign(mce->bb, findShadowTmp(mce, atom->Iex.RdTmp.tmp), in complainIfUndefined()
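
complainIfUndefined computes the expression's vbits, folds them to a single bit with mkPCastTo(mce, Ity_I1, vatom), and uses that bit as the guard of an annotated dirty helper call that reports the error. The tail (lines 1126-1127) then rebinds the atom's shadow temp, which reads as resetting the value to defined so one bad value does not trigger a cascade of reports. A behavioural model; the reporting function here is a hypothetical stand-in, since the helper's name is not visible in the listing:

    #include <stdint.h>

    static void report_value_error ( void )
    {
       /* hypothetical stand-in for the dirty-call error helper */
    }

    static void complain_if_undefined ( uint32_t* vbits )
    {
       if (*vbits != 0)            /* PCast to Ity_I1: any undefined bit? */
          report_value_error();    /* the guarded dirty call              */
       *vbits = 0;                 /* reset shadow: report each use once  */
    }
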
1142 static Bool isAlwaysDefd ( MCEnv* mce, Int offset, Int size ) in isAlwaysDefd() argument
1150 for (i = 0; i < mce->layout->n_alwaysDefd; i++) { in isAlwaysDefd()
1151 minoffD = mce->layout->alwaysDefd[i].offset; in isAlwaysDefd()
1152 maxoffD = minoffD + mce->layout->alwaysDefd[i].size - 1; in isAlwaysDefd()
1174 void do_shadow_PUT ( MCEnv* mce, Int offset, in do_shadow_PUT() argument
1180 tl_assert(isOriginalAtom(mce, atom)); in do_shadow_PUT()
1181 vatom = expr2vbits( mce, atom ); in do_shadow_PUT()
1184 tl_assert(isShadowAtom(mce, vatom)); in do_shadow_PUT()
1187 ty = typeOfIRExpr(mce->bb->tyenv, vatom); in do_shadow_PUT()
1189 if (isAlwaysDefd(mce, offset, sizeofIRType(ty))) { in do_shadow_PUT()
1195 stmt( mce->bb, IRStmt_Put( offset + mce->layout->total_sizeB, vatom ) ); in do_shadow_PUT()
1204 void do_shadow_PUTI ( MCEnv* mce, in do_shadow_PUTI() argument
1211 tl_assert(isOriginalAtom(mce,atom)); in do_shadow_PUTI()
1212 vatom = expr2vbits( mce, atom ); in do_shadow_PUTI()
1218 tl_assert(isOriginalAtom(mce,ix)); in do_shadow_PUTI()
1219 complainIfUndefined(mce,ix); in do_shadow_PUTI()
1220 if (isAlwaysDefd(mce, descr->base, arrSize)) { in do_shadow_PUTI()
1228 = mkIRRegArray( descr->base + mce->layout->total_sizeB, in do_shadow_PUTI()
1230 stmt( mce->bb, IRStmt_PutI( mkIRPutI( new_descr, ix, bias, vatom ) )); in do_shadow_PUTI()
1239 IRExpr* shadow_GET ( MCEnv* mce, Int offset, IRType ty ) in shadow_GET() argument
1243 if (isAlwaysDefd(mce, offset, sizeofIRType(ty))) { in shadow_GET()
1249 return IRExpr_Get( offset + mce->layout->total_sizeB, tyS ); in shadow_GET()
1258 IRExpr* shadow_GETI ( MCEnv* mce, IRRegArray* descr, IRAtom* ix, Int bias ) in shadow_GETI() argument
1264 tl_assert(isOriginalAtom(mce,ix)); in shadow_GETI()
1265 complainIfUndefined(mce,ix); in shadow_GETI()
1266 if (isAlwaysDefd(mce, descr->base, arrSize)) { in shadow_GETI()
1273 = mkIRRegArray( descr->base + mce->layout->total_sizeB, in shadow_GETI()
1289 IRAtom* mkLazy2 ( MCEnv* mce, IRType finalVty, IRAtom* va1, IRAtom* va2 ) in mkLazy2() argument
1293 tl_assert(isShadowAtom(mce,va1)); in mkLazy2()
1294 tl_assert(isShadowAtom(mce,va2)); in mkLazy2()
1295 at = mkPCastTo(mce, Ity_I32, va1); in mkLazy2()
1296 at = mkUifU(mce, Ity_I32, at, mkPCastTo(mce, Ity_I32, va2)); in mkLazy2()
1297 at = mkPCastTo(mce, finalVty, at); in mkLazy2()
1308 IRAtom* mkLazyN ( MCEnv* mce, in mkLazyN() argument
1316 tl_assert(isOriginalAtom(mce, exprvec[i])); in mkLazyN()
1326 here = mkPCastTo( mce, Ity_I32, expr2vbits(mce, exprvec[i]) ); in mkLazyN()
1327 curr = mkUifU32(mce, here, curr); in mkLazyN()
1330 return mkPCastTo(mce, finalVtype, curr ); in mkLazyN()
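
mkLazy2 and mkLazyN are the catch-all rule for operations with no precise handler (helper calls, most FP ops): pessimistically cast each argument's shadow to I32, UifU the casts together, then pessimistically cast the accumulator to the result type. The result is fully defined only when every argument is fully defined. A runnable model of the N-ary form:

    #include <stdint.h>

    static uint32_t pcast32 ( uint32_t v ) { return v ? ~0u : 0u; }

    /* Lazy rule: fully undefined unless every argument is fully defined. */
    static uint64_t lazy_n_to_64 ( const uint32_t* arg_vbits, int n )
    {
       uint32_t curr = 0;                    /* start fully defined     */
       for (int i = 0; i < n; i++)
          curr |= pcast32(arg_vbits[i]);     /* mkUifU32 of the PCasts  */
       return curr ? ~0ULL : 0ULL;           /* mkPCastTo(finalVty)     */
    }
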
1341 IRAtom* expensiveAdd32 ( MCEnv* mce, IRAtom* qaa, IRAtom* qbb, in expensiveAdd32() argument
1348 tl_assert(isShadowAtom(mce,qaa)); in expensiveAdd32()
1349 tl_assert(isShadowAtom(mce,qbb)); in expensiveAdd32()
1350 tl_assert(isOriginalAtom(mce,aa)); in expensiveAdd32()
1351 tl_assert(isOriginalAtom(mce,bb)); in expensiveAdd32()
1363 a_min = assignNew(mce,ty, in expensiveAdd32()
1365 assignNew(mce,ty, unop(opNOT, qaa)))); in expensiveAdd32()
1368 b_min = assignNew(mce,ty, in expensiveAdd32()
1370 assignNew(mce,ty, unop(opNOT, qbb)))); in expensiveAdd32()
1373 a_max = assignNew(mce,ty, binop(opOR, aa, qaa)); in expensiveAdd32()
1376 b_max = assignNew(mce,ty, binop(opOR, bb, qbb)); in expensiveAdd32()
1380 assignNew(mce,ty, in expensiveAdd32()
1382 assignNew(mce,ty, binop(opOR, qaa, qbb)), in expensiveAdd32()
1383 assignNew(mce,ty, in expensiveAdd32()
1384 binop(opXOR, assignNew(mce,ty, binop(opADD, a_min, b_min)), in expensiveAdd32()
1385 assignNew(mce,ty, binop(opADD, a_max, b_max)) in expensiveAdd32()
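
expensiveAdd32 is the precise rule for 32-bit addition and is almost fully visible above: pin every undefined operand bit to 0 (a_min = aa And Not(qaa)) and to 1 (a_max = aa Or qaa), add both extremes, and flag as undefined every bit where the two sums disagree, plus every bit that was already undefined in an input. A runnable model (qaa and qbb are the shadow words of aa and bb):

    #include <stdint.h>

    static uint32_t expensive_add32 ( uint32_t aa, uint32_t qaa,
                                      uint32_t bb, uint32_t qbb )
    {
       uint32_t a_min = aa & ~qaa;   /* undefined bits pinned to 0 */
       uint32_t b_min = bb & ~qbb;
       uint32_t a_max = aa |  qaa;   /* undefined bits pinned to 1 */
       uint32_t b_max = bb |  qbb;
       /* a bit is undefined if an input bit was, or if an undefined
          carry could reach it (the two extreme sums differ there)   */
       return (qaa | qbb) | ((a_min + b_min) ^ (a_max + b_max));
    }
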
1399 static IRAtom* mkPCast8x16 ( MCEnv* mce, IRAtom* at ) in mkPCast8x16() argument
1401 return assignNew(mce, Ity_V128, unop(Iop_CmpNEZ8x16, at)); in mkPCast8x16()
1404 static IRAtom* mkPCast16x8 ( MCEnv* mce, IRAtom* at ) in mkPCast16x8() argument
1406 return assignNew(mce, Ity_V128, unop(Iop_CmpNEZ16x8, at)); in mkPCast16x8()
1409 static IRAtom* mkPCast32x4 ( MCEnv* mce, IRAtom* at ) in mkPCast32x4() argument
1411 return assignNew(mce, Ity_V128, unop(Iop_CmpNEZ32x4, at)); in mkPCast32x4()
1414 static IRAtom* mkPCast64x2 ( MCEnv* mce, IRAtom* at ) in mkPCast64x2() argument
1416 return assignNew(mce, Ity_V128, unop(Iop_CmpNEZ64x2, at)); in mkPCast64x2()
1457 IRAtom* binary32Fx4 ( MCEnv* mce, IRAtom* vatomX, IRAtom* vatomY ) in binary32Fx4() argument
1460 tl_assert(isShadowAtom(mce, vatomX)); in binary32Fx4()
1461 tl_assert(isShadowAtom(mce, vatomY)); in binary32Fx4()
1462 at = mkUifUV128(mce, vatomX, vatomY); in binary32Fx4()
1463 at = assignNew(mce, Ity_V128, mkPCast32x4(mce, at)); in binary32Fx4()
1468 IRAtom* unary32Fx4 ( MCEnv* mce, IRAtom* vatomX ) in unary32Fx4() argument
1471 tl_assert(isShadowAtom(mce, vatomX)); in unary32Fx4()
1472 at = assignNew(mce, Ity_V128, mkPCast32x4(mce, vatomX)); in unary32Fx4()
1477 IRAtom* binary32F0x4 ( MCEnv* mce, IRAtom* vatomX, IRAtom* vatomY ) in binary32F0x4() argument
1480 tl_assert(isShadowAtom(mce, vatomX)); in binary32F0x4()
1481 tl_assert(isShadowAtom(mce, vatomY)); in binary32F0x4()
1482 at = mkUifUV128(mce, vatomX, vatomY); in binary32F0x4()
1483 at = assignNew(mce, Ity_I32, unop(Iop_V128to32, at)); in binary32F0x4()
1484 at = mkPCastTo(mce, Ity_I32, at); in binary32F0x4()
1485 at = assignNew(mce, Ity_V128, binop(Iop_SetV128lo32, vatomX, at)); in binary32F0x4()
1490 IRAtom* unary32F0x4 ( MCEnv* mce, IRAtom* vatomX ) in unary32F0x4() argument
1493 tl_assert(isShadowAtom(mce, vatomX)); in unary32F0x4()
1494 at = assignNew(mce, Ity_I32, unop(Iop_V128to32, vatomX)); in unary32F0x4()
1495 at = mkPCastTo(mce, Ity_I32, at); in unary32F0x4()
1496 at = assignNew(mce, Ity_V128, binop(Iop_SetV128lo32, vatomX, at)); in unary32F0x4()
1503 IRAtom* binary64Fx2 ( MCEnv* mce, IRAtom* vatomX, IRAtom* vatomY ) in binary64Fx2() argument
1506 tl_assert(isShadowAtom(mce, vatomX)); in binary64Fx2()
1507 tl_assert(isShadowAtom(mce, vatomY)); in binary64Fx2()
1508 at = mkUifUV128(mce, vatomX, vatomY); in binary64Fx2()
1509 at = assignNew(mce, Ity_V128, mkPCast64x2(mce, at)); in binary64Fx2()
1514 IRAtom* unary64Fx2 ( MCEnv* mce, IRAtom* vatomX ) in unary64Fx2() argument
1517 tl_assert(isShadowAtom(mce, vatomX)); in unary64Fx2()
1518 at = assignNew(mce, Ity_V128, mkPCast64x2(mce, vatomX)); in unary64Fx2()
1523 IRAtom* binary64F0x2 ( MCEnv* mce, IRAtom* vatomX, IRAtom* vatomY ) in binary64F0x2() argument
1526 tl_assert(isShadowAtom(mce, vatomX)); in binary64F0x2()
1527 tl_assert(isShadowAtom(mce, vatomY)); in binary64F0x2()
1528 at = mkUifUV128(mce, vatomX, vatomY); in binary64F0x2()
1529 at = assignNew(mce, Ity_I64, unop(Iop_V128to64, at)); in binary64F0x2()
1530 at = mkPCastTo(mce, Ity_I64, at); in binary64F0x2()
1531 at = assignNew(mce, Ity_V128, binop(Iop_SetV128lo64, vatomX, at)); in binary64F0x2()
1536 IRAtom* unary64F0x2 ( MCEnv* mce, IRAtom* vatomX ) in unary64F0x2() argument
1539 tl_assert(isShadowAtom(mce, vatomX)); in unary64F0x2()
1540 at = assignNew(mce, Ity_I64, unop(Iop_V128to64, vatomX)); in unary64F0x2()
1541 at = mkPCastTo(mce, Ity_I64, at); in unary64F0x2()
1542 at = assignNew(mce, Ity_V128, binop(Iop_SetV128lo64, vatomX, at)); in unary64F0x2()
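
The Fx4/Fx2 handlers treat lanes like integer ops (UifU, then a per-lane pessimistic cast via CmpNEZ32x4/CmpNEZ64x2), while the F0x4/F0x2 variants model SSE scalar instructions: only the lowest lane is combined and pessimistically cast, and the upper lanes pass through from the first operand, which is what the SetV128lo32/SetV128lo64 reinsertion expresses. A runnable model of binary64F0x2 on a two-word vector (struct name illustrative):

    #include <stdint.h>

    typedef struct { uint64_t hi, lo; } V128Bits;

    static V128Bits binary64F0x2_model ( V128Bits vx, V128Bits vy )
    {
       V128Bits r = vx;               /* upper lane: shadow of operand X  */
       uint64_t lo = vx.lo | vy.lo;   /* mkUifUV128, then Iop_V128to64    */
       r.lo = lo ? ~0ULL : 0ULL;      /* mkPCastTo within the low lane    */
       return r;                      /* Iop_SetV128lo64(vx, pcast)       */
    }
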
1575 IRAtom* vectorNarrowV128 ( MCEnv* mce, IROp narrow_op, in vectorNarrowV128() argument
1586 tl_assert(isShadowAtom(mce,vatom1)); in vectorNarrowV128()
1587 tl_assert(isShadowAtom(mce,vatom2)); in vectorNarrowV128()
1588 at1 = assignNew(mce, Ity_V128, pcast(mce, vatom1)); in vectorNarrowV128()
1589 at2 = assignNew(mce, Ity_V128, pcast(mce, vatom2)); in vectorNarrowV128()
1590 at3 = assignNew(mce, Ity_V128, binop(narrow_op, at1, at2)); in vectorNarrowV128()
1599 IRAtom* binary8Ix16 ( MCEnv* mce, IRAtom* vatom1, IRAtom* vatom2 ) in binary8Ix16() argument
1602 at = mkUifUV128(mce, vatom1, vatom2); in binary8Ix16()
1603 at = mkPCast8x16(mce, at); in binary8Ix16()
1608 IRAtom* binary16Ix8 ( MCEnv* mce, IRAtom* vatom1, IRAtom* vatom2 ) in binary16Ix8() argument
1611 at = mkUifUV128(mce, vatom1, vatom2); in binary16Ix8()
1612 at = mkPCast16x8(mce, at); in binary16Ix8()
1617 IRAtom* binary32Ix4 ( MCEnv* mce, IRAtom* vatom1, IRAtom* vatom2 ) in binary32Ix4() argument
1620 at = mkUifUV128(mce, vatom1, vatom2); in binary32Ix4()
1621 at = mkPCast32x4(mce, at); in binary32Ix4()
1626 IRAtom* binary64Ix2 ( MCEnv* mce, IRAtom* vatom1, IRAtom* vatom2 ) in binary64Ix2() argument
1629 at = mkUifUV128(mce, vatom1, vatom2); in binary64Ix2()
1630 at = mkPCast64x2(mce, at); in binary64Ix2()
1640 IRAtom* expr2vbits_Binop ( MCEnv* mce, in expr2vbits_Binop() argument
1649 IRAtom* vatom1 = expr2vbits( mce, atom1 ); in expr2vbits_Binop()
1650 IRAtom* vatom2 = expr2vbits( mce, atom2 ); in expr2vbits_Binop()
1652 tl_assert(isOriginalAtom(mce,atom1)); in expr2vbits_Binop()
1653 tl_assert(isOriginalAtom(mce,atom2)); in expr2vbits_Binop()
1654 tl_assert(isShadowAtom(mce,vatom1)); in expr2vbits_Binop()
1655 tl_assert(isShadowAtom(mce,vatom2)); in expr2vbits_Binop()
1671 complainIfUndefined(mce, atom2); in expr2vbits_Binop()
1672 return assignNew(mce, Ity_V128, binop(op, vatom1, atom2)); in expr2vbits_Binop()
1685 return binary8Ix16(mce, vatom1, vatom2); in expr2vbits_Binop()
1701 return binary16Ix8(mce, vatom1, vatom2); in expr2vbits_Binop()
1711 return binary32Ix4(mce, vatom1, vatom2); in expr2vbits_Binop()
1719 return binary64Ix2(mce, vatom1, vatom2); in expr2vbits_Binop()
1724 return vectorNarrowV128(mce, op, vatom1, vatom2); in expr2vbits_Binop()
1735 return binary64Fx2(mce, vatom1, vatom2); in expr2vbits_Binop()
1746 return binary64F0x2(mce, vatom1, vatom2); in expr2vbits_Binop()
1759 return binary32Fx4(mce, vatom1, vatom2); in expr2vbits_Binop()
1770 return binary32F0x4(mce, vatom1, vatom2); in expr2vbits_Binop()
1784 return assignNew(mce, Ity_V128, binop(op, vatom1, vatom2)); in expr2vbits_Binop()
1793 return mkLazy2(mce, Ity_I64, vatom1, vatom2); in expr2vbits_Binop()
1800 return mkLazy2(mce, Ity_I32, vatom1, vatom2); in expr2vbits_Binop()
1804 return mkLazy2(mce, Ity_I16, vatom1, vatom2); in expr2vbits_Binop()
1815 return mkLazy2(mce, Ity_I64, vatom1, vatom2); in expr2vbits_Binop()
1818 return mkLazy2(mce, Ity_I32, vatom1, vatom2); in expr2vbits_Binop()
1824 return mkLazy2(mce, Ity_I64, vatom1, vatom2); in expr2vbits_Binop()
1827 return assignNew(mce, Ity_I32, binop(op, vatom1, vatom2)); in expr2vbits_Binop()
1829 return assignNew(mce, Ity_I64, binop(op, vatom1, vatom2)); in expr2vbits_Binop()
1833 IRAtom* vLo32 = mkLeft32(mce, mkUifU32(mce, vatom1,vatom2)); in expr2vbits_Binop()
1834 IRAtom* vHi32 = mkPCastTo(mce, Ity_I32, vLo32); in expr2vbits_Binop()
1835 return assignNew(mce, Ity_I64, binop(Iop_32HLto64, vHi32, vLo32)); in expr2vbits_Binop()
1840 IRAtom* vLo16 = mkLeft16(mce, mkUifU16(mce, vatom1,vatom2)); in expr2vbits_Binop()
1841 IRAtom* vHi16 = mkPCastTo(mce, Ity_I16, vLo16); in expr2vbits_Binop()
1842 return assignNew(mce, Ity_I32, binop(Iop_16HLto32, vHi16, vLo16)); in expr2vbits_Binop()
1847 IRAtom* vLo8 = mkLeft8(mce, mkUifU8(mce, vatom1,vatom2)); in expr2vbits_Binop()
1848 IRAtom* vHi8 = mkPCastTo(mce, Ity_I8, vLo8); in expr2vbits_Binop()
1849 return assignNew(mce, Ity_I16, binop(Iop_8HLto16, vHi8, vLo8)); in expr2vbits_Binop()
1854 return expensiveAdd32(mce, vatom1,vatom2, atom1,atom2); in expr2vbits_Binop()
1858 return mkLeft32(mce, mkUifU32(mce, vatom1,vatom2)); in expr2vbits_Binop()
1863 return mkLeft16(mce, mkUifU16(mce, vatom1,vatom2)); in expr2vbits_Binop()
1867 return mkLeft8(mce, mkUifU8(mce, vatom1,vatom2)); in expr2vbits_Binop()
1872 return mkPCastTo(mce, Ity_I1, mkUifU32(mce, vatom1,vatom2)); in expr2vbits_Binop()
1875 return mkPCastTo(mce, Ity_I1, mkUifU16(mce, vatom1,vatom2)); in expr2vbits_Binop()
1878 return mkPCastTo(mce, Ity_I1, mkUifU8(mce, vatom1,vatom2)); in expr2vbits_Binop()
1883 complainIfUndefined(mce, atom2); in expr2vbits_Binop()
1884 return assignNew(mce, Ity_I32, binop(op, vatom1, atom2)); in expr2vbits_Binop()
1888 complainIfUndefined(mce, atom2); in expr2vbits_Binop()
1889 return assignNew(mce, Ity_I16, binop(op, vatom1, atom2)); in expr2vbits_Binop()
1893 complainIfUndefined(mce, atom2); in expr2vbits_Binop()
1894 return assignNew(mce, Ity_I8, binop(op, vatom1, atom2)); in expr2vbits_Binop()
1898 complainIfUndefined(mce, atom2); in expr2vbits_Binop()
1899 return assignNew(mce, Ity_I64, binop(op, vatom1, atom2)); in expr2vbits_Binop()
1936 mce, in expr2vbits_Binop()
1938 difd(mce, uifu(mce, vatom1, vatom2), in expr2vbits_Binop()
1939 difd(mce, improve(mce, atom1, vatom1), in expr2vbits_Binop()
1940 improve(mce, atom2, vatom2) ) ) ); in expr2vbits_Binop()
1943 return mkUifU8(mce, vatom1, vatom2); in expr2vbits_Binop()
1945 return mkUifU16(mce, vatom1, vatom2); in expr2vbits_Binop()
1947 return mkUifU32(mce, vatom1, vatom2); in expr2vbits_Binop()
1949 return mkUifU64(mce, vatom1, vatom2); in expr2vbits_Binop()
1951 return mkUifUV128(mce, vatom1, vatom2); in expr2vbits_Binop()
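
Lines 1936-1940 show how And and Or assemble the pieces: start from the naive UifU of the two shadows, then And in ("DifD") each operand's improvement term, so defined-zero bits (for And) or defined-one bits (for Or) rescue the corresponding result bits. A runnable model of the And32 case; Or32 is identical with ~data in the improvements:

    #include <stdint.h>

    static uint32_t and32_shadow ( uint32_t a1, uint32_t v1,
                                   uint32_t a2, uint32_t v2 )
    {
       uint32_t naive = v1 | v2;     /* uifu: undefined if either is   */
       uint32_t imp1  = a1 | v1;     /* improve(atom1, vatom1)         */
       uint32_t imp2  = a2 | v2;     /* improve(atom2, vatom2)         */
       return naive & imp1 & imp2;   /* difd the improvements back in  */
    }

Sanity check: if a1 is a fully defined zero then imp1 == 0 and the result is fully defined regardless of v2, matching x & 0 == 0.
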
1961 IRExpr* expr2vbits_Unop ( MCEnv* mce, IROp op, IRAtom* atom ) in expr2vbits_Unop() argument
1963 IRAtom* vatom = expr2vbits( mce, atom ); in expr2vbits_Unop()
1964 tl_assert(isOriginalAtom(mce,atom)); in expr2vbits_Unop()
1968 return unary64Fx2(mce, vatom); in expr2vbits_Unop()
1971 return unary64F0x2(mce, vatom); in expr2vbits_Unop()
1975 return unary32Fx4(mce, vatom); in expr2vbits_Unop()
1980 return unary32F0x4(mce, vatom); in expr2vbits_Unop()
1984 return assignNew(mce, Ity_V128, unop(op, vatom)); in expr2vbits_Unop()
1995 return mkPCastTo(mce, Ity_I64, vatom); in expr2vbits_Unop()
1999 return mkPCastTo(mce, Ity_I32, vatom); in expr2vbits_Unop()
2005 return assignNew(mce, Ity_I64, unop(op, vatom)); in expr2vbits_Unop()
2014 return assignNew(mce, Ity_I32, unop(op, vatom)); in expr2vbits_Unop()
2020 return assignNew(mce, Ity_I16, unop(op, vatom)); in expr2vbits_Unop()
2025 return assignNew(mce, Ity_I8, unop(op, vatom)); in expr2vbits_Unop()
2028 return assignNew(mce, Ity_I1, unop(Iop_32to1, vatom)); in expr2vbits_Unop()
2050 IRAtom* expr2vbits_LDle_WRK ( MCEnv* mce, IRType ty, IRAtom* addr, UInt bias ) in expr2vbits_LDle_WRK() argument
2058 tl_assert(isOriginalAtom(mce,addr)); in expr2vbits_LDle_WRK()
2062 complainIfUndefined( mce, addr ); in expr2vbits_LDle_WRK()
2090 IRType tyAddr = mce->hWordTy; in expr2vbits_LDle_WRK()
2094 addrAct = assignNew(mce, tyAddr, binop(mkAdd, addr, eBias) ); in expr2vbits_LDle_WRK()
2099 datavbits = newIRTemp(mce->bb->tyenv, ty); in expr2vbits_LDle_WRK()
2103 setHelperAnns( mce, di ); in expr2vbits_LDle_WRK()
2104 stmt( mce->bb, IRStmt_Dirty(di) ); in expr2vbits_LDle_WRK()
2111 IRAtom* expr2vbits_LDle ( MCEnv* mce, IRType ty, IRAtom* addr, UInt bias ) in expr2vbits_LDle() argument
2119 return expr2vbits_LDle_WRK(mce, ty, addr, bias); in expr2vbits_LDle()
2121 v64lo = expr2vbits_LDle_WRK(mce, Ity_I64, addr, bias); in expr2vbits_LDle()
2122 v64hi = expr2vbits_LDle_WRK(mce, Ity_I64, addr, bias+8); in expr2vbits_LDle()
2123 return assignNew( mce, in expr2vbits_LDle()
2133 IRAtom* expr2vbits_ITE ( MCEnv* mce, in expr2vbits_ITE() argument
2143 tl_assert(isOriginalAtom(mce, cond)); in expr2vbits_ITE()
2144 tl_assert(isOriginalAtom(mce, iftrue)); in expr2vbits_ITE()
2145 tl_assert(isOriginalAtom(mce, iffalse)); in expr2vbits_ITE()
2147 vbitsC = expr2vbits(mce, cond); in expr2vbits_ITE()
2148 vbits0 = expr2vbits(mce, iffalse); in expr2vbits_ITE()
2149 vbits1 = expr2vbits(mce, iftrue); in expr2vbits_ITE()
2150 ty = typeOfIRExpr(mce->bb->tyenv, vbits0); in expr2vbits_ITE()
2153 mkUifU(mce, ty, assignNew(mce, ty, IRExpr_ITE(cond, vbits1, vbits0)), in expr2vbits_ITE()
2154 mkPCastTo(mce, ty, vbitsC) ); in expr2vbits_ITE()
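
expr2vbits_ITE selects between the two arms' shadows using the original condition, then UifUs in the pessimistically cast shadow of the condition itself: an undefined condition makes the whole result undefined, otherwise the chosen arm's vbits pass through. A runnable model:

    #include <stdint.h>

    static uint32_t ite_shadow ( int cond, uint32_t vcond,
                                 uint32_t vtrue, uint32_t vfalse )
    {
       uint32_t picked = cond ? vtrue : vfalse;   /* ITE on the shadows   */
       return picked | (vcond ? ~0u : 0u);        /* UifU with PCast(vC)  */
    }
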
2160 IRExpr* expr2vbits ( MCEnv* mce, IRExpr* e ) in expr2vbits() argument
2165 return shadow_GET( mce, e->Iex.Get.offset, e->Iex.Get.ty ); in expr2vbits()
2168 return shadow_GETI( mce, e->Iex.GetI.descr, in expr2vbits()
2172 return IRExpr_RdTmp( findShadowTmp(mce, e->Iex.RdTmp.tmp) ); in expr2vbits()
2175 return definedOfType(shadowType(typeOfIRExpr(mce->bb->tyenv, e))); in expr2vbits()
2179 mce, in expr2vbits()
2185 return expr2vbits_Unop( mce, e->Iex.Unop.op, e->Iex.Unop.arg ); in expr2vbits()
2188 return expr2vbits_LDle( mce, e->Iex.Load.ty, in expr2vbits()
2192 return mkLazyN( mce, e->Iex.CCall.args, in expr2vbits()
2197 return expr2vbits_ITE( mce, e->Iex.ITE.cond, e->Iex.ITE.iftrue, in expr2vbits()
2215 IRExpr* zwidenToHostWord ( MCEnv* mce, IRAtom* vatom ) in zwidenToHostWord() argument
2220 tl_assert(isShadowAtom(mce,vatom)); in zwidenToHostWord()
2222 ty = typeOfIRExpr(mce->bb->tyenv, vatom); in zwidenToHostWord()
2223 tyH = mce->hWordTy; in zwidenToHostWord()
2228 case Ity_I16: return assignNew(mce, tyH, unop(Iop_16Uto32, vatom)); in zwidenToHostWord()
2229 case Ity_I8: return assignNew(mce, tyH, unop(Iop_8Uto32, vatom)); in zwidenToHostWord()
2246 void do_shadow_STle ( MCEnv* mce, in do_shadow_STle() argument
2259 tyAddr = mce->hWordTy; in do_shadow_STle()
2270 tl_assert(isOriginalAtom(mce, data)); in do_shadow_STle()
2272 vdata = expr2vbits( mce, data ); in do_shadow_STle()
2277 tl_assert(isOriginalAtom(mce,addr)); in do_shadow_STle()
2278 tl_assert(isShadowAtom(mce,vdata)); in do_shadow_STle()
2280 ty = typeOfIRExpr(mce->bb->tyenv, vdata); in do_shadow_STle()
2284 complainIfUndefined( mce, addr ); in do_shadow_STle()
2310 addrLo64 = assignNew(mce, tyAddr, binop(mkAdd, addr, eBias0) ); in do_shadow_STle()
2311 vdataLo64 = assignNew(mce, Ity_I64, unop(Iop_V128to64, vdata)); in do_shadow_STle()
2317 addrHi64 = assignNew(mce, tyAddr, binop(mkAdd, addr, eBias8) ); in do_shadow_STle()
2318 vdataHi64 = assignNew(mce, Ity_I64, unop(Iop_V128HIto64, vdata)); in do_shadow_STle()
2323 setHelperAnns( mce, diLo64 ); in do_shadow_STle()
2324 setHelperAnns( mce, diHi64 ); in do_shadow_STle()
2325 stmt( mce->bb, IRStmt_Dirty(diLo64) ); in do_shadow_STle()
2326 stmt( mce->bb, IRStmt_Dirty(diHi64) ); in do_shadow_STle()
2336 addrAct = assignNew(mce, tyAddr, binop(mkAdd, addr, eBias) ); in do_shadow_STle()
2350 zwidenToHostWord( mce, vdata ))); in do_shadow_STle()
2352 setHelperAnns( mce, di ); in do_shadow_STle()
2353 stmt( mce->bb, IRStmt_Dirty(di) ); in do_shadow_STle()
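
do_shadow_STle checks the address for definedness, then writes the shadow of the stored data through annotated helper calls. Lines 2310-2326 show the V128 case split into two 64-bit halves, the low half (Iop_V128to64) at bias 0 and the high half (Iop_V128HIto64) at bias +8, each with its own dirty call; smaller types go through a single call after zwidenToHostWord. A memory-level model of the little-endian split (names illustrative; the real write goes through helper calls, not memcpy):

    #include <stdint.h>
    #include <string.h>

    typedef struct { uint64_t hi, lo; } V128Bits;

    static void shadow_store_V128 ( uint8_t* shadow_base, V128Bits vdata )
    {
       memcpy(shadow_base + 0, &vdata.lo, 8);   /* Iop_V128to64 half   */
       memcpy(shadow_base + 8, &vdata.hi, 8);   /* Iop_V128HIto64 half */
    }
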
2375 void do_shadow_Dirty ( MCEnv* mce, IRDirty* d ) in do_shadow_Dirty() argument
2383 complainIfUndefined(mce, d->guard); in do_shadow_Dirty()
2393 here = mkPCastTo( mce, Ity_I32, expr2vbits(mce, d->args[i]) ); in do_shadow_Dirty()
2394 curr = mkUifU32(mce, here, curr); in do_shadow_Dirty()
2405 if (isAlwaysDefd(mce, d->fxState[i].offset, d->fxState[i].size )) { in do_shadow_Dirty()
2424 src = assignNew( mce, tySrc, in do_shadow_Dirty()
2425 shadow_GET(mce, gOff, tySrc ) ); in do_shadow_Dirty()
2426 here = mkPCastTo( mce, Ity_I32, src ); in do_shadow_Dirty()
2427 curr = mkUifU32(mce, here, curr); in do_shadow_Dirty()
2444 complainIfUndefined(mce, d->mAddr); in do_shadow_Dirty()
2446 tyAddr = typeOfIRExpr(mce->bb->tyenv, d->mAddr); in do_shadow_Dirty()
2448 tl_assert(tyAddr == mce->hWordTy); /* not really right */ in do_shadow_Dirty()
2458 mce, Ity_I32, in do_shadow_Dirty()
2459 expr2vbits_LDle ( mce, Ity_I32, in do_shadow_Dirty()
2462 curr = mkUifU32(mce, here, curr); in do_shadow_Dirty()
2468 mce, Ity_I32, in do_shadow_Dirty()
2469 expr2vbits_LDle ( mce, Ity_I16, in do_shadow_Dirty()
2472 curr = mkUifU32(mce, here, curr); in do_shadow_Dirty()
2484 dst = findShadowTmp(mce, d->tmp); in do_shadow_Dirty()
2485 tyDst = typeOfIRTemp(mce->bb->tyenv, d->tmp); in do_shadow_Dirty()
2486 assign( mce->bb, dst, mkPCastTo( mce, tyDst, curr) ); in do_shadow_Dirty()
2495 if (isAlwaysDefd(mce, d->fxState[i].offset, d->fxState[i].size )) in do_shadow_Dirty()
2509 do_shadow_PUT( mce, gOff, in do_shadow_Dirty()
2511 mkPCastTo( mce, tyDst, curr ) ); in do_shadow_Dirty()
2523 do_shadow_STle( mce, d->mAddr, d->mSize - toDo, in do_shadow_Dirty()
2525 mkPCastTo( mce, Ity_I32, curr ) ); in do_shadow_Dirty()
2530 do_shadow_STle( mce, d->mAddr, d->mSize - toDo, in do_shadow_Dirty()
2532 mkPCastTo( mce, Ity_I16, curr ) ); in do_shadow_Dirty()
2624 MCEnv mce; in mc_instrument() local
2634 mce.bb = bb; in mc_instrument()
2635 mce.layout = layout; in mc_instrument()
2636 mce.n_originalTmps = bb->tyenv->types_used; in mc_instrument()
2637 mce.hWordTy = hWordTy; in mc_instrument()
2638 mce.tmpMap = LibVEX_Alloc(mce.n_originalTmps * sizeof(IRTemp)); in mc_instrument()
2639 for (i = 0; i < mce.n_originalTmps; i++) in mc_instrument()
2640 mce.tmpMap[i] = IRTemp_INVALID; in mc_instrument()
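
The setup of mc_instrument is fully visible above and assembles the whole instrumentation environment: the block being rewritten, the guest-state layout, the number of original temps (everything in the type environment before instrumentation adds shadows), the host word type, and the shadow map that findShadowTmp fills lazily. Reassembled for readability:

    MCEnv mce;
    mce.bb             = bb;
    mce.layout         = layout;
    mce.n_originalTmps = bb->tyenv->types_used;
    mce.hWordTy        = hWordTy;
    mce.tmpMap         = LibVEX_Alloc(mce.n_originalTmps * sizeof(IRTemp));
    for (i = 0; i < mce.n_originalTmps; i++)
       mce.tmpMap[i] = IRTemp_INVALID;
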
2670 assign( bb, findShadowTmp(&mce, st->Ist.WrTmp.tmp), in mc_instrument()
2671 expr2vbits( &mce, st->Ist.WrTmp.data) ); in mc_instrument()
2675 do_shadow_PUT( &mce, in mc_instrument()
2682 do_shadow_PUTI( &mce, in mc_instrument()
2690 do_shadow_STle( &mce, st->Ist.Store.addr, 0/* addr bias */, in mc_instrument()
2697 complainIfUndefined( &mce, st->Ist.Exit.guard ); in mc_instrument()
2701 do_shadow_Dirty( &mce, st->Ist.Dirty.details ); in mc_instrument()
2738 complainIfUndefined( &mce, bb->next ); in mc_instrument()