Home
last modified time | relevance | path

Searched refs:mkU32 (Results 1 – 17 of 17) sorted by relevance

/external/valgrind/VEX/priv/
guest_mips_toIR.c:467 mkU32(extend_s_16to32(imm)))); \
494 assign(t2, binop(Iop_And32, mkexpr(t1), mkU32(0xFFFFFFFC))); \
496 assign(t4, binop(Iop_And32, mkexpr(t1), mkU32(0x00000003)))
504 mkU32(0x0000001F) \
516 mkU32(0x0000001F) \
529 putIReg(rt, binop(op, getIReg(rs), mkU32(imm)));
542 IRExpr_ITE( binop(Iop_CmpEQ32, mkU32(cc), mkU32(0)), \
545 mkU32(0x1)));
548 putPC(mkU32(guest_PC_curr_instr + 4)); \
553 (mode64 ? mkU64(0xFFFFFFFFFFFFFFFFULL) : mkU32(0xFFFFFFFF))
[all …]
guest_ppc_toIR.c:656 static IRExpr* mkU32 ( UInt i ) in mkU32() function
977 binop(Iop_Add32, mkU32(0x7FFFFFFF), in mkQNarrow64Sto32()
994 binop(Iop_CmpEQ32, mkexpr(hi32), mkU32(0)), in mkQNarrow64Uto32()
998 mkU32(0xFFFFFFFF)); in mkQNarrow64Uto32()
1106 return ty == Ity_I64 ? mkU64(imm64) : mkU32((UInt)imm64); in mkSzImm()
1124 mkU32(extend_s_16to32(imm16)) ); in mkSzExtendS16()
1133 mkU32(imm32) ); in mkSzExtendS32()
1618 assign(mask[0], mkU32(0x55555555)); in gen_POPCOUNT()
1619 assign(mask[1], mkU32(0x33333333)); in gen_POPCOUNT()
1620 assign(mask[2], mkU32(0x0F0F0F0F)); in gen_POPCOUNT()
[all …]
guest_x86_helpers.c:820 # define mkU32(_n) IRExpr_Const(IRConst_U32(_n)) in guest_x86_spechelper() macro
854 mkU32(0))); in guest_x86_spechelper()
882 mkU32(1)); in guest_x86_spechelper()
898 mkU32(1)); in guest_x86_spechelper()
913 mkU32(1)); in guest_x86_spechelper()
928 mkU32(1)); in guest_x86_spechelper()
936 mkU32(0))); in guest_x86_spechelper()
944 mkU32(0))), in guest_x86_spechelper()
945 mkU32(1)); in guest_x86_spechelper()
988 binop(Iop_And32,cc_dep2,mkU32(0xFF)), in guest_x86_spechelper()
[all …]
guest_x86_toIR.c:675 static IRExpr* mkU32 ( UInt i ) in mkU32() function
689 if (ty == Ity_I32) return mkU32(i); in mkU()
810 = mkIRExprVec_5( mkU32(cond), in mk_x86g_calculate_condition()
926 stmt( IRStmt_Put( OFFB_CC_OP, mkU32(ccOp)) ); in setFlags_DEP1_DEP2()
931 stmt( IRStmt_Put( OFFB_CC_NDEP, mkU32(0) )); in setFlags_DEP1_DEP2()
951 stmt( IRStmt_Put( OFFB_CC_OP, mkU32(ccOp)) ); in setFlags_DEP1()
953 stmt( IRStmt_Put( OFFB_CC_DEP2, mkU32(0)) ); in setFlags_DEP1()
956 stmt( IRStmt_Put( OFFB_CC_NDEP, mkU32(0) )); in setFlags_DEP1()
992 mkU32(ccOp), in setFlags_DEP1_DEP2_shift()
1006 mkU32(0), in setFlags_DEP1_DEP2_shift()
[all …]
ir_inject.c:40 #define mkU32(v) IRExpr_Const(IRConst_U32(v)) macro
94 addr = mkU32(haddr); in load()
95 next_addr = binop(Iop_Add32, addr, mkU32(8)); in load()
155 addr = mkU32(haddr); in store()
156 next_addr = binop(Iop_Add32, addr, mkU32(8)); in store()
199 rounding_mode = mkU32(iricb.rounding_mode); in vex_inject_ir()
231 opnd2 = mkU32(*((ULong *)iricb.opnd2)); in vex_inject_ir()
263 opnd3 = mkU32(*((ULong *)iricb.opnd3)); in vex_inject_ir()
296 opnd4 = mkU32(*((ULong *)iricb.opnd4)); in vex_inject_ir()
guest_arm_toIR.c:290 static IRExpr* mkU32 ( UInt i ) in mkU32() function
349 binop(Iop_CmpNE32, mkexpr(guardT), mkU32(0))) ); in storeGuardedLE()
381 binop(Iop_CmpNE32, mkexpr(guardT), mkU32(0))) ); in loadGuardedLE()
396 return mkU32(Irrm_NEAREST); in get_FAKE_roundingmode()
421 return binop(Iop_And32, e, mkU32(~3)); in align4if()
550 e = mkU32(guest_R15_curr_instr_notENC + 8); in getIRegA()
567 e = mkU32(guest_R15_curr_instr_notENC + 4); in getIRegT()
607 IRExpr_ITE( binop(Iop_CmpNE32, mkexpr(guardT), mkU32(0)), in putIRegA()
640 IRExpr_ITE( binop(Iop_CmpNE32, mkexpr(guardT), mkU32(0)), in putIRegT()
735 IRExpr_ITE( binop(Iop_CmpNE32, mkexpr(guardT), mkU32(0)), in putDReg()
[all …]
guest_s390_helpers.c:1811 #define mkU32(v) IRExpr_Const(IRConst_U32(v)) macro
1904 return mkU32(1); in guest_s390x_spechelper()
1907 return mkU32(0); in guest_s390x_spechelper()
1940 return mkU32(1); in guest_s390x_spechelper()
1943 return mkU32(0); in guest_s390x_spechelper()
1984 return mkU32(1); in guest_s390x_spechelper()
1987 return mkU32(0); in guest_s390x_spechelper()
2007 return mkU32(1); in guest_s390x_spechelper()
2016 return mkU32(0); in guest_s390x_spechelper()
2055 word = binop(Iop_And32, word, mkU32(imask)); in guest_s390x_spechelper()
[all …]
guest_arm_helpers.c:820 # define mkU32(_n) IRExpr_Const(IRConst_U32(_n)) in guest_arm_spechelper() macro
924 binop(Iop_CmpNE32, cc_ndep, mkU32(0)), in guest_arm_spechelper()
937 binop(Iop_CmpEQ32, cc_dep1, mkU32(0))); in guest_arm_spechelper()
942 binop(Iop_CmpNE32, cc_dep1, mkU32(0))); in guest_arm_spechelper()
950 mkU32(0))); in guest_arm_spechelper()
957 mkU32(1))); in guest_arm_spechelper()
968 mkU32(1)); in guest_arm_spechelper()
976 mkU32(1)), in guest_arm_spechelper()
977 mkU32(1)); in guest_arm_spechelper()
986 mkU32(1)); in guest_arm_spechelper()
[all …]
guest_s390_toIR.c:200 mkU32(UInt value) in mkU32() function
488 emulation_failure_with_expr(mkU32(fail_kind)); in emulation_failure()
505 emulation_warning_with_expr(mkU32(warn_kind)); in emulation_warning()
1539 assign(fpc_bits, binop(Iop_And32, get_fpc_w0(), mkU32(7))); in get_bfp_rounding_mode_from_fpc()
1551 IRExpr *rm_s390 = mkite(binop(Iop_CmpLE32S, mkexpr(fpc_bits), mkU32(3)), in get_bfp_rounding_mode_from_fpc()
1553 mkU32(S390_FPC_BFP_ROUND_NEAREST_EVEN)); in get_bfp_rounding_mode_from_fpc()
1556 return binop(Iop_And32, binop(Iop_Sub32, mkU32(4), rm_s390), mkU32(3)); in get_bfp_rounding_mode_from_fpc()
1575 case S390_BFP_ROUND_NEAREST_EVEN: rm = mkU32(Irrm_NEAREST); break; in encode_bfp_rounding_mode()
1576 case S390_BFP_ROUND_ZERO: rm = mkU32(Irrm_ZERO); break; in encode_bfp_rounding_mode()
1577 case S390_BFP_ROUND_POSINF: rm = mkU32(Irrm_PosINF); break; in encode_bfp_rounding_mode()
[all …]
guest_amd64_toIR.c:275 static IRExpr* mkU32 ( ULong i ) in mkU32() function
291 case Ity_I32: return mkU32(i); in mkU()
4745 cmp = binop(Iop_CmpEQ32, mkexpr(tc), mkU32(0)); in dis_REP_op()
4756 putIReg32(R_RCX, binop(Iop_Sub32, mkexpr(tc), mkU32(1)) ); in dis_REP_op()
4914 assign(mask[0], mkU32(0x55555555)); in gen_POPCOUNT()
4915 assign(mask[1], mkU32(0x33333333)); in gen_POPCOUNT()
4916 assign(mask[2], mkU32(0x0F0F0F0F)); in gen_POPCOUNT()
4917 assign(mask[3], mkU32(0x00FF00FF)); in gen_POPCOUNT()
4918 assign(mask[4], mkU32(0x0000FFFF)); in gen_POPCOUNT()
5103 return binop( Iop_And32, get_fpround(), mkU32(3) ); in get_roundingmode()
[all …]
guest_arm64_toIR.c:254 static IRExpr* mkU32 ( UInt i ) in mkU32() function
790 case Ity_I32: return mkU32((UInt)(imm & 0xFFFFFFFFULL)); in mkU()
1284 return mkU32(0); in getIReg32orZR()
1672 mkU32(2)), in mk_get_IR_rounding_mode()
1675 mkU32(1)) in mk_get_IR_rounding_mode()
2446 assign(argR, mkU32(uimm12)); in dis_ARM64_data_processing_immediate()
2530 IRExpr* argR = mkU32((UInt)imm); in dis_ARM64_data_processing_immediate()
2569 putIRegOrZR(is64, dd, is64 ? mkU64(imm64) : mkU32((UInt)imm64)); in dis_ARM64_data_processing_immediate()
2576 putIRegOrZR(is64, dd, is64 ? mkU64(imm64) : mkU32((UInt)imm64)); in dis_ARM64_data_processing_immediate()
2601 binop(Iop_And32, mkexpr(old), mkU32(~mask)), in dis_ARM64_data_processing_immediate()
[all …]
guest_amd64_helpers.c:1024 # define mkU32(_n) IRExpr_Const(IRConst_U32(_n)) in guest_amd64_spechelper() macro
1529 mkU32(0))); in guest_amd64_spechelper()
1536 mkU32(0))); in guest_amd64_spechelper()
1549 mkU32(0))); in guest_amd64_spechelper()
1648 mkU32(0))); in guest_amd64_spechelper()
1680 mkU32(0))); in guest_amd64_spechelper()
1686 mkU32(0))); in guest_amd64_spechelper()
1847 # undef mkU32 in guest_amd64_spechelper()
host_ppc_isel.c:199 static IRExpr* mkU32 ( UInt i ) in mkU32() function
1363 zeros = mk_AvDuplicateRI(env, mkU32(0), IEndianess); in isNan()
1364 msk_exp = mk_AvDuplicateRI(env, mkU32(0x7F800000), IEndianess); in isNan()
1365 msk_mnt = mk_AvDuplicateRI(env, mkU32(0x7FFFFF), IEndianess); in isNan()
/external/valgrind/coregrind/
m_translate.c:994 static IRExpr* mkU32 ( UInt n ) { in mkU32() function
1047 IRExpr*(*mkU)(UInt) = mkU32; in gen_PUSH()
1077 IRStmt_Put(offB_EMNOTE, mkU32(EmWarn_PPC64_redir_overflow)) in gen_PUSH()
1138 IRExpr*(*mkU)(UInt) = mkU32; in gen_POP()
1159 IRStmt_Put(offB_EMNOTE, mkU32(EmWarn_PPC64_redir_underflow)) in gen_POP()
1332 nraddr_szB == 8 ? mkU64(0) : mkU32(0) in mk_preamble__set_NRADDR_to_zero()
1339 addStmtToIRSB(bb, IRStmt_Put(offB_GPR25, mkU32(closure->readdr))); in mk_preamble__set_NRADDR_to_zero()
1352 VG_WORDSIZE==8 ? mkU64(0) : mkU32(0) in mk_preamble__set_NRADDR_to_zero()
1366 VG_WORDSIZE==8 ? mkU64(0) : mkU32(0) in mk_preamble__set_NRADDR_to_zero()
1398 addStmtToIRSB(bb, IRStmt_Put(offB_GPR25, mkU32(closure->readdr))); in mk_preamble__set_NRADDR_to_nraddr()
/external/valgrind/memcheck/
mc_translate.c:443 #define mkU32(_n) IRExpr_Const(IRConst_U32(_n)) macro
1016 top = mkU32(0xFFFFFFFF); in expensiveCmpEQorNE()
1141 threeLeft1 = m64 ? mkU64(3<<1) : mkU32(3<<1); in doCmpORD()
1164 sevenLeft1 = m64 ? mkU64(7<<1) : mkU32(7<<1); in doCmpORD()
2021 one = mkU32(1); in expensiveCountTrailingZeroes()
4787 eBias = tyAddr==Ity_I32 ? mkU32(bias) : mkU64(bias); in expr2vbits_Load_WRK()
5216 eBiasQ0 = tyAddr==Ity_I32 ? mkU32(bias+offQ0) : mkU64(bias+offQ0); in do_shadow_Store()
5225 eBiasQ1 = tyAddr==Ity_I32 ? mkU32(bias+offQ1) : mkU64(bias+offQ1); in do_shadow_Store()
5234 eBiasQ2 = tyAddr==Ity_I32 ? mkU32(bias+offQ2) : mkU64(bias+offQ2); in do_shadow_Store()
5243 eBiasQ3 = tyAddr==Ity_I32 ? mkU32(bias+offQ3) : mkU64(bias+offQ3); in do_shadow_Store()
[all …]
/external/valgrind/exp-dhat/
dh_main.c:767 #define mkU32(_n) IRExpr_Const(IRConst_U32(_n)) macro
845 ? binop(Iop_Sub32, mkexpr(sp), mkU32(rz_szB)) in addMemEvent()
863 ? binop(Iop_CmpLT32U, mkU32(THRESH), mkexpr(diff)) in addMemEvent()
1021 #undef mkU32
/external/valgrind/helgrind/
hg_main.c:4403 #define mkU32(_n) IRExpr_Const(IRConst_U32(_n)) macro
4565 ? binop(Iop_Add32, mkexpr(addr_minus_sp), mkU32(rz_szB)) in instrument_mem_access()
4575 ? binop(Iop_CmpLT32U, mkU32(THRESH), mkexpr(diff)) in instrument_mem_access()
4857 #undef mkU32