vassert() occurrences under /external/valgrind/VEX/priv/:
main_main.c
   94  #define X86ST(f) vassert(0)
  102  #define AMD64ST(f) vassert(0)
  110  #define PPC32ST(f) vassert(0)
  118  #define PPC64ST(f) vassert(0)
  126  #define S390ST(f) vassert(0)
  134  #define ARMST(f) vassert(0)
  142  #define ARM64ST(f) vassert(0)
  150  #define MIPS32ST(f) vassert(0)
  158  #define MIPS64ST(f) vassert(0)
  166  #define TILEGXST(f) vassert(0)
  [all …]
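The main_main.c hits above are the fall-back definitions of VEX's per-architecture dispatch macros: when a given back end is not compiled in, its statement macro expands to vassert(0), so reaching that code path aborts translation immediately. A minimal sketch of the pattern, assuming a VGA_x86 / VEXMULTIARCH style guard (the guard macro names are assumptions; only the vassert(0) fall-back is taken from the listing):

    /* If the x86 back end is built, X86ST(f) runs the statement f;
       otherwise reaching it is an internal error. Guard names assumed. */
    #if defined(VGA_x86) || defined(VEXMULTIARCH)
    #  define X86ST(f) f
    #else
    #  define X86ST(f) vassert(0)
    #endif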
host_generic_reg_alloc2.c
  175  vassert(search_from_instr >= 0);  in findMostDistantlyMentionedVReg()
  179  vassert(state[k].disp == Bound);  in findMostDistantlyMentionedVReg()
  199  vassert(0 == ((UShort)vreg->spill_offset % 16)); break;  in sanity_check_spill_offset()
  201  vassert(0 == ((UShort)vreg->spill_offset % 8)); break;  in sanity_check_spill_offset()
  214  vassert(used == *size);  in ensureRRLRspace_SLOW()
  242  vassert(size >= 0);  in sortRRLRarray()
  450  vassert(0 == (guest_sizeB % LibVEX_GUEST_STATE_ALIGN));  in doRegisterAllocation()
  451  vassert(0 == (LibVEX_N_SPILL_BYTES % LibVEX_GUEST_STATE_ALIGN));  in doRegisterAllocation()
  452  vassert(0 == (N_SPILL64S % 2));  in doRegisterAllocation()
  457  vassert(instrs_in->arr_used <= 15000);  in doRegisterAllocation()
  [all …]
main_util.c
   80  vassert(temporary_first == &temporary[0]);  in vexAllocSanityCheck()
   81  vassert(temporary_last == &temporary[N_TEMPORARY_BYTES-1]);  in vexAllocSanityCheck()
   82  vassert(permanent_first == &permanent[0]);  in vexAllocSanityCheck()
   83  vassert(permanent_last == &permanent[N_PERMANENT_BYTES-1]);  in vexAllocSanityCheck()
   84  vassert(temporary_first <= temporary_curr);  in vexAllocSanityCheck()
   85  vassert(temporary_curr <= temporary_last);  in vexAllocSanityCheck()
   86  vassert(permanent_first <= permanent_curr);  in vexAllocSanityCheck()
   87  vassert(permanent_curr <= permanent_last);  in vexAllocSanityCheck()
   88  vassert(private_LibVEX_alloc_first <= private_LibVEX_alloc_curr);  in vexAllocSanityCheck()
   89  vassert(private_LibVEX_alloc_curr <= private_LibVEX_alloc_last);  in vexAllocSanityCheck()
  [all …]
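The main_util.c hits are all bounds checks on VEX's private allocation arenas: the first/last pointers must still frame the backing arrays, and the current cursor must lie between them. A sketch of the invariant for the temporary arena, assuming HChar and vassert from VEX's own headers; the arena size and helper name below are assumptions, while the asserted conditions are those quoted above:

    #define N_TEMPORARY_BYTES (1 << 20)              /* size is an assumption */
    static HChar  temporary[N_TEMPORARY_BYTES];
    static HChar* temporary_first = &temporary[0];
    static HChar* temporary_curr  = &temporary[0];
    static HChar* temporary_last  = &temporary[N_TEMPORARY_BYTES-1];

    static void check_temporary_arena ( void )
    {
       /* Bounds must still frame the array; the cursor must stay inside them. */
       vassert(temporary_first == &temporary[0]);
       vassert(temporary_last  == &temporary[N_TEMPORARY_BYTES-1]);
       vassert(temporary_first <= temporary_curr);
       vassert(temporary_curr  <= temporary_last);
    }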
host_generic_regs.c
  103  vassert(univ->size > 0);  in RRegUniverse__check_is_sane()
  104  vassert(univ->size <= N_RREGUNIVERSE_REGS);  in RRegUniverse__check_is_sane()
  105  vassert(univ->allocable <= univ->size);  in RRegUniverse__check_is_sane()
  108  vassert(!hregIsInvalid(reg));  in RRegUniverse__check_is_sane()
  109  vassert(!hregIsVirtual(reg));  in RRegUniverse__check_is_sane()
  110  vassert(hregIndex(reg) == i);  in RRegUniverse__check_is_sane()
  114  vassert(hregIsInvalid(reg));  in RRegUniverse__check_is_sane()
  127  vassert(N_RREGUNIVERSE_REGS == 64);  in ppHRegUsage()
  177  vassert(tab->n_vRegs < N_HREGUSAGE_VREGS);  in addHRegUse()
  202  vassert(ix < N_RREGUNIVERSE_REGS);  in addHRegUse()
  [all …]
host_ppc_defs.c
  170  vassert(r >= 0 && r < 32);  in ppHRegPPC()
  175  vassert(r >= 0 && r < 32);  in ppHRegPPC()
  180  vassert(r >= 0 && r < 32);  in ppHRegPPC()
  185  vassert(r >= 0 && r < 32);  in ppHRegPPC()
  222  vassert(flag == Pcf_NONE);  in mk_PPCCondCode()
  224  vassert(flag != Pcf_NONE);  in mk_PPCCondCode()
  232  vassert(ct != Pct_ALWAYS);  in invertCondTest()
  241  vassert(idx >= -0x8000 && idx < 0x8000);  in PPCAMode_IR()
  324  vassert(imm16 != 0x8000);  in PPCRH_Imm()
  325  vassert(syned == True || syned == False);  in PPCRH_Imm()
  [all …]
host_arm64_defs.c
  156  vassert(r >= 0 && r < 31);  in ppHRegARM64()
  161  vassert(r >= 0 && r < 32);  in ppHRegARM64()
  166  vassert(r >= 0 && r < 32);  in ppHRegARM64()
  217  vassert(-256 <= simm9 && simm9 <= 255);  in ARM64AMode_RI9()
  227  vassert(uimm12 >= 0 && uimm12 <= 4095);  in ARM64AMode_RI12()
  230  default: vassert(0);  in ARM64AMode_RI12()
  264  vassert(0);  in ppARM64AMode()
  310  vassert(imm12 < 4096);  in ARM64RIA_I12()
  311  vassert(shift == 0 || shift == 12);  in ARM64RIA_I12()
  331  vassert(0);  in ppARM64RIA()
  [all …]
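The host_arm64_defs.c hits show range checks on addressing-mode constructors: the RI9 form must fit a signed 9-bit offset and the RI12 form an unsigned 12-bit one. A sketch of the RI9 constructor under those checks; the struct layout and the allocation helper are assumptions, while the -256..255 range is quoted above:

    ARM64AMode* ARM64AMode_RI9 ( HReg reg, Int simm9 ) {
       ARM64AMode* am = LibVEX_Alloc_inline(sizeof(ARM64AMode));
       am->tag               = ARM64am_RI9;
       am->ARM64am.RI9.reg   = reg;
       am->ARM64am.RI9.simm9 = simm9;
       vassert(-256 <= simm9 && simm9 <= 255);  /* must fit in 9 signed bits */
       return am;
    }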
host_arm64_isel.c
  117  vassert(tmp >= 0);  in lookupIRTemp()
  118  vassert(tmp < env->n_vregmap);  in lookupIRTemp()
  125  vassert(tmp >= 0);  in lookupIRTempPair()
  126  vassert(tmp < env->n_vregmap);  in lookupIRTempPair()
  127  vassert(! hregIsInvalid(env->vregmapHI[tmp]));  in lookupIRTempPair()
  234  vassert(off < (8 << 12));  /* otherwise it's unrepresentable */  in mk_baseblock_64bit_access_amode()
  235  vassert((off & 7) == 0);  /* ditto */  in mk_baseblock_64bit_access_amode()
  242  vassert(off < (4 << 12));  /* otherwise it's unrepresentable */  in mk_baseblock_32bit_access_amode()
  243  vassert((off & 3) == 0);  /* ditto */  in mk_baseblock_32bit_access_amode()
  250  vassert(off < (2 << 12));  /* otherwise it's unrepresentable */  in mk_baseblock_16bit_access_amode()
  [all …]
host_mips_defs.c
  162  vassert(hregClass(reg) == HRcInt32 || hregClass(reg) == HRcInt64 ||  in ppHRegMIPS()
  169  vassert(r >= 0 && r < 32);  in ppHRegMIPS()
  174  vassert (r >= 0 && r < 32);  in ppHRegMIPS()
  179  vassert(r >= 0 && r < 32);  in ppHRegMIPS()
  184  vassert(r >= 0 && r < 32);  in ppHRegMIPS()
  592  vassert(imm16 != 0x8000);  in MIPSRH_Imm()
  593  vassert(syned == True || syned == False);  in MIPSRH_Imm()
  700  vassert(immR == False);  /* there's no nor with an immediate operand!? */  in showMIPSAluOp()
  883  vassert(0 == (argiregs & ~mask));  in MIPSInstr_Call()
  884  vassert(is_sane_RetLoc(rloc));  in MIPSInstr_Call()
  [all …]
guest_generic_bb_to_IR.c
  225  vassert(sizeof(HWord) == sizeof(void*));  in bb_to_IR()
  226  vassert(vex_control.guest_max_insns >= 1);  in bb_to_IR()
  227  vassert(vex_control.guest_max_insns <= 100);  in bb_to_IR()
  228  vassert(vex_control.guest_chase_thresh >= 0);  in bb_to_IR()
  229  vassert(vex_control.guest_chase_thresh < vex_control.guest_max_insns);  in bb_to_IR()
  230  vassert(guest_word_type == Ity_I32 || guest_word_type == Ity_I64);  in bb_to_IR()
  233  vassert(szB_GUEST_IP == 4);  in bb_to_IR()
  234  vassert((offB_GUEST_IP % 4) == 0);  in bb_to_IR()
  236  vassert(szB_GUEST_IP == 8);  in bb_to_IR()
  237  vassert((offB_GUEST_IP % 8) == 0);  in bb_to_IR()
  [all …]
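The guest_generic_bb_to_IR.c hits are the configuration checks at the top of bb_to_IR(): the translation budget must be 1..100 guest instructions, the chase threshold must stay below it, and the guest word type must agree with the size and alignment of the guest IP field. Grouped into a helper purely for illustration; the grouping is an assumption, the asserted conditions are quoted above:

    static void check_disasm_config ( IRType guest_word_type,
                                      Int szB_GUEST_IP, Int offB_GUEST_IP )
    {
       vassert(sizeof(HWord) == sizeof(void*));
       vassert(vex_control.guest_max_insns >= 1);
       vassert(vex_control.guest_max_insns <= 100);
       vassert(vex_control.guest_chase_thresh >= 0);
       vassert(vex_control.guest_chase_thresh < vex_control.guest_max_insns);
       vassert(guest_word_type == Ity_I32 || guest_word_type == Ity_I64);
       if (guest_word_type == Ity_I32) {
          vassert(szB_GUEST_IP == 4);
          vassert((offB_GUEST_IP % 4) == 0);
       } else {
          vassert(szB_GUEST_IP == 8);
          vassert((offB_GUEST_IP % 8) == 0);
       }
    }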
host_s390_defs.c
   69  vassert(ix >= 0);  in s390_hreg_gpr()
   77  vassert(ix >= 0);  in s390_hreg_fpr()
  119  vassert(r < 16);  in s390_hreg_as_string()
  165  vassert(fits_unsigned_12bit(d));  in s390_amode_b12()
  182  vassert(fits_signed_20bit(d));  in s390_amode_b20()
  199  vassert(fits_unsigned_12bit(d));  in s390_amode_bx12()
  200  vassert(hregNumber(b) != 0);  in s390_amode_bx12()
  201  vassert(hregNumber(x) != 0);  in s390_amode_bx12()
  218  vassert(fits_signed_20bit(d));  in s390_amode_bx20()
  219  vassert(hregNumber(b) != 0);  in s390_amode_bx20()
  [all …]
host_tilegx_defs.c
   79  vassert(hregClass(reg) == HRcInt32 || hregClass(reg) == HRcInt64 ||  in ppHRegTILEGX()
   87  vassert(r >= 0 && r < 64);  in ppHRegTILEGX()
   92  vassert(r >= 0 && r < 64);  in ppHRegTILEGX()
   97  vassert(r >= 0 && r < 64);  in ppHRegTILEGX()
  393  vassert(0);  in ppTILEGXInstr()
  622  vassert(imm16 != 0x8000);  in TILEGXRH_Imm()
  623  vassert(syned == True || syned == False);  in TILEGXRH_Imm()
  812  vassert(0 == (argiregs & ~mask));  in TILEGXInstr_Call()
  828  vassert(0 == (argiregs & ~mask));  in TILEGXInstr_CallAlways()
  889  vassert(sz == 1 || sz == 2 || sz == 4 || sz == 8);  in TILEGXInstr_Load()
  [all …]
host_amd64_defs.c
  119  vassert(r >= 0 && r < 16);  in ppHRegAMD64()
  124  vassert(r >= 0 && r < 16);  in ppHRegAMD64()
  148  vassert(r >= 0 && r < 16);  in ppHRegAMD64_lo32()
  200  vassert(shift >= 0 && shift <= 3);  in AMD64AMode_IRRS()
  610  vassert(op != Aalu_MUL);  in AMD64Instr_Alu64M()
  651  default: vassert(0);  in AMD64Instr_Alu32R()
  668  vassert(sz == 4 || sz == 8);  in AMD64Instr_Div()
  685  vassert(regparms >= 0 && regparms <= 6);  in AMD64Instr_Call()
  686  vassert(is_sane_RetLoc(rloc));  in AMD64Instr_Call()
  726  vassert(cond != Acc_ALWAYS);  in AMD64Instr_CMov64()
  [all …]
guest_arm64_toIR.c
  150  vassert(n > 1 && n < 64);  in sx_to_64()
  260  vassert(i < 65536);  in mkU16()
  266  vassert(i < 256);  in mkU8()
  357  vassert(isPlausibleIRType(ty));  in newTemp()
  371  vassert(t1 && *t1 == IRTemp_INVALID);  in newTempsV128_2()
  372  vassert(t2 && *t2 == IRTemp_INVALID);  in newTempsV128_2()
  380  vassert(t1 && *t1 == IRTemp_INVALID);  in newTempsV128_3()
  381  vassert(t2 && *t2 == IRTemp_INVALID);  in newTempsV128_3()
  382  vassert(t3 && *t3 == IRTemp_INVALID);  in newTempsV128_3()
  391  vassert(t1 && *t1 == IRTemp_INVALID);  in newTempsV128_4()
  [all …]
host_generic_regs.h
  136  vassert(ix <= 0xFFFFF);  in mkHReg()
  137  vassert(enc <= 0x7F);  in mkHReg()
  138  vassert(((UInt)rc) <= 0xF);  in mkHReg()
  139  vassert(((UInt)virtual) <= 1);  in mkHReg()
  140  if (virtual) vassert(enc == 0);  in mkHReg()
  152  vassert(rc >= HRcInt32 && rc <= HRcVec128);  in hregClass()
  412  vassert(pri >= RLPri_INVALID && pri <= RLPri_2Int);  in mk_RetLoc_simple()
  417  vassert(pri >= RLPri_V128SpRel && pri <= RLPri_V256SpRel);  in mk_RetLoc_spRel()
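The host_generic_regs.h hits show what mkHReg() is protecting: an HReg packs a 20-bit register index, a 7-bit hardware encoding, a 4-bit register class and a 1-bit virtual flag into a single 32-bit word, and the assertions reject anything that would not fit. A sketch of that packing; the bit positions are assumptions, while the field widths follow from the quoted limits 0xFFFFF, 0x7F, 0xF and 1:

    static inline HReg mkHReg ( Bool virtual, HRegClass rc, UInt enc, UInt ix )
    {
       vassert(ix <= 0xFFFFF);          /* index: 20 bits */
       vassert(enc <= 0x7F);            /* hardware encoding: 7 bits */
       vassert(((UInt)rc) <= 0xF);      /* register class: 4 bits */
       vassert(((UInt)virtual) <= 1);   /* flag: 1 bit */
       if (virtual) vassert(enc == 0);  /* virtual regs have no encoding yet */
       HReg r;
       r.u32 = (((UInt)virtual) << 31) | (((UInt)rc) << 27)
               | (enc << 20) | ix;      /* packing order is assumed */
       return r;
    }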
ir_opt.c
  254  vassert(h->used < h->size);  in addToHHW()
  486  vassert(d2->mAddr == NULL);  in flatten_Stmt()
  560  vassert((*minoff & ~0xFFFF) == 0);  in getArrayBounds()
  561  vassert((*maxoff & ~0xFFFF) == 0);  in getArrayBounds()
  562  vassert(*minoff <= *maxoff);  in getArrayBounds()
  572  vassert((minoff & ~0xFFFF) == 0);  in mk_key_GetPut()
  573  vassert((maxoff & ~0xFFFF) == 0);  in mk_key_GetPut()
  581  vassert((minoff & ~0xFFFF) == 0);  in mk_key_GetIPutI()
  582  vassert((maxoff & ~0xFFFF) == 0);  in mk_key_GetIPutI()
  594  vassert(k_lo <= k_hi);  in invalidateOverlaps()
  [all …]
host_mips_isel.c
  135  vassert(tmp >= 0);  in lookupIRTemp()
  136  vassert(tmp < env->n_vregmap);  in lookupIRTemp()
  142  vassert(tmp >= 0);  in lookupIRTemp64()
  143  vassert(tmp < env->n_vregmap);  in lookupIRTemp64()
  144  vassert(! hregIsInvalid(env->vregmapHI[tmp]));  in lookupIRTemp64()
  152  vassert(env->mode64);  in lookupIRTempPair()
  153  vassert(tmp >= 0);  in lookupIRTempPair()
  154  vassert(tmp < env->n_vregmap);  in lookupIRTempPair()
  155  vassert(! hregIsInvalid(env->vregmapHI[tmp]));  in lookupIRTempPair()
  196  vassert(n < 256 && (n % 8) == 0);  in add_to_sp()
  [all …]
host_arm_isel.c
  131  vassert(tmp >= 0);  in lookupIRTemp()
  132  vassert(tmp < env->n_vregmap);  in lookupIRTemp()
  138  vassert(tmp >= 0);  in lookupIRTemp64()
  139  vassert(tmp < env->n_vregmap);  in lookupIRTemp64()
  140  vassert(! hregIsInvalid(env->vregmapHI[tmp]));  in lookupIRTemp64()
  257  vassert(sh >= 0 && sh < 32);  in ROR32()
  277  vassert(i == 16);  in fitsIn8x4()
  284  vassert(hregClass(src) == HRcInt32);  in mk_iMOVds_RR()
  285  vassert(hregClass(dst) == HRcInt32);  in mk_iMOVds_RR()
  393  vassert(ARM_N_ARGREGS == 4);  in doHelperCall()
  [all …]
host_x86_defs.c
  112  vassert(r >= 0 && r < 8);  in ppHRegX86()
  117  vassert(r >= 0 && r < 6);  in ppHRegX86()
  122  vassert(r >= 0 && r < 8);  in ppHRegX86()
  174  vassert(shift >= 0 && shift <= 3);  in X86AMode_IRRS()
  578  vassert(op != Xalu_MUL);  in X86Instr_Alu32M()
  631  vassert(op == Xsh_SHL || op == Xsh_SHR);  in X86Instr_Sh3232()
  648  vassert(regparms >= 0 && regparms <= 3);  in X86Instr_Call()
  649  vassert(is_sane_RetLoc(rloc));  in X86Instr_Call()
  687  vassert(cond != Xcc_ALWAYS);  in X86Instr_CMov32()
  698  vassert(szSmall == 1 || szSmall == 2);  in X86Instr_LoadEX()
  [all …]
host_ppc_isel.c
  304  vassert(tmp >= 0);  in lookupIRTemp()
  305  vassert(tmp < env->n_vregmap);  in lookupIRTemp()
  312  vassert(tmp >= 0);  in lookupIRTempPair()
  313  vassert(tmp < env->n_vregmap);  in lookupIRTempPair()
  314  vassert(! hregIsInvalid(env->vregmapMedLo[tmp]));  in lookupIRTempPair()
  323  vassert(!env->mode64);  in lookupIRTempQuad()
  324  vassert(tmp >= 0);  in lookupIRTempQuad()
  325  vassert(tmp < env->n_vregmap);  in lookupIRTempQuad()
  326  vassert(! hregIsInvalid(env->vregmapMedLo[tmp]));  in lookupIRTempQuad()
  515  vassert(hregClass(r_dst) == hregClass(r_src));  in mk_iMOVds_RR()
  [all …]
host_x86_isel.c
  195  vassert(tmp >= 0);  in lookupIRTemp()
  196  vassert(tmp < env->n_vregmap);  in lookupIRTemp()
  202  vassert(tmp >= 0);  in lookupIRTemp64()
  203  vassert(tmp < env->n_vregmap);  in lookupIRTemp64()
  204  vassert(! hregIsInvalid(env->vregmapHI[tmp]));  in lookupIRTemp64()
  291  vassert(hregClass(src) == HRcInt32);  in mk_iMOVsd_RR()
  292  vassert(hregClass(dst) == HRcInt32);  in mk_iMOVsd_RR()
  301  vassert(hregClass(src) == HRcVec128);  in mk_vMOVsd_RR()
  302  vassert(hregClass(dst) == HRcVec128);  in mk_vMOVsd_RR()
  310  vassert(n > 0 && n < 256 && (n%4) == 0);  in add_to_esp()
  [all …]
guest_arm64_helpers.c
  101  vassert( ((UInt)(_cc_op)) < ARM64G_CC_OP_NUMBER); \
  102  vassert( ((UInt)(_cond)) < 16); \
  161  vassert((oldC & ~1) == 0);  in arm64g_calculate_flag_n()
  171  vassert((oldC & ~1) == 0);  in arm64g_calculate_flag_n()
  181  vassert((oldC & ~1) == 0);  in arm64g_calculate_flag_n()
  191  vassert((oldC & ~1) == 0);  in arm64g_calculate_flag_n()
  279  vassert((oldC & ~1) == 0);  in arm64g_calculate_flag_z()
  289  vassert((oldC & ~1) == 0);  in arm64g_calculate_flag_z()
  299  vassert((oldC & ~1) == 0);  in arm64g_calculate_flag_z()
  309  vassert((oldC & ~1) == 0);  in arm64g_calculate_flag_z()
  [all …]
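The repeated (oldC & ~1) == 0 checks in the guest_arm64_helpers.c flag calculators enforce that a carry bit saved in the flag thunk is exactly 0 or 1 before it is reused; a stale or garbage word there would silently corrupt the computed N/Z/C/V values. Illustrated by a standalone helper (the helper itself is an assumption; only the assertion is taken from the listing):

    static UInt add_with_saved_carry32 ( UInt argL, UInt argR, ULong oldC )
    {
       vassert((oldC & ~1ULL) == 0);    /* saved carry must be a clean 0/1 */
       return argL + argR + (UInt)oldC;
    }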
host_arm_defs.c
  154  vassert(r >= 0 && r < 16);  in ppHRegARM()
  159  vassert(r >= 0 && r < 32);  in ppHRegARM()
  164  vassert(r >= 0 && r < 32);  in ppHRegARM()
  169  vassert(r >= 0 && r < 16);  in ppHRegARM()
  210  vassert(-4095 <= simm13 && simm13 <= 4095);  in ARMAMode1_RI()
  219  vassert(0 <= shift && shift <= 3);  in ARMAMode1_RRS()
  238  vassert(0);  in ppARMAMode1()
  278  vassert(-255 <= simm9 && simm9 <= 255);  in ARMAMode2_RI()
  304  vassert(0);  in ppARMAMode2()
  341  vassert(simm11 >= -1020 && simm11 <= 1020);  in mkARMAModeV()
  [all …]
host_amd64_isel.c
  170  vassert(tmp >= 0);  in lookupIRTemp()
  171  vassert(tmp < env->n_vregmap);  in lookupIRTemp()
  178  vassert(tmp >= 0);  in lookupIRTempPair()
  179  vassert(tmp < env->n_vregmap);  in lookupIRTempPair()
  180  vassert(! hregIsInvalid(env->vregmapHI[tmp]));  in lookupIRTempPair()
  312  vassert(hregClass(src) == HRcInt64);  in mk_iMOVsd_RR()
  313  vassert(hregClass(dst) == HRcInt64);  in mk_iMOVsd_RR()
  321  vassert(hregClass(src) == HRcVec128);  in mk_vMOVsd_RR()
  322  vassert(hregClass(dst) == HRcVec128);  in mk_vMOVsd_RR()
  330  vassert(n > 0 && n < 256 && (n%8) == 0);  in add_to_rsp()
  [all …]
guest_arm_helpers.c
   98  vassert( ((UInt)(_cc_op)) < ARMG_CC_OP_NUMBER); \
  145  vassert((oldC & ~1) == 0);  in armg_calculate_flag_n()
  155  vassert((oldC & ~1) == 0);  in armg_calculate_flag_n()
  225  vassert((oldC & ~1) == 0);  in armg_calculate_flag_z()
  235  vassert((oldC & ~1) == 0);  in armg_calculate_flag_z()
  305  vassert((oldC & ~1) == 0);  in armg_calculate_flag_c()
  315  vassert((oldC & ~1) == 0);  in armg_calculate_flag_c()
  322  vassert((shco & ~1) == 0);  in armg_calculate_flag_c()
  329  vassert((cc_dep3 & ~3) == 0);  in armg_calculate_flag_c()
  336  vassert((cc_dep3 & ~3) == 0);  in armg_calculate_flag_c()
  [all …]
host_tilegx_isel.c
  121  vassert(tmp >= 0);  in lookupIRTemp()
  122  vassert(tmp < env->n_vregmap);  in lookupIRTemp()
  184  vassert(hregClass(r_dst) == hregClass(r_src));  in mk_iMOVds_RR()
  185  vassert(hregClass(r_src) == HRcInt32 || hregClass(r_src) == HRcInt64);  in mk_iMOVds_RR()
  303  vassert(argreg < TILEGX_N_REGPARMS);  in doHelperCall()
  304  vassert(typeOfIRExpr(env->type_env, args[i]) == Ity_I32 ||  in doHelperCall()
  320  vassert(argreg < TILEGX_N_REGPARMS);  in doHelperCall()
  321  vassert(typeOfIRExpr(env->type_env, args[i]) == Ity_I32  in doHelperCall()
  394  vassert(sane_AMode(env, am));  in iselWordExpr_AMode()
  404  vassert(ty == Ity_I64);  in iselWordExpr_AMode_wrk()
  [all …]