Lines matching refs:vassert. Each entry gives the source line number, the assertion, and the containing function in the ARM host backend; short illustrative sketches of the recurring check patterns follow the listing.
154 vassert(r >= 0 && r < 16); in ppHRegARM()
159 vassert(r >= 0 && r < 32); in ppHRegARM()
164 vassert(r >= 0 && r < 32); in ppHRegARM()
169 vassert(r >= 0 && r < 16); in ppHRegARM()
210 vassert(-4095 <= simm13 && simm13 <= 4095); in ARMAMode1_RI()
219 vassert(0 <= shift && shift <= 3); in ARMAMode1_RRS()
238 vassert(0); in ppARMAMode1()
278 vassert(-255 <= simm9 && simm9 <= 255); in ARMAMode2_RI()
304 vassert(0); in ppARMAMode2()
341 vassert(simm11 >= -1020 && simm11 <= 1020); in mkARMAModeV()
342 vassert(0 == (simm11 & 3)); in mkARMAModeV()
416 vassert(sh >= 0 && sh < 32); in ROR32()
428 vassert(imm8 >= 0 && imm8 <= 255); in ARMRI84_I84()
429 vassert(imm4 >= 0 && imm4 <= 15); in ARMRI84_I84()
449 vassert(0); in ppARMRI84()
484 vassert(imm5 > 0 && imm5 <= 31); // zero is not allowed in ARMRI5_I5()
503 vassert(0); in ppARMRI5()
1144 vassert(cc != ARMcc_NV); in ARMInstr_LdSt32()
1157 vassert(cc != ARMcc_NV); in ARMInstr_LdSt16()
1168 vassert(cc != ARMcc_NV); in ARMInstr_LdSt8U()
1177 vassert(cc != ARMcc_NV); in ARMInstr_Ld8S()
1215 vassert(cond != ARMcc_AL); in ARMInstr_CMov()
1226 vassert(is_sane_RetLoc(rloc)); in ARMInstr_Call()
1239 vassert(szB == 8 || szB == 4 || szB == 2 || szB == 1); in ARMInstr_LdrEX()
1246 vassert(szB == 8 || szB == 4 || szB == 2 || szB == 1); in ARMInstr_StrEX()
1312 vassert(cond != ARMcc_AL); in ARMInstr_VCMovD()
1321 vassert(cond != ARMcc_AL); in ARMInstr_VCMovS()
1459 vassert(cond != ARMcc_AL); in ARMInstr_NCMovQ()
1484 vassert(amt >= 1 && amt <= 63); in ARMInstr_NShl64()
1500 vassert(i == 16); in fitsIn8x4()
1707 default: vassert(0); in ppARMInstr()
1718 default: vassert(0); in ppARMInstr()
2009 vassert(mode64 == False); in getRegUsage_ARMInstr()
2315 vassert(mode64 == False); in mapRegs_ARMInstr()
2560 vassert(offsetB >= 0); in genSpill_ARM()
2561 vassert(!hregIsVirtual(rreg)); in genSpill_ARM()
2562 vassert(mode64 == False); in genSpill_ARM()
2567 vassert(offsetB <= 4095); in genSpill_ARM()
2577 vassert(0 == (offsetB & 3)); in genSpill_ARM()
2586 vassert(offsetB <= 1020); in genSpill_ARM()
2615 vassert(offsetB >= 0); in genReload_ARM()
2616 vassert(!hregIsVirtual(rreg)); in genReload_ARM()
2617 vassert(mode64 == False); in genReload_ARM()
2622 vassert(offsetB <= 4095); in genReload_ARM()
2632 vassert(0 == (offsetB & 3)); in genReload_ARM()
2641 vassert(offsetB <= 1020); in genReload_ARM()
2674 vassert(hregClass(r) == HRcInt32); in iregEnc()
2675 vassert(!hregIsVirtual(r)); in iregEnc()
2677 vassert(n <= 15); in iregEnc()
2684 vassert(hregClass(r) == HRcFlt64); in dregEnc()
2685 vassert(!hregIsVirtual(r)); in dregEnc()
2687 vassert(n <= 31); in dregEnc()
2694 vassert(hregClass(r) == HRcFlt32); in fregEnc()
2695 vassert(!hregIsVirtual(r)); in fregEnc()
2697 vassert(n <= 31); in fregEnc()
2704 vassert(hregClass(r) == HRcVec128); in qregEnc()
2705 vassert(!hregIsVirtual(r)); in qregEnc()
2707 vassert(n <= 15); in qregEnc()
2767 vassert(0 == (ri->ARMri84.I84.imm4 & ~0x0F)); in skeletal_RI84()
2768 vassert(0 == (ri->ARMri84.I84.imm8 & ~0xFF)); in skeletal_RI84()
2786 vassert(imm5 >= 1 && imm5 <= 31); in skeletal_RI5()
2802 vassert(rD >= 0 && rD <= 14); // r15 not good to mess with! in imm32_to_ireg()
2897 vassert(0); /* lose */ in imm32_to_ireg_EXACTLY2()
2919 vassert(0); /* lose */ in is_imm32_to_ireg_EXACTLY2()
2927 vassert(rD <= 12); in do_load_or_store32()
2928 vassert(am->tag == ARMam1_RI); // RR case is not handled in do_load_or_store32()
2940 vassert(simm12 >= 0 && simm12 <= 4095); in do_load_or_store32()
2965 vassert(nbuf >= 32); in emit_ARMInstr()
2966 vassert(mode64 == False); in emit_ARMInstr()
2967 vassert(0 == (((HWord)buf) & 3)); in emit_ARMInstr()
3086 vassert(cc != ARMcc_NV); in emit_ARMInstr()
3097 vassert(simm12 >= 0 && simm12 <= 4095); in emit_ARMInstr()
3115 vassert(cc != ARMcc_NV); in emit_ARMInstr()
3127 vassert(simm8 >= 0 && simm8 <= 255); in emit_ARMInstr()
3130 vassert(!(bL == 0 && bS == 1)); // "! signed store" in emit_ARMInstr()
3152 else vassert(0); // ill-constructed insn in emit_ARMInstr()
3162 vassert(cc != ARMcc_NV); in emit_ARMInstr()
3174 vassert(simm8 >= 0 && simm8 <= 255); in emit_ARMInstr()
3194 vassert(disp_cp_chain_me_to_slowEP != NULL); in emit_ARMInstr()
3195 vassert(disp_cp_chain_me_to_fastEP != NULL); in emit_ARMInstr()
3204 vassert(i->ARMin.XDirect.cond != ARMcc_NV); in emit_ARMInstr()
3236 vassert(delta > 0 && delta < 40); in emit_ARMInstr()
3237 vassert((delta & 3) == 0); in emit_ARMInstr()
3239 vassert(notCond <= 13); /* Neither AL nor NV */ in emit_ARMInstr()
3253 vassert(disp_cp_xindir != NULL); in emit_ARMInstr()
3262 vassert(i->ARMin.XIndir.cond != ARMcc_NV); in emit_ARMInstr()
3282 vassert(delta > 0 && delta < 40); in emit_ARMInstr()
3283 vassert((delta & 3) == 0); in emit_ARMInstr()
3285 vassert(notCond <= 13); /* Neither AL nor NV */ in emit_ARMInstr()
3300 vassert(i->ARMin.XAssisted.cond != ARMcc_NV); in emit_ARMInstr()
3334 vassert(trcval != 0); in emit_ARMInstr()
3346 vassert(delta > 0 && delta < 40); in emit_ARMInstr()
3347 vassert((delta & 3) == 0); in emit_ARMInstr()
3349 vassert(notCond <= 13); /* Neither AL nor NV */ in emit_ARMInstr()
3378 default: vassert(0); in emit_ARMInstr()
3442 vassert(0); //ATC in emit_ARMInstr()
3448 vassert(0); in emit_ARMInstr()
3469 default: vassert(0); in emit_ARMInstr()
3511 vassert(0 == (off8 & 3)); in emit_ARMInstr()
3513 vassert(0 == (off8 & 0xFFFFFF00)); in emit_ARMInstr()
3528 vassert(0 == (off8 & 3)); in emit_ARMInstr()
3530 vassert(0 == (off8 & 0xFFFFFF00)); in emit_ARMInstr()
3548 vassert(pqrs != X1111); in emit_ARMInstr()
3573 vassert(pqrs != X1111); in emit_ARMInstr()
3650 vassert(cc < 16 && cc != ARMcc_AL); in emit_ARMInstr()
3659 vassert(cc < 16 && cc != ARMcc_AL); in emit_ARMInstr()
3760 vassert(0); in emit_ARMInstr()
3795 vassert(hregClass(i->ARMin.NLdStQ.dQ) == HRcVec128); in emit_ARMInstr()
3815 vassert(hregClass(i->ARMin.NLdStD.dD) == HRcFlt64); in emit_ARMInstr()
4075 vassert(sz1 + sz2 < 2); in emit_ARMInstr()
4511 vassert(amt >= 1 && amt <= 63); in emit_ARMInstr()
4512 vassert(hregClass(regDreg) == HRcFlt64); in emit_ARMInstr()
4513 vassert(hregClass(regMreg) == HRcFlt64); in emit_ARMInstr()
4583 vassert(cc < 16 && cc != ARMcc_AL && cc != ARMcc_NV); in emit_ARMInstr()
4597 vassert(regD != regN); in emit_ARMInstr()
4629 vassert(evCheckSzB_ARM() == (UChar*)p - (UChar*)p0); in emit_ARMInstr()
4656 vassert(!(*is_profInc)); in emit_ARMInstr()
4672 vassert(((UChar*)p) - &buf[0] <= 32); in emit_ARMInstr()
4693 vassert(endness_host == VexEndnessLE); in chainXDirect_ARM()
4704 vassert(0 == (3 & (HWord)p)); in chainXDirect_ARM()
4705 vassert(is_imm32_to_ireg_EXACTLY2( in chainXDirect_ARM()
4707 vassert(p[2] == 0xE12FFF3C); in chainXDirect_ARM()
4738 vassert(0 == (delta & (Long)3)); in chainXDirect_ARM()
4754 vassert(simm24 == ((simm24 << 8) >> 8)); in chainXDirect_ARM()
4776 vassert(endness_host == VexEndnessLE); in unchainXDirect_ARM()
4795 vassert(0 == (3 & (HWord)p)); in unchainXDirect_ARM()
4815 vassert(valid); in unchainXDirect_ARM()
4839 vassert(endness_host == VexEndnessLE); in patchProfInc_ARM()
4840 vassert(sizeof(ULong*) == 4); in patchProfInc_ARM()
4842 vassert(0 == (3 & (HWord)p)); in patchProfInc_ARM()
4843 vassert(is_imm32_to_ireg_EXACTLY2(p, /*r*/12, 0x65556555)); in patchProfInc_ARM()
4844 vassert(p[2] == 0xE59CB000); in patchProfInc_ARM()
4845 vassert(p[3] == 0xE29BB001); in patchProfInc_ARM()
4846 vassert(p[4] == 0xE58CB000); in patchProfInc_ARM()
4847 vassert(p[5] == 0xE59CB004); in patchProfInc_ARM()
4848 vassert(p[6] == 0xE2ABB000); in patchProfInc_ARM()
4849 vassert(p[7] == 0xE58CB004); in patchProfInc_ARM()
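
Taken together, the listing shows how heavily this backend leans on vassert() for internal consistency: register encodings, immediate ranges, alignment requirements and "impossible" default cases all funnel through the same macro. vassert is VEX's internal assertion facility; the sketch below only approximates its behaviour, and the helper name assert_fail plus the exact expansion are assumptions rather than the backend's real definition.

    #include <stdio.h>
    #include <stdlib.h>

    /* Hypothetical stand-in for the real failure handler; the backend
       routes assertion failures through its own panic machinery. */
    static void assert_fail(const char* expr, const char* file,
                            int line, const char* fn)
    {
        fprintf(stderr, "vex: assertion failed: %s (%s:%d, in %s)\n",
                expr, file, line, fn);
        abort();
    }

    /* Approximation of the vassert() usage seen throughout the listing:
       evaluate the condition and bail out hard if it does not hold. */
    #define vassert(expr) \
        ((void)((expr) ? 0 : (assert_fail(#expr, __FILE__, __LINE__, __func__), 0)))

    int main(void)
    {
        int r = 7;
        vassert(r >= 0 && r < 16);   /* passes: a valid 4-bit register encoding */
        vassert(0);                  /* "impossible" case, as in ppARMAMode1(): aborts */
        return 0;
    }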
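
The checks in ROR32(), ARMRI84_I84(), skeletal_RI84() and fitsIn8x4() all guard the ARM "modified immediate" form: an 8-bit payload rotated right by twice a 4-bit field. That is why the asserted ranges are 0..255 for imm8, 0..15 for imm4 and a rotation amount below 32, and why fitsIn8x4() can assert i == 16 once its search over the sixteen possible rotations has completed. A minimal sketch of that search, with hypothetical helper names (ror32, fits_in_8x4):

    #include <assert.h>
    #include <stdint.h>
    #include <stdio.h>

    /* Rotate right by sh bits; sh must be in 0..31, matching the check
       in ROR32(). */
    static uint32_t ror32(uint32_t x, unsigned sh)
    {
        assert(sh < 32);
        return sh == 0 ? x : (x >> sh) | (x << (32 - sh));
    }

    /* Can u be expressed as an 8-bit value rotated right by 2*imm4?
       Try all sixteen rotations, mirroring the loop behind the
       vassert(i == 16) in fitsIn8x4(). */
    static int fits_in_8x4(uint32_t u, uint8_t* imm8, uint8_t* imm4)
    {
        for (unsigned i = 0; i < 16; i++) {
            uint32_t undone = ror32(u, (32 - 2*i) & 31);  /* rotate left by 2*i */
            if ((undone & 0xFFu) == undone) {
                *imm8 = (uint8_t)undone;   /* always 0..255 */
                *imm4 = (uint8_t)i;        /* always 0..15  */
                return 1;
            }
        }
        return 0;
    }

    int main(void)
    {
        uint8_t imm8, imm4;
        if (fits_in_8x4(0x0003FC00u, &imm8, &imm4))
            printf("0x0003FC00 == ror32(0x%02X, %u)\n",
                   (unsigned)imm8, 2u * imm4);
        return 0;
    }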
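
The address-mode and spill/reload assertions mirror the offset ranges that the underlying ARM load/store encodings can actually express: -4095..4095 for the 12-bit LDR/STR immediate (ARMAMode1_RI(), do_load_or_store32(), the 4095 limit in genSpill_ARM()/genReload_ARM()), -255..255 for the 8-bit immediate of the halfword/signed-byte forms (ARMAMode2_RI()), and -1020..1020 with 4-byte alignment for the VFP offset, an 8-bit immediate scaled by 4 (mkARMAModeV(), the 1020 limit in genSpill_ARM()/genReload_ARM()). A small validator capturing the same bounds; the enum and function names are illustrative, not the backend's own:

    #include <stdbool.h>
    #include <stdio.h>

    typedef enum { AM_Word, AM_HalfOrSByte, AM_VFP } AddrKind;

    static bool offset_encodable(AddrKind k, int off)
    {
        switch (k) {
        case AM_Word:        /* LDR/STR: 12-bit immediate, add or subtract */
            return off >= -4095 && off <= 4095;       /* cf. ARMAMode1_RI() */
        case AM_HalfOrSByte: /* LDRH/LDRSB and friends: 8-bit immediate    */
            return off >= -255 && off <= 255;         /* cf. ARMAMode2_RI() */
        case AM_VFP:         /* VLDR/VSTR: 8-bit immediate scaled by 4     */
            return off >= -1020 && off <= 1020        /* cf. mkARMAModeV()  */
                && (off & 3) == 0;
        }
        return false;
    }

    int main(void)
    {
        printf("%d %d %d\n",
               offset_encodable(AM_Word, 4096),   /* 0: out of range      */
               offset_encodable(AM_VFP,  1020),   /* 1: max scaled offset */
               offset_encodable(AM_VFP,  1018));  /* 0: not 4-aligned     */
        return 0;
    }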
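
The iregEnc()/dregEnc()/fregEnc()/qregEnc() assertions all follow one pattern: check the register class, check that register allocation has already replaced virtual registers with real ones, and check that the hardware index fits the relevant instruction field (4 bits for core and Q registers, 5 bits for D and S registers). A simplified, hypothetical mirror of that pattern; the real backend packs this information into its HReg type rather than a struct:

    #include <assert.h>

    typedef enum { RC_Int32, RC_Flt32, RC_Flt64, RC_Vec128 } RegClass;

    typedef struct {
        RegClass klass;
        int      is_virtual;  /* nonzero before register allocation */
        unsigned index;       /* hardware encoding                  */
    } Reg;

    static unsigned ireg_enc(Reg r)
    {
        assert(r.klass == RC_Int32);  /* cf. vassert(hregClass(r) == HRcInt32) */
        assert(!r.is_virtual);        /* cf. vassert(!hregIsVirtual(r))        */
        assert(r.index <= 15);        /* 4-bit field in the instruction word   */
        return r.index;
    }

    static unsigned dreg_enc(Reg r)
    {
        assert(r.klass == RC_Flt64);
        assert(!r.is_virtual);
        assert(r.index <= 31);        /* D0..D31: 5-bit field */
        return r.index;
    }

    int main(void)
    {
        Reg r5  = { RC_Int32, 0, 5 };
        Reg d17 = { RC_Flt64, 0, 17 };
        return (ireg_enc(r5) + dreg_enc(d17)) == 22 ? 0 : 1;
    }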
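
Finally, chainXDirect_ARM(), unchainXDirect_ARM() and patchProfInc_ARM() re-verify the exact instruction words they expect at the patch site (a two-instruction imm32-to-r12 load followed by fixed words such as 0xE12FFF3C) before rewriting anything, so a patch can never land on code in an unexpected state. A generic sketch of that "verify, then patch" discipline; patch_words() and its arguments are hypothetical:

    #include <assert.h>
    #include <stddef.h>
    #include <stdint.h>
    #include <string.h>

    /* Refuse to rewrite code unless every expected 32-bit word is present,
       mirroring the vassert(p[i] == 0x...) sequence in patchProfInc_ARM(). */
    static void patch_words(uint32_t* site, const uint32_t* expected,
                            const uint32_t* replacement, size_t n_words)
    {
        assert(((uintptr_t)site & 3) == 0);   /* cf. vassert(0 == (3 & (HWord)p)) */
        for (size_t i = 0; i < n_words; i++)
            assert(site[i] == expected[i]);   /* site must be in its known state  */
        memcpy(site, replacement, n_words * sizeof(uint32_t));
        /* A real backend would also invalidate the instruction cache here. */
    }

    int main(void)
    {
        /* Example words taken from the listing above; used here only as data. */
        uint32_t code[2]    = { 0xE59CB000u, 0xE58CB000u };
        uint32_t expect[2]  = { 0xE59CB000u, 0xE58CB000u };
        uint32_t newcode[2] = { 0xE59CB004u, 0xE58CB004u };
        patch_words(code, expect, newcode, 2);
        return 0;
    }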