Lines Matching refs:vassert
156 vassert(r >= 0 && r < 31); in ppHRegARM64()
161 vassert(r >= 0 && r < 32); in ppHRegARM64()
166 vassert(r >= 0 && r < 32); in ppHRegARM64()
217 vassert(-256 <= simm9 && simm9 <= 255); in ARM64AMode_RI9()
227 vassert(uimm12 >= 0 && uimm12 <= 4095); in ARM64AMode_RI12()
230 default: vassert(0); in ARM64AMode_RI12()
264 vassert(0); in ppARM64AMode()
310 vassert(imm12 < 4096); in ARM64RIA_I12()
311 vassert(shift == 0 || shift == 12); in ARM64RIA_I12()
331 vassert(0); in ppARM64RIA()
368 vassert(bitN < 2); in ARM64RIL_I13()
369 vassert(immR < 64); in ARM64RIL_I13()
370 vassert(immS < 64); in ARM64RIL_I13()
392 vassert(0); in ppARM64RIL()
427 vassert(imm6 > 0 && imm6 < 64); in ARM64RI6_I6()
446 vassert(0); in ppARM64RI6()
877 vassert(hregClass(src) == HRcInt64); in ARM64Instr_MovI()
878 vassert(hregClass(dst) == HRcInt64); in ARM64Instr_MovI()
967 vassert(is_sane_RetLoc(rloc)); in ARM64Instr_Call()
974 vassert(-4096 < simm && simm < 4096); in ARM64Instr_AddToSP()
975 vassert(0 == (simm & 0xF)); in ARM64Instr_AddToSP()
998 vassert(szB == 8 || szB == 4 || szB == 2 || szB == 1); in ARM64Instr_LdrEX()
1005 vassert(szB == 8 || szB == 4 || szB == 2 || szB == 1); in ARM64Instr_StrEX()
1020 vassert(uimm12 < 8192 && 0 == (uimm12 & 1)); in ARM64Instr_VLdStH()
1030 vassert(uimm12 < 16384 && 0 == (uimm12 & 3)); in ARM64Instr_VLdStS()
1040 vassert(uimm12 < 32768 && 0 == (uimm12 & 7)); in ARM64Instr_VLdStD()
1067 vassert(armRM <= 3); in ARM64Instr_VCvtF2I()
1203 vassert(dszBlg2 == 0 || dszBlg2 == 1 || dszBlg2 == 2); in ARM64Instr_VNarrowV()
1257 vassert(0); in ARM64Instr_VShiftImmV()
1259 vassert(maxSh > 0); in ARM64Instr_VShiftImmV()
1260 vassert(amt >= minSh && amt <= maxSh); in ARM64Instr_VShiftImmV()
1270 vassert(amtB >= 1 && amtB <= 15); in ARM64Instr_VExtV()
1284 vassert(0); in ARM64Instr_VImmQ()
1316 vassert(laneNo <= 1); in ARM64Instr_VXfromQ()
1335 vassert(hregClass(src) == HRcVec128); in ARM64Instr_VMov()
1336 vassert(hregClass(dst) == HRcVec128); in ARM64Instr_VMov()
1339 vassert(hregClass(src) == HRcFlt64); in ARM64Instr_VMov()
1340 vassert(hregClass(dst) == HRcFlt64); in ARM64Instr_VMov()
1549 default: vassert(0); in ppARM64Instr()
1561 default: vassert(0); in ppARM64Instr()
1906 vassert(mode64 == True); in getRegUsage_ARM64Instr()
2233 vassert(mode64 == True); in mapRegs_ARM64Instr()
2508 vassert(offsetB >= 0); in genSpill_ARM64()
2509 vassert(!hregIsVirtual(rreg)); in genSpill_ARM64()
2510 vassert(mode64 == True); in genSpill_ARM64()
2515 vassert(0 == (offsetB & 7)); in genSpill_ARM64()
2517 vassert(offsetB < 4096); in genSpill_ARM64()
2525 vassert(0 == (offsetB & 7)); in genSpill_ARM64()
2526 vassert(offsetB >= 0 && offsetB < 32768); in genSpill_ARM64()
2533 vassert(0 == (offsetB & 15)); // check sane alignment in genSpill_ARM64()
2534 vassert(offsetB < 4096); in genSpill_ARM64()
2549 vassert(offsetB >= 0); in genReload_ARM64()
2550 vassert(!hregIsVirtual(rreg)); in genReload_ARM64()
2551 vassert(mode64 == True); in genReload_ARM64()
2556 vassert(0 == (offsetB & 7)); in genReload_ARM64()
2558 vassert(offsetB < 4096); in genReload_ARM64()
2566 vassert(0 == (offsetB & 7)); in genReload_ARM64()
2567 vassert(offsetB >= 0 && offsetB < 32768); in genReload_ARM64()
2574 vassert(0 == (offsetB & 15)); // check sane alignment in genReload_ARM64()
2575 vassert(offsetB < 4096); in genReload_ARM64()
2594 vassert(hregClass(r) == HRcInt64); in iregEnc()
2595 vassert(!hregIsVirtual(r)); in iregEnc()
2597 vassert(n <= 30); in iregEnc()
2604 vassert(hregClass(r) == HRcFlt64); in dregEnc()
2605 vassert(!hregIsVirtual(r)); in dregEnc()
2607 vassert(n <= 31); in dregEnc()
2614 vassert(hregClass(r) == HRcVec128); in qregEnc()
2615 vassert(!hregIsVirtual(r)); in qregEnc()
2617 vassert(n <= 31); in qregEnc()
2746 vassert(8+19+1+4 == 32); in X_8_19_1_4()
2747 vassert(f1 < (1<<8)); in X_8_19_1_4()
2748 vassert(f2 < (1<<19)); in X_8_19_1_4()
2749 vassert(f3 < (1<<1)); in X_8_19_1_4()
2750 vassert(f4 < (1<<4)); in X_8_19_1_4()
2763 vassert(3+6+2+16+5 == 32); in X_3_6_2_16_5()
2764 vassert(f1 < (1<<3)); in X_3_6_2_16_5()
2765 vassert(f2 < (1<<6)); in X_3_6_2_16_5()
2766 vassert(f3 < (1<<2)); in X_3_6_2_16_5()
2767 vassert(f4 < (1<<16)); in X_3_6_2_16_5()
2768 vassert(f5 < (1<<5)); in X_3_6_2_16_5()
2782 vassert(2+6+2+12+5+5 == 32); in X_2_6_2_12_5_5()
2783 vassert(f1 < (1<<2)); in X_2_6_2_12_5_5()
2784 vassert(f2 < (1<<6)); in X_2_6_2_12_5_5()
2785 vassert(f3 < (1<<2)); in X_2_6_2_12_5_5()
2786 vassert(f4 < (1<<12)); in X_2_6_2_12_5_5()
2787 vassert(f5 < (1<<5)); in X_2_6_2_12_5_5()
2788 vassert(f6 < (1<<5)); in X_2_6_2_12_5_5()
2801 vassert(3+8+5+6+5+5 == 32); in X_3_8_5_6_5_5()
2802 vassert(f1 < (1<<3)); in X_3_8_5_6_5_5()
2803 vassert(f2 < (1<<8)); in X_3_8_5_6_5_5()
2804 vassert(f3 < (1<<5)); in X_3_8_5_6_5_5()
2805 vassert(f4 < (1<<6)); in X_3_8_5_6_5_5()
2806 vassert(f5 < (1<<5)); in X_3_8_5_6_5_5()
2807 vassert(f6 < (1<<5)); in X_3_8_5_6_5_5()
2820 vassert(3+8+5+6+5+5 == 32); in X_3_5_8_6_5_5()
2821 vassert(f1 < (1<<3)); in X_3_5_8_6_5_5()
2822 vassert(f2 < (1<<5)); in X_3_5_8_6_5_5()
2823 vassert(f3 < (1<<8)); in X_3_5_8_6_5_5()
2824 vassert(f4 < (1<<6)); in X_3_5_8_6_5_5()
2825 vassert(f5 < (1<<5)); in X_3_5_8_6_5_5()
2826 vassert(f6 < (1<<5)); in X_3_5_8_6_5_5()
2839 vassert(3+6+7+6+5+5 == 32); in X_3_6_7_6_5_5()
2840 vassert(f1 < (1<<3)); in X_3_6_7_6_5_5()
2841 vassert(f2 < (1<<6)); in X_3_6_7_6_5_5()
2842 vassert(f3 < (1<<7)); in X_3_6_7_6_5_5()
2843 vassert(f4 < (1<<6)); in X_3_6_7_6_5_5()
2844 vassert(f5 < (1<<5)); in X_3_6_7_6_5_5()
2845 vassert(f6 < (1<<5)); in X_3_6_7_6_5_5()
2860 vassert(2+6+3+9+2+5+5 == 32); in X_2_6_3_9_2_5_5()
2861 vassert(f1 < (1<<2)); in X_2_6_3_9_2_5_5()
2862 vassert(f2 < (1<<6)); in X_2_6_3_9_2_5_5()
2863 vassert(f3 < (1<<3)); in X_2_6_3_9_2_5_5()
2864 vassert(f4 < (1<<9)); in X_2_6_3_9_2_5_5()
2865 vassert(f5 < (1<<2)); in X_2_6_3_9_2_5_5()
2866 vassert(f6 < (1<<5)); in X_2_6_3_9_2_5_5()
2867 vassert(f7 < (1<<5)); in X_2_6_3_9_2_5_5()
2881 vassert(3+6+1+6+6+5+5 == 32); in X_3_6_1_6_6_5_5()
2882 vassert(f1 < (1<<3)); in X_3_6_1_6_6_5_5()
2883 vassert(f2 < (1<<6)); in X_3_6_1_6_6_5_5()
2884 vassert(f3 < (1<<1)); in X_3_6_1_6_6_5_5()
2885 vassert(f4 < (1<<6)); in X_3_6_1_6_6_5_5()
2886 vassert(f5 < (1<<6)); in X_3_6_1_6_6_5_5()
2887 vassert(f6 < (1<<5)); in X_3_6_1_6_6_5_5()
2888 vassert(f7 < (1<<5)); in X_3_6_1_6_6_5_5()
2974 vassert(i < 4); in imm64_to_ireg()
3051 vassert(wD <= 30); in do_load_or_store8()
3057 vassert(-256 <= simm9 && simm9 <= 255); in do_load_or_store8()
3070 vassert(scale == 1); /* failure of this is serious. Do not ignore. */ in do_load_or_store8()
3072 vassert(xN <= 30); in do_load_or_store8()
3084 vassert(xN <= 30); in do_load_or_store8()
3091 vassert(0); in do_load_or_store8()
3100 vassert(wD <= 30); in do_load_or_store16()
3106 vassert(-256 <= simm9 && simm9 <= 255); in do_load_or_store16()
3119 vassert(scale == 2); /* failure of this is serious. Do not ignore. */ in do_load_or_store16()
3121 vassert(xN <= 30); in do_load_or_store16()
3133 vassert(xN <= 30); in do_load_or_store16()
3140 vassert(0); in do_load_or_store16()
3149 vassert(wD <= 30); in do_load_or_store32()
3155 vassert(-256 <= simm9 && simm9 <= 255); in do_load_or_store32()
3168 vassert(scale == 4); /* failure of this is serious. Do not ignore. */ in do_load_or_store32()
3170 vassert(xN <= 30); in do_load_or_store32()
3182 vassert(xN <= 30); in do_load_or_store32()
3189 vassert(0); in do_load_or_store32()
3199 vassert(xD <= 30); in do_load_or_store64()
3205 vassert(-256 <= simm9 && simm9 <= 255); in do_load_or_store64()
3207 vassert(xN <= 30); in do_load_or_store64()
3219 vassert(scale == 8); /* failure of this is serious. Do not ignore. */ in do_load_or_store64()
3221 vassert(xN <= 30); in do_load_or_store64()
3233 vassert(xN <= 30); in do_load_or_store64()
3240 vassert(0); in do_load_or_store64()
3259 vassert(nbuf >= 32); in emit_ARM64Instr()
3260 vassert(mode64 == True); in emit_ARM64Instr()
3261 vassert(0 == (((HWord)buf) & 3)); in emit_ARM64Instr()
3322 vassert(rD < 31); in emit_ARM64Instr()
3323 vassert(rN < 31); in emit_ARM64Instr()
3330 vassert(opc != 0); in emit_ARM64Instr()
3348 vassert(rM < 31); in emit_ARM64Instr()
3380 vassert(rD < 31); in emit_ARM64Instr()
3381 vassert(rN < 31); in emit_ARM64Instr()
3388 vassert(sh > 0 && sh < 64); in emit_ARM64Instr()
3401 vassert(0); in emit_ARM64Instr()
3410 vassert(rM < 31); in emit_ARM64Instr()
3416 default: vassert(0); in emit_ARM64Instr()
3422 vassert(0); in emit_ARM64Instr()
3499 vassert(disp_cp_chain_me_to_slowEP != NULL); in emit_ARM64Instr()
3500 vassert(disp_cp_chain_me_to_fastEP != NULL); in emit_ARM64Instr()
3509 vassert(i->ARM64in.XDirect.cond != ARM64cc_NV); in emit_ARM64Instr()
3541 vassert(delta > 0 && delta < 40); in emit_ARM64Instr()
3542 vassert((delta & 3) == 0); in emit_ARM64Instr()
3544 vassert(notCond <= 13); /* Neither AL nor NV */ in emit_ARM64Instr()
3545 vassert(ptmp != NULL); in emit_ARM64Instr()
3563 vassert(0); //ATC in emit_ARM64Instr()
3582 vassert(0); //ATC in emit_ARM64Instr()
3603 vassert(i->ARM64in.XDirect.cond != ARM64cc_NV); in emit_ARM64Instr()
3639 vassert(trcval != 0); in emit_ARM64Instr()
3650 vassert(delta > 0 && delta < 40); in emit_ARM64Instr()
3651 vassert((delta & 3) == 0); in emit_ARM64Instr()
3653 vassert(notCond <= 13); /* Neither AL nor NV */ in emit_ARM64Instr()
3654 vassert(ptmp != NULL); in emit_ARM64Instr()
3667 vassert(dd < 31 && nn < 31 && mm < 31 && cond < 16); in emit_ARM64Instr()
3705 vassert(dist >= 2 && dist <= 5); in emit_ARM64Instr()
3706 vassert(ptmp != NULL); in emit_ARM64Instr()
3711 vassert(ptmp == NULL); in emit_ARM64Instr()
3722 vassert(-4096 < simm12 && simm12 < 4096); in emit_ARM64Instr()
3723 vassert(0 == (simm12 & 0xF)); in emit_ARM64Instr()
3735 vassert(dd < 31); in emit_ARM64Instr()
3748 vassert(dd < 31 && nn < 31 && mm < 31); in emit_ARM64Instr()
3760 vassert(0); in emit_ARM64Instr()
3814 vassert(uimm12 < 8192 && 0 == (uimm12 & 1)); in emit_ARM64Instr()
3816 vassert(uimm12 < (1<<12)); in emit_ARM64Instr()
3817 vassert(hD < 32); in emit_ARM64Instr()
3818 vassert(rN < 31); in emit_ARM64Instr()
3831 vassert(uimm12 < 16384 && 0 == (uimm12 & 3)); in emit_ARM64Instr()
3833 vassert(uimm12 < (1<<12)); in emit_ARM64Instr()
3834 vassert(sD < 32); in emit_ARM64Instr()
3835 vassert(rN < 31); in emit_ARM64Instr()
3848 vassert(uimm12 < 32768 && 0 == (uimm12 & 7)); in emit_ARM64Instr()
3850 vassert(uimm12 < (1<<12)); in emit_ARM64Instr()
3851 vassert(dD < 32); in emit_ARM64Instr()
3852 vassert(rN < 31); in emit_ARM64Instr()
3863 vassert(rQ < 32); in emit_ARM64Instr()
3864 vassert(rN < 31); in emit_ARM64Instr()
4111 vassert(b1512 < 16); in emit_ARM64Instr()
4134 vassert(b1512 < 16); in emit_ARM64Instr()
4163 vassert(cond < 16); in emit_ARM64Instr()
5000 vassert(dszBlg2 >= 0 && dszBlg2 <= 2); in emit_ARM64Instr()
5198 vassert(imm4 >= 1 && imm4 <= 15); in emit_ARM64Instr()
5206 vassert(rQ < 32); in emit_ARM64Instr()
5250 vassert(xx < 31); in emit_ARM64Instr()
5262 vassert(xx < 31); in emit_ARM64Instr()
5276 vassert(xhi < 31 && xlo < 31); in emit_ARM64Instr()
5289 vassert(dd < 31); in emit_ARM64Instr()
5290 vassert(laneNo < 2); in emit_ARM64Instr()
5303 vassert(dd < 31); in emit_ARM64Instr()
5361 vassert(evCheckSzB_ARM64() == (UChar*)p - (UChar*)p0); in emit_ARM64Instr()
5381 vassert(!(*is_profInc)); in emit_ARM64Instr()
5397 vassert(((UChar*)p) - &buf[0] <= 36); in emit_ARM64Instr()
5418 vassert(endness_host == VexEndnessLE); in chainXDirect_ARM64()
5431 vassert(0 == (3 & (HWord)p)); in chainXDirect_ARM64()
5432 vassert(is_imm64_to_ireg_EXACTLY4( in chainXDirect_ARM64()
5434 vassert(p[4] == 0xD63F0120); in chainXDirect_ARM64()
5463 vassert(endness_host == VexEndnessLE); in unchainXDirect_ARM64()
5476 vassert(0 == (3 & (HWord)p)); in unchainXDirect_ARM64()
5477 vassert(is_imm64_to_ireg_EXACTLY4( in unchainXDirect_ARM64()
5479 vassert(p[4] == 0xD61F0120); in unchainXDirect_ARM64()
5505 vassert(sizeof(ULong*) == 8); in patchProfInc_ARM64()
5506 vassert(endness_host == VexEndnessLE); in patchProfInc_ARM64()
5508 vassert(0 == (3 & (HWord)p)); in patchProfInc_ARM64()
5509 vassert(is_imm64_to_ireg_EXACTLY4(p, /*x*/9, 0x6555755585559566ULL)); in patchProfInc_ARM64()
5510 vassert(p[4] == 0xF9400128); in patchProfInc_ARM64()
5511 vassert(p[5] == 0x91000508); in patchProfInc_ARM64()
5512 vassert(p[6] == 0xF9000128); in patchProfInc_ARM64()
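The X_8_19_1_4 through X_3_6_1_6_6_5_5 entries above all follow one pattern: each helper packs a fixed set of bit fields into a single 32-bit AArch64 instruction word, first asserting that the field widths named in the function sum to 32 and that every argument fits its slot. A minimal sketch of that pattern, with an assumed bit layout (most-significant field first) rather than the actual VEX encoder, would be:

    /* Sketch only: illustrates the assertion pattern seen in the
       X_8_19_1_4-style helpers above; the real field layout lives in
       host_arm64_defs.c and may differ. */
    #include <assert.h>
    #include <stdint.h>

    static uint32_t pack_8_19_1_4(uint32_t f1, uint32_t f2,
                                  uint32_t f3, uint32_t f4)
    {
       assert(8 + 19 + 1 + 4 == 32);   /* widths cover the word exactly */
       assert(f1 < (1u << 8));         /* each field fits its slot */
       assert(f2 < (1u << 19));
       assert(f3 < (1u << 1));
       assert(f4 < (1u << 4));
       /* assumed layout: f1 in bits [31:24], f2 in [23:5],
          f3 in [4], f4 in [3:0] */
       return (f1 << 24) | (f2 << 5) | (f3 << 4) | f4;
    }

The width-sum assert costs nothing at runtime that matters and keeps the helper's name honest: if the fields are ever rearranged or resized, the first assertion fails immediately rather than silently emitting a malformed instruction word.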