Lines Matching refs:vuc
8 vector unsigned char vuc = { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16 }; variable
104 res_vuc = vec_add(vuc, vuc); in test1()
108 res_vuc = vec_add(vbc, vuc); in test1()
112 res_vuc = vec_add(vuc, vbc); in test1()
180 res_vuc = vec_vaddubm(vuc, vuc); in test1()
184 res_vuc = vec_vaddubm(vbc, vuc); in test1()
188 res_vuc = vec_vaddubm(vuc, vbc); in test1()
266 res_vuc = vec_adds(vuc, vuc); in test1()
270 res_vuc = vec_adds(vbc, vuc); in test1()
274 res_vuc = vec_adds(vuc, vbc); in test1()
338 res_vuc = vec_vaddubs(vuc, vuc); in test1()
342 res_vuc = vec_vaddubs(vbc, vuc); in test1()
346 res_vuc = vec_vaddubs(vuc, vbc); in test1()
411 res_vuc = vec_and(vuc, vuc); in test1()
415 res_vuc = vec_and(vbc, vuc); in test1()
419 res_vuc = vec_and(vuc, vbc); in test1()
495 res_vuc = vec_vand(vuc, vuc); in test1()
499 res_vuc = vec_vand(vbc, vuc); in test1()
503 res_vuc = vec_vand(vuc, vbc); in test1()
586 res_vuc = vec_andc(vuc, vuc); in test1()
592 res_vuc = vec_andc(vbc, vuc); in test1()
598 res_vuc = vec_andc(vuc, vbc); in test1()
722 res_vuc = vec_vandc(vuc, vuc); in test1()
728 res_vuc = vec_vandc(vbc, vuc); in test1()
734 res_vuc = vec_vandc(vuc, vbc); in test1()
851 res_vuc = vec_avg(vuc, vuc); in test2()
875 res_vuc = vec_vavgub(vuc, vuc); in test2()
918 res_vbc = vec_cmpeq(vuc, vuc); in test2()
960 res_vbc = vec_cmpgt(vuc, vuc); in test5()
988 res_vbc = vec_vcmpgtub(vuc, vuc); in test5()
1025 res_vbc = vec_cmplt(vuc, vuc); in test6()
1141 res_vuc = vec_ld(0, &vuc); in test6()
1213 res_vuc = vec_lvx(0, &vuc); in test6()
1343 res_vuc = vec_ldl(0, &vuc); in test6()
1415 res_vuc = vec_lvxl(0, &vuc); in test6()
1529 res_vuc = vec_max(vuc, vuc); in test6()
1533 res_vuc = vec_max(vbc, vuc); in test6()
1537 res_vuc = vec_max(vuc, vbc); in test6()
1605 res_vuc = vec_vmaxub(vuc, vuc); in test6()
1609 res_vuc = vec_vmaxub(vbc, vuc); in test6()
1613 res_vuc = vec_vmaxub(vuc, vbc); in test6()
1674 res_vuc = vec_mergeh(vuc, vuc); in test6()
1718 res_vuc = vec_vmrghb(vuc, vuc); in test6()
1763 res_vuc = vec_mergel(vuc, vuc); in test6()
1807 res_vuc = vec_vmrglb(vuc, vuc); in test6()
1865 res_vuc = vec_min(vuc, vuc); in test6()
1869 res_vuc = vec_min(vbc, vuc); in test6()
1873 res_vuc = vec_min(vuc, vbc); in test6()
1941 res_vuc = vec_vminub(vuc, vuc); in test6()
1945 res_vuc = vec_vminub(vbc, vuc); in test6()
1949 res_vuc = vec_vminub(vuc, vbc); in test6()
2040 res_vi = vec_msum(vsc, vuc, vi); in test6()
2044 res_vui = vec_msum(vuc, vuc, vui); in test6()
2056 res_vi = vec_vmsummbm(vsc, vuc, vi); in test6()
2060 res_vui = vec_vmsumubm(vuc, vuc, vui); in test6()
2094 vec_mtvscr(vuc); in test6()
2135 res_vus = vec_mule(vuc, vuc); in test6()
2151 res_vus = vec_vmuleub(vuc, vuc); in test6()
2168 res_vus = vec_mulo(vuc, vuc); in test6()
2184 res_vus = vec_vmuloub(vuc, vuc); in test6()
2212 res_vuc = vec_nor(vuc, vuc); in test6()
2272 res_vuc = vec_vnor(vuc, vuc); in test6()
2339 res_vuc = vec_or(vuc, vuc); in test6()
2343 res_vuc = vec_or(vbc, vuc); in test6()
2347 res_vuc = vec_or(vuc, vbc); in test6()
2435 res_vuc = vec_vor(vuc, vuc); in test6()
2439 res_vuc = vec_vor(vbc, vuc); in test6()
2443 res_vuc = vec_vor(vuc, vbc); in test6()
2644 res_vsc = vec_perm(vsc, vsc, vuc); in test6()
2648 res_vuc = vec_perm(vuc, vuc, vuc); in test6()
2652 res_vbc = vec_perm(vbc, vbc, vuc); in test6()
2656 res_vs = vec_perm(vs, vs, vuc); in test6()
2660 res_vus = vec_perm(vus, vus, vuc); in test6()
2664 res_vbs = vec_perm(vbs, vbs, vuc); in test6()
2668 res_vp = vec_perm(vp, vp, vuc); in test6()
2672 res_vi = vec_perm(vi, vi, vuc); in test6()
2676 res_vui = vec_perm(vui, vui, vuc); in test6()
2680 res_vbi = vec_perm(vbi, vbi, vuc); in test6()
2684 res_vf = vec_perm(vf, vf, vuc); in test6()
2688 res_vsc = vec_vperm(vsc, vsc, vuc); in test6()
2692 res_vuc = vec_vperm(vuc, vuc, vuc); in test6()
2696 res_vbc = vec_vperm(vbc, vbc, vuc); in test6()
2700 res_vs = vec_vperm(vs, vs, vuc); in test6()
2704 res_vus = vec_vperm(vus, vus, vuc); in test6()
2708 res_vbs = vec_vperm(vbs, vbs, vuc); in test6()
2712 res_vp = vec_vperm(vp, vp, vuc); in test6()
2716 res_vi = vec_vperm(vi, vi, vuc); in test6()
2720 res_vui = vec_vperm(vui, vui, vuc); in test6()
2724 res_vbi = vec_vperm(vbi, vbi, vuc); in test6()
2728 res_vf = vec_vperm(vf, vf, vuc); in test6()
2742 res_vsc = vec_rl(vsc, vuc); in test6()
2746 res_vuc = vec_rl(vuc, vuc); in test6()
2766 res_vsc = vec_vrlb(vsc, vuc); in test6()
2770 res_vuc = vec_vrlb(vuc, vuc); in test6()
2809 res_vsc = vec_sel(vsc, vsc, vuc); in test6()
2829 res_vuc = vec_sel(vuc, vuc, vuc); in test6()
2839 res_vuc = vec_sel(vuc, vuc, vbc); in test6()
2849 res_vbc = vec_sel(vbc, vbc, vuc); in test6()
3009 res_vsc = vec_vsel(vsc, vsc, vuc); in test6()
3029 res_vuc = vec_vsel(vuc, vuc, vuc); in test6()
3039 res_vuc = vec_vsel(vuc, vuc, vbc); in test6()
3049 res_vbc = vec_vsel(vbc, vbc, vuc); in test6()
3210 res_vsc = vec_sl(vsc, vuc); in test6()
3214 res_vuc = vec_sl(vuc, vuc); in test6()
3234 res_vsc = vec_vslb(vsc, vuc); in test6()
3238 res_vuc = vec_vslb(vuc, vuc); in test6()
3263 res_vuc = vec_sld(vuc, vuc, 0); in test6()
3295 res_vuc = vec_vsldoi(vuc, vuc, 0); in test6()
3324 res_vsc = vec_sll(vsc, vuc); in test6()
3336 res_vuc = vec_sll(vuc, vuc); in test6()
3340 res_vuc = vec_sll(vuc, vus); in test6()
3344 res_vuc = vec_sll(vuc, vui); in test6()
3348 res_vbc = vec_sll(vbc, vuc); in test6()
3360 res_vs = vec_sll(vs, vuc); in test6()
3372 res_vus = vec_sll(vus, vuc); in test6()
3384 res_vbs = vec_sll(vbs, vuc); in test6()
3396 res_vp = vec_sll(vp, vuc); in test6()
3408 res_vi = vec_sll(vi, vuc); in test6()
3420 res_vui = vec_sll(vui, vuc); in test6()
3432 res_vbi = vec_sll(vbi, vuc); in test6()
3444 res_vsc = vec_vsl(vsc, vuc); in test6()
3456 res_vuc = vec_vsl(vuc, vuc); in test6()
3460 res_vuc = vec_vsl(vuc, vus); in test6()
3464 res_vuc = vec_vsl(vuc, vui); in test6()
3468 res_vbc = vec_vsl(vbc, vuc); in test6()
3480 res_vs = vec_vsl(vs, vuc); in test6()
3492 res_vus = vec_vsl(vus, vuc); in test6()
3504 res_vbs = vec_vsl(vbs, vuc); in test6()
3516 res_vp = vec_vsl(vp, vuc); in test6()
3528 res_vi = vec_vsl(vi, vuc); in test6()
3540 res_vui = vec_vsl(vui, vuc); in test6()
3552 res_vbi = vec_vsl(vbi, vuc); in test6()
3569 res_vsc = vec_slo(vsc, vuc); in test6()
3573 res_vuc = vec_slo(vuc, vsc); in test6()
3577 res_vuc = vec_slo(vuc, vuc); in test6()
3585 res_vs = vec_slo(vs, vuc); in test6()
3593 res_vus = vec_slo(vus, vuc); in test6()
3601 res_vp = vec_slo(vp, vuc); in test6()
3609 res_vi = vec_slo(vi, vuc); in test6()
3617 res_vui = vec_slo(vui, vuc); in test6()
3625 res_vf = vec_slo(vf, vuc); in test6()
3633 res_vsc = vec_vslo(vsc, vuc); in test6()
3637 res_vuc = vec_vslo(vuc, vsc); in test6()
3641 res_vuc = vec_vslo(vuc, vuc); in test6()
3649 res_vs = vec_vslo(vs, vuc); in test6()
3657 res_vus = vec_vslo(vus, vuc); in test6()
3665 res_vp = vec_vslo(vp, vuc); in test6()
3673 res_vi = vec_vslo(vi, vuc); in test6()
3681 res_vui = vec_vslo(vui, vuc); in test6()
3689 res_vf = vec_vslo(vf, vuc); in test6()
3698 res_vuc = vec_splat(vuc, 0); in test6()
3742 res_vuc = vec_vspltb(vuc, 0); in test6()
3804 res_vsc = vec_sr(vsc, vuc); in test6()
3808 res_vuc = vec_sr(vuc, vuc); in test6()
3828 res_vsc = vec_vsrb(vsc, vuc); in test6()
3832 res_vuc = vec_vsrb(vuc, vuc); in test6()
3853 res_vsc = vec_sra(vsc, vuc); in test6()
3857 res_vuc = vec_sra(vuc, vuc); in test6()
3877 res_vsc = vec_vsrab(vsc, vuc); in test6()
3881 res_vuc = vec_vsrab(vuc, vuc); in test6()
3902 res_vsc = vec_srl(vsc, vuc); in test6()
3914 res_vuc = vec_srl(vuc, vuc); in test6()
3918 res_vuc = vec_srl(vuc, vus); in test6()
3922 res_vuc = vec_srl(vuc, vui); in test6()
3926 res_vbc = vec_srl(vbc, vuc); in test6()
3938 res_vs = vec_srl(vs, vuc); in test6()
3950 res_vus = vec_srl(vus, vuc); in test6()
3962 res_vbs = vec_srl(vbs, vuc); in test6()
3974 res_vp = vec_srl(vp, vuc); in test6()
3986 res_vi = vec_srl(vi, vuc); in test6()
3998 res_vui = vec_srl(vui, vuc); in test6()
4010 res_vbi = vec_srl(vbi, vuc); in test6()
4022 res_vsc = vec_vsr(vsc, vuc); in test6()
4034 res_vuc = vec_vsr(vuc, vuc); in test6()
4038 res_vuc = vec_vsr(vuc, vus); in test6()
4042 res_vuc = vec_vsr(vuc, vui); in test6()
4046 res_vbc = vec_vsr(vbc, vuc); in test6()
4058 res_vs = vec_vsr(vs, vuc); in test6()
4070 res_vus = vec_vsr(vus, vuc); in test6()
4082 res_vbs = vec_vsr(vbs, vuc); in test6()
4094 res_vp = vec_vsr(vp, vuc); in test6()
4106 res_vi = vec_vsr(vi, vuc); in test6()
4118 res_vui = vec_vsr(vui, vuc); in test6()
4130 res_vbi = vec_vsr(vbi, vuc); in test6()
4147 res_vsc = vec_sro(vsc, vuc); in test6()
4151 res_vuc = vec_sro(vuc, vsc); in test6()
4155 res_vuc = vec_sro(vuc, vuc); in test6()
4163 res_vs = vec_sro(vs, vuc); in test6()
4171 res_vus = vec_sro(vus, vuc); in test6()
4179 res_vp = vec_sro(vp, vuc); in test6()
4187 res_vi = vec_sro(vi, vuc); in test6()
4195 res_vui = vec_sro(vui, vuc); in test6()
4203 res_vf = vec_sro(vf, vuc); in test6()
4211 res_vsc = vec_vsro(vsc, vuc); in test6()
4215 res_vuc = vec_vsro(vuc, vsc); in test6()
4219 res_vuc = vec_vsro(vuc, vuc); in test6()
4227 res_vs = vec_vsro(vs, vuc); in test6()
4235 res_vus = vec_vsro(vus, vuc); in test6()
4243 res_vp = vec_vsro(vp, vuc); in test6()
4251 res_vi = vec_vsro(vi, vuc); in test6()
4259 res_vui = vec_vsro(vui, vuc); in test6()
4267 res_vf = vec_vsro(vf, vuc); in test6()
4280 vec_st(vuc, 0, &vuc); in test6()
4284 vec_st(vuc, 0, &param_uc); in test6()
4384 vec_stvx(vuc, 0, &vuc); in test6()
4388 vec_stvx(vuc, 0, &param_uc); in test6()
4485 vec_ste(vuc, 0, &param_uc); in test6()
4545 vec_stvebx(vuc, 0, &param_uc); in test6()
4610 vec_stl(vuc, 0, &vuc); in test6()
4614 vec_stl(vuc, 0, &param_uc); in test6()
4714 vec_stvxl(vuc, 0, &vuc); in test6()
4718 vec_stvxl(vuc, 0, &param_uc); in test6()
4823 res_vuc = vec_sub(vuc, vuc); in test6()
4827 res_vuc = vec_sub(vbc, vuc); in test6()
4831 res_vuc = vec_sub(vuc, vbc); in test6()
4899 res_vuc = vec_vsububm(vuc, vuc); in test6()
4903 res_vuc = vec_vsububm(vbc, vuc); in test6()
4907 res_vuc = vec_vsububm(vuc, vbc); in test6()
4985 res_vuc = vec_subs(vuc, vuc); in test6()
4989 res_vuc = vec_subs(vbc, vuc); in test6()
4993 res_vuc = vec_subs(vuc, vbc); in test6()
5057 res_vuc = vec_vsububs(vuc, vuc); in test6()
5061 res_vuc = vec_vsububs(vbc, vuc); in test6()
5065 res_vuc = vec_vsububs(vuc, vbc); in test6()
5122 res_vui = vec_sum4s(vuc, vui); in test6()
5134 res_vui = vec_vsum4ubs(vuc, vui); in test6()
5270 res_vuc = vec_xor(vuc, vuc); in test6()
5274 res_vuc = vec_xor(vbc, vuc); in test6()
5278 res_vuc = vec_xor(vuc, vbc); in test6()
5366 res_vuc = vec_vxor(vuc, vuc); in test6()
5370 res_vuc = vec_vxor(vbc, vuc); in test6()
5374 res_vuc = vec_vxor(vuc, vbc); in test6()
5457 res_uc = vec_extract(vuc, param_i); in test6()
5486 res_vuc = vec_insert(param_uc, vuc, param_i); in test6()
5541 res_vuc = vec_lvlx(0, &vuc); in test6()
5712 res_vuc = vec_lvlxl(0, &vuc); in test6()
5883 res_vuc = vec_lvrx(0, &vuc); in test6()
6054 res_vuc = vec_lvrxl(0, &vuc); in test6()
6227 vec_stvlx(vuc, 0, &param_uc); in test6()
6243 vec_stvlx(vuc, 0, &vuc); in test6()
6500 vec_stvlxl(vuc, 0, &param_uc); in test6()
6516 vec_stvlxl(vuc, 0, &vuc); in test6()
6773 vec_stvrx(vuc, 0, &param_uc); in test6()
6789 vec_stvrx(vuc, 0, &vuc); in test6()
7046 vec_stvrxl(vuc, 0, &param_uc); in test6()
7062 vec_stvrxl(vuc, 0, &vuc); in test6()
7369 res_i = vec_all_eq(vuc, vuc); in test6()
7373 res_i = vec_all_eq(vuc, vbc); in test6()
7381 res_i = vec_all_eq(vbc, vuc); in test6()
7462 res_i = vec_all_ge(vuc, vuc); in test6()
7466 res_i = vec_all_ge(vuc, vbc); in test6()
7474 res_i = vec_all_ge(vbc, vuc); in test6()
7551 res_i = vec_all_gt(vuc, vuc); in test6()
7555 res_i = vec_all_gt(vuc, vbc); in test6()
7563 res_i = vec_all_gt(vbc, vuc); in test6()
7645 res_i = vec_all_le(vuc, vuc); in test6()
7649 res_i = vec_all_le(vuc, vbc); in test6()
7657 res_i = vec_all_le(vbc, vuc); in test6()
7734 res_i = vec_all_lt(vuc, vuc); in test6()
7738 res_i = vec_all_lt(vuc, vbc); in test6()
7746 res_i = vec_all_lt(vbc, vuc); in test6()
7828 res_i = vec_all_ne(vuc, vuc); in test6()
7832 res_i = vec_all_ne(vuc, vbc); in test6()
7840 res_i = vec_all_ne(vbc, vuc); in test6()
7946 res_i = vec_any_eq(vuc, vuc); in test6()
7950 res_i = vec_any_eq(vuc, vbc); in test6()
7958 res_i = vec_any_eq(vbc, vuc); in test6()
8039 res_i = vec_any_ge(vuc, vuc); in test6()
8043 res_i = vec_any_ge(vuc, vbc); in test6()
8051 res_i = vec_any_ge(vbc, vuc); in test6()
8128 res_i = vec_any_gt(vuc, vuc); in test6()
8132 res_i = vec_any_gt(vuc, vbc); in test6()
8140 res_i = vec_any_gt(vbc, vuc); in test6()
8217 res_i = vec_any_le(vuc, vuc); in test6()
8221 res_i = vec_any_le(vuc, vbc); in test6()
8229 res_i = vec_any_le(vbc, vuc); in test6()
8306 res_i = vec_any_lt(vuc, vuc); in test6()
8310 res_i = vec_any_lt(vuc, vbc); in test6()
8318 res_i = vec_any_lt(vbc, vuc); in test6()
8400 res_i = vec_any_ne(vuc, vuc); in test6()
8404 res_i = vec_any_ne(vuc, vbc); in test6()
8412 res_i = vec_any_ne(vbc, vuc); in test6()