Lines Matching refs:vsc
7 vector signed char vsc = { 1, -2, 3, -4, 5, -6, 7, -8, 9, -10, 11, -12, 13, -14, 15, -16 }; variable
50 vsc = vec_abs(vsc); in test1()
73 vsc = vec_abss(vsc); in test1()
92 res_vsc = vec_add(vsc, vsc); in test1()
96 res_vsc = vec_add(vbc, vsc); in test1()
100 res_vsc = vec_add(vsc, vbc); in test1()
168 res_vsc = vec_vaddubm(vsc, vsc); in test1()
172 res_vsc = vec_vaddubm(vbc, vsc); in test1()
176 res_vsc = vec_vaddubm(vsc, vbc); in test1()
254 res_vsc = vec_adds(vsc, vsc); in test1()
258 res_vsc = vec_adds(vbc, vsc); in test1()
262 res_vsc = vec_adds(vsc, vbc); in test1()
326 res_vsc = vec_vaddsbs(vsc, vsc); in test1()
330 res_vsc = vec_vaddsbs(vbc, vsc); in test1()
334 res_vsc = vec_vaddsbs(vsc, vbc); in test1()
399 res_vsc = vec_and(vsc, vsc); in test1()
403 res_vsc = vec_and(vbc, vsc); in test1()
407 res_vsc = vec_and(vsc, vbc); in test1()
483 res_vsc = vec_vand(vsc, vsc); in test1()
487 res_vsc = vec_vand(vbc, vsc); in test1()
491 res_vsc = vec_vand(vsc, vbc); in test1()
568 res_vsc = vec_andc(vsc, vsc); in test1()
574 res_vsc = vec_andc(vbc, vsc); in test1()
580 res_vsc = vec_andc(vsc, vbc); in test1()
704 res_vsc = vec_vandc(vsc, vsc); in test1()
710 res_vsc = vec_vandc(vbc, vsc); in test1()
716 res_vsc = vec_vandc(vsc, vbc); in test1()
847 res_vsc = vec_avg(vsc, vsc); in test2()
871 res_vsc = vec_vavgsb(vsc, vsc); in test2()
914 res_vbc = vec_cmpeq(vsc, vsc); in test2()
956 res_vbc = vec_cmpgt(vsc, vsc); in test5()
984 res_vbc = vec_vcmpgtsb(vsc, vsc); in test5()
1021 res_vbc = vec_cmplt(vsc, vsc); in test6()
1095 vec_dst(&vsc, 0, 0); in test6()
1133 res_vsc = vec_ld(0, &vsc); in test6()
1205 res_vsc = vec_lvx(0, &vsc); in test6()
1335 res_vsc = vec_ldl(0, &vsc); in test6()
1407 res_vsc = vec_lvxl(0, &vsc); in test6()
1517 res_vsc = vec_max(vsc, vsc); in test6()
1521 res_vsc = vec_max(vbc, vsc); in test6()
1525 res_vsc = vec_max(vsc, vbc); in test6()
1593 res_vsc = vec_vmaxsb(vsc, vsc); in test6()
1597 res_vsc = vec_vmaxsb(vbc, vsc); in test6()
1601 res_vsc = vec_vmaxsb(vsc, vbc); in test6()
1670 res_vsc = vec_mergeh(vsc, vsc); in test6()
1714 res_vsc = vec_vmrghb(vsc, vsc); in test6()
1759 res_vsc = vec_mergel(vsc, vsc); in test6()
1803 res_vsc = vec_vmrglb(vsc, vsc); in test6()
1853 res_vsc = vec_min(vsc, vsc); in test6()
1857 res_vsc = vec_min(vbc, vsc); in test6()
1861 res_vsc = vec_min(vsc, vbc); in test6()
1929 res_vsc = vec_vminsb(vsc, vsc); in test6()
1933 res_vsc = vec_vminsb(vbc, vsc); in test6()
1937 res_vsc = vec_vminsb(vsc, vbc); in test6()
2040 res_vi = vec_msum(vsc, vuc, vi); in test6()
2056 res_vi = vec_vmsummbm(vsc, vuc, vi); in test6()
2090 vec_mtvscr(vsc); in test6()
2131 res_vs = vec_mule(vsc, vsc); in test6()
2147 res_vs = vec_vmulesb(vsc, vsc); in test6()
2164 res_vs = vec_mulo(vsc, vsc); in test6()
2180 res_vs = vec_vmulosb(vsc, vsc); in test6()
2206 res_vsc = vec_nor(vsc, vsc); in test6()
2266 res_vsc = vec_vnor(vsc, vsc); in test6()
2327 res_vsc = vec_or(vsc, vsc); in test6()
2331 res_vsc = vec_or(vbc, vsc); in test6()
2335 res_vsc = vec_or(vsc, vbc); in test6()
2423 res_vsc = vec_vor(vsc, vsc); in test6()
2427 res_vsc = vec_vor(vbc, vsc); in test6()
2431 res_vsc = vec_vor(vsc, vbc); in test6()
2644 res_vsc = vec_perm(vsc, vsc, vuc); in test6()
2688 res_vsc = vec_vperm(vsc, vsc, vuc); in test6()
2742 res_vsc = vec_rl(vsc, vuc); in test6()
2766 res_vsc = vec_vrlb(vsc, vuc); in test6()
2809 res_vsc = vec_sel(vsc, vsc, vuc); in test6()
2819 res_vsc = vec_sel(vsc, vsc, vbc); in test6()
3009 res_vsc = vec_vsel(vsc, vsc, vuc); in test6()
3019 res_vsc = vec_vsel(vsc, vsc, vbc); in test6()
3210 res_vsc = vec_sl(vsc, vuc); in test6()
3234 res_vsc = vec_vslb(vsc, vuc); in test6()
3259 res_vsc = vec_sld(vsc, vsc, 0); in test6()
3291 res_vsc = vec_vsldoi(vsc, vsc, 0); in test6()
3324 res_vsc = vec_sll(vsc, vuc); in test6()
3328 res_vsc = vec_sll(vsc, vus); in test6()
3332 res_vsc = vec_sll(vsc, vui); in test6()
3444 res_vsc = vec_vsl(vsc, vuc); in test6()
3448 res_vsc = vec_vsl(vsc, vus); in test6()
3452 res_vsc = vec_vsl(vsc, vui); in test6()
3565 res_vsc = vec_slo(vsc, vsc); in test6()
3569 res_vsc = vec_slo(vsc, vuc); in test6()
3573 res_vuc = vec_slo(vuc, vsc); in test6()
3581 res_vs = vec_slo(vs, vsc); in test6()
3589 res_vus = vec_slo(vus, vsc); in test6()
3597 res_vp = vec_slo(vp, vsc); in test6()
3605 res_vi = vec_slo(vi, vsc); in test6()
3613 res_vui = vec_slo(vui, vsc); in test6()
3621 res_vf = vec_slo(vf, vsc); in test6()
3629 res_vsc = vec_vslo(vsc, vsc); in test6()
3633 res_vsc = vec_vslo(vsc, vuc); in test6()
3637 res_vuc = vec_vslo(vuc, vsc); in test6()
3645 res_vs = vec_vslo(vs, vsc); in test6()
3653 res_vus = vec_vslo(vus, vsc); in test6()
3661 res_vp = vec_vslo(vp, vsc); in test6()
3669 res_vi = vec_vslo(vi, vsc); in test6()
3677 res_vui = vec_vslo(vui, vsc); in test6()
3685 res_vf = vec_vslo(vf, vsc); in test6()
3694 res_vsc = vec_splat(vsc, 0); in test6()
3738 res_vsc = vec_vspltb(vsc, 0); in test6()
3804 res_vsc = vec_sr(vsc, vuc); in test6()
3828 res_vsc = vec_vsrb(vsc, vuc); in test6()
3853 res_vsc = vec_sra(vsc, vuc); in test6()
3877 res_vsc = vec_vsrab(vsc, vuc); in test6()
3902 res_vsc = vec_srl(vsc, vuc); in test6()
3906 res_vsc = vec_srl(vsc, vus); in test6()
3910 res_vsc = vec_srl(vsc, vui); in test6()
4022 res_vsc = vec_vsr(vsc, vuc); in test6()
4026 res_vsc = vec_vsr(vsc, vus); in test6()
4030 res_vsc = vec_vsr(vsc, vui); in test6()
4143 res_vsc = vec_sro(vsc, vsc); in test6()
4147 res_vsc = vec_sro(vsc, vuc); in test6()
4151 res_vuc = vec_sro(vuc, vsc); in test6()
4159 res_vs = vec_sro(vs, vsc); in test6()
4167 res_vus = vec_sro(vus, vsc); in test6()
4175 res_vp = vec_sro(vp, vsc); in test6()
4183 res_vi = vec_sro(vi, vsc); in test6()
4191 res_vui = vec_sro(vui, vsc); in test6()
4199 res_vf = vec_sro(vf, vsc); in test6()
4207 res_vsc = vec_vsro(vsc, vsc); in test6()
4211 res_vsc = vec_vsro(vsc, vuc); in test6()
4215 res_vuc = vec_vsro(vuc, vsc); in test6()
4223 res_vs = vec_vsro(vs, vsc); in test6()
4231 res_vus = vec_vsro(vus, vsc); in test6()
4239 res_vp = vec_vsro(vp, vsc); in test6()
4247 res_vi = vec_vsro(vi, vsc); in test6()
4255 res_vui = vec_vsro(vui, vsc); in test6()
4263 res_vf = vec_vsro(vf, vsc); in test6()
4272 vec_st(vsc, 0, &vsc); in test6()
4276 vec_st(vsc, 0, &param_sc); in test6()
4376 vec_stvx(vsc, 0, &vsc); in test6()
4380 vec_stvx(vsc, 0, &param_sc); in test6()
4481 vec_ste(vsc, 0, &param_sc); in test6()
4541 vec_stvebx(vsc, 0, &param_sc); in test6()
4602 vec_stl(vsc, 0, &vsc); in test6()
4606 vec_stl(vsc, 0, &param_sc); in test6()
4706 vec_stvxl(vsc, 0, &vsc); in test6()
4710 vec_stvxl(vsc, 0, &param_sc); in test6()
4811 res_vsc = vec_sub(vsc, vsc); in test6()
4815 res_vsc = vec_sub(vbc, vsc); in test6()
4819 res_vsc = vec_sub(vsc, vbc); in test6()
4887 res_vsc = vec_vsububm(vsc, vsc); in test6()
4891 res_vsc = vec_vsububm(vbc, vsc); in test6()
4895 res_vsc = vec_vsububm(vsc, vbc); in test6()
4973 res_vsc = vec_subs(vsc, vsc); in test6()
4977 res_vsc = vec_subs(vbc, vsc); in test6()
4981 res_vsc = vec_subs(vsc, vbc); in test6()
5045 res_vsc = vec_vsubsbs(vsc, vsc); in test6()
5049 res_vsc = vec_vsubsbs(vbc, vsc); in test6()
5053 res_vsc = vec_vsubsbs(vsc, vbc); in test6()
5118 res_vi = vec_sum4s(vsc, vi); in test6()
5130 res_vi = vec_vsum4sbs(vsc, vi); in test6()
5176 res_vs = vec_unpackh(vsc); in test6()
5196 res_vs = vec_vupkhsb(vsc); in test6()
5217 res_vs = vec_unpackl(vsc); in test6()
5237 res_vs = vec_vupklsb(vsc); in test6()
5258 res_vsc = vec_xor(vsc, vsc); in test6()
5262 res_vsc = vec_xor(vbc, vsc); in test6()
5266 res_vsc = vec_xor(vsc, vbc); in test6()
5354 res_vsc = vec_vxor(vsc, vsc); in test6()
5358 res_vsc = vec_vxor(vbc, vsc); in test6()
5362 res_vsc = vec_vxor(vsc, vbc); in test6()
5453 res_sc = vec_extract(vsc, param_i); in test6()
5482 res_vsc = vec_insert(param_sc, vsc, param_i); in test6()
5521 res_vsc = vec_lvlx(0, &vsc); in test6()
5692 res_vsc = vec_lvlxl(0, &vsc); in test6()
5863 res_vsc = vec_lvrx(0, &vsc); in test6()
6034 res_vsc = vec_lvrxl(0, &vsc); in test6()
6195 vec_stvlx(vsc, 0, &param_sc); in test6()
6211 vec_stvlx(vsc, 0, &vsc); in test6()
6468 vec_stvlxl(vsc, 0, &param_sc); in test6()
6484 vec_stvlxl(vsc, 0, &vsc); in test6()
6741 vec_stvrx(vsc, 0, &param_sc); in test6()
6757 vec_stvrx(vsc, 0, &vsc); in test6()
7014 vec_stvrxl(vsc, 0, &param_sc); in test6()
7030 vec_stvrxl(vsc, 0, &vsc); in test6()
7361 res_i = vec_all_eq(vsc, vsc); in test6()
7365 res_i = vec_all_eq(vsc, vbc); in test6()
7377 res_i = vec_all_eq(vbc, vsc); in test6()
7454 res_i = vec_all_ge(vsc, vsc); in test6()
7458 res_i = vec_all_ge(vsc, vbc); in test6()
7470 res_i = vec_all_ge(vbc, vsc); in test6()
7543 res_i = vec_all_gt(vsc, vsc); in test6()
7547 res_i = vec_all_gt(vsc, vbc); in test6()
7559 res_i = vec_all_gt(vbc, vsc); in test6()
7637 res_i = vec_all_le(vsc, vsc); in test6()
7641 res_i = vec_all_le(vsc, vbc); in test6()
7653 res_i = vec_all_le(vbc, vsc); in test6()
7726 res_i = vec_all_lt(vsc, vsc); in test6()
7730 res_i = vec_all_lt(vsc, vbc); in test6()
7742 res_i = vec_all_lt(vbc, vsc); in test6()
7820 res_i = vec_all_ne(vsc, vsc); in test6()
7824 res_i = vec_all_ne(vsc, vbc); in test6()
7836 res_i = vec_all_ne(vbc, vsc); in test6()
7938 res_i = vec_any_eq(vsc, vsc); in test6()
7942 res_i = vec_any_eq(vsc, vbc); in test6()
7954 res_i = vec_any_eq(vbc, vsc); in test6()
8031 res_i = vec_any_ge(vsc, vsc); in test6()
8035 res_i = vec_any_ge(vsc, vbc); in test6()
8047 res_i = vec_any_ge(vbc, vsc); in test6()
8120 res_i = vec_any_gt(vsc, vsc); in test6()
8124 res_i = vec_any_gt(vsc, vbc); in test6()
8136 res_i = vec_any_gt(vbc, vsc); in test6()
8209 res_i = vec_any_le(vsc, vsc); in test6()
8213 res_i = vec_any_le(vsc, vbc); in test6()
8225 res_i = vec_any_le(vbc, vsc); in test6()
8298 res_i = vec_any_lt(vsc, vsc); in test6()
8302 res_i = vec_any_lt(vsc, vbc); in test6()
8314 res_i = vec_any_lt(vbc, vsc); in test6()
8392 res_i = vec_any_ne(vsc, vsc); in test6()
8396 res_i = vec_any_ne(vsc, vbc); in test6()
8408 res_i = vec_any_ne(vbc, vsc); in test6()