Lines matching refs:vul
15 volatile vector unsigned long long vul; variable
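
The listing shows only the definition of vul itself, but the referenced lines also use vsl, vbl, vd, vuc, vus, vui, ul, idx, cc and several pointer operands. Below is a minimal sketch of the declarations such a test presumably relies on, reconstructed from the usage in the listing; the exact qualifiers and the compile invocation are assumptions, for a SystemZ target with the z/Vector language extension.

    /* Reconstructed declarations -- an assumption based on the references
     * below, not the original file.  Compile with something like:
     *   clang -target s390x-linux-gnu -march=z13 -mzvector -c decls.c   */
    #include <vecintrin.h>

    volatile vector signed long long vsl;   /* 2 x i64, signed           */
    volatile vector unsigned long long vul; /* 2 x i64, unsigned (above) */
    volatile vector bool long long vbl;     /* 2 x i64, boolean mask     */
    volatile vector unsigned char vuc;      /* 16 x i8                   */
    volatile vector unsigned short vus;     /* 8 x i16                   */
    volatile vector unsigned int vui;       /* 4 x i32                   */
    volatile vector double vd;              /* 2 x f64                   */

    volatile unsigned long long ul;         /* scalar element value      */
    volatile int idx;                       /* runtime element index     */
    int cc;                                 /* condition-code output     */

    const signed long long *cptrsl;         /* read-only memory operands */
    const unsigned long long *cptrul;
    const double *cptrd;
    signed long long *ptrsl;                /* writable memory operands  */
    unsigned long long *ptrul;
    double *ptrd;
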
96 ul = vec_extract(vul, idx); in test_core()
123 vul = vec_insert(ul, vul, idx); in test_core()
125 vul = vec_insert(ul, vbl, idx); in test_core()
144 vul = vec_promote(ul, idx); in test_core()
163 vul = vec_insert_and_zero(cptrul); in test_core()
188 vul = vec_perm(vul, vul, vuc); in test_core()
203 vul = vec_permi(vul, vul, 0); in test_core()
205 vul = vec_permi(vul, vul, 1); in test_core()
207 vul = vec_permi(vul, vul, 2); in test_core()
209 vul = vec_permi(vul, vul, 3); in test_core()
246 vsl = vec_sel(vsl, vsl, vul); in test_core()
248 vul = vec_sel(vul, vul, vul); in test_core()
249 vul = vec_sel(vul, vul, vbl); in test_core()
250 vbl = vec_sel(vbl, vbl, vul); in test_core()
252 vd = vec_sel(vd, vd, vul); in test_core()
267 vsl = vec_gather_element(vsl, vul, cptrsl, 0); in test_core()
268 vsl = vec_gather_element(vsl, vul, cptrsl, 1); in test_core()
269 vul = vec_gather_element(vul, vul, cptrul, 0); in test_core()
270 vul = vec_gather_element(vul, vul, cptrul, 1); in test_core()
271 vbl = vec_gather_element(vbl, vul, cptrul, 0); in test_core()
272 vbl = vec_gather_element(vbl, vul, cptrul, 1); in test_core()
273 vd = vec_gather_element(vd, vul, cptrd, 0); in test_core()
274 vd = vec_gather_element(vd, vul, cptrd, 1); in test_core()
288 vec_scatter_element(vsl, vul, ptrsl, 0); in test_core()
289 vec_scatter_element(vsl, vul, ptrsl, 1); in test_core()
290 vec_scatter_element(vul, vul, ptrul, 0); in test_core()
291 vec_scatter_element(vul, vul, ptrul, 1); in test_core()
292 vec_scatter_element(vbl, vul, ptrul, 0); in test_core()
293 vec_scatter_element(vbl, vul, ptrul, 1); in test_core()
294 vec_scatter_element(vd, vul, ptrd, 0); in test_core()
295 vec_scatter_element(vd, vul, ptrd, 1); in test_core()
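
The vec_gather_element/vec_scatter_element references above move one doubleword lane at a time between a vector and memory, using per-lane byte offsets. A minimal sketch of that pattern under the same target assumptions; the table, the offsets, and the helper names are illustrative, not from the original test.

    #include <vecintrin.h>

    static unsigned long long table[4] = {10, 20, 30, 40};

    /* Load lane 0 and lane 1 from table through per-lane byte offsets. */
    vector unsigned long long gather_two(void) {
      vector unsigned long long offsets =
          {0, 3 * sizeof(unsigned long long)};      /* table[0] and table[3] */
      vector unsigned long long v = vec_splats(0ULL);
      v = vec_gather_element(v, offsets, table, 0); /* fill lane 0 */
      v = vec_gather_element(v, offsets, table, 1); /* fill lane 1 */
      return v;                                     /* {10, 40}    */
    }

    /* Store the two lanes back through the same offsets. */
    void scatter_two(vector unsigned long long v) {
      vector unsigned long long offsets =
          {0, 3 * sizeof(unsigned long long)};
      vec_scatter_element(v, offsets, table, 0);    /* lane 0 -> table[0] */
      vec_scatter_element(v, offsets, table, 1);    /* lane 1 -> table[3] */
    }
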
304 vul = vec_xld2(idx, cptrul); in test_core()
321 vec_xstd2(vul, idx, ptrul); in test_core()
345 vul = vec_load_bndry(cptrul, 64); in test_core()
376 vul = vec_load_len(cptrul, idx); in test_core()
395 vec_store_len(vul, ptrul, idx); in test_core()
401 vul = vec_load_pair(ul, ul); in test_core()
428 vul = vec_genmasks_64(0, 63); in test_core()
430 vul = vec_genmasks_64(3, 40); in test_core()
432 vul = vec_genmasks_64(30, 11); in test_core()
475 vul = vec_splat(vul, 0); in test_core()
477 vul = vec_splat(vul, 1); in test_core()
516 vul = vec_splat_u64(-32768); in test_core()
518 vul = vec_splat_u64(32767); in test_core()
535 vul = vec_splats(ul); in test_core()
564 vul = vec_mergeh(vul, vul); in test_core()
591 vul = vec_mergel(vul, vul); in test_core()
612 vui = vec_pack(vul, vul); in test_core()
627 vui = vec_packs(vul, vul); in test_core()
640 vui = vec_packs_cc(vul, vul, &cc); in test_core()
653 vui = vec_packsu(vul, vul); in test_core()
660 vui = vec_packsu_cc(vul, vul, &cc); in test_core()
677 vul = vec_unpackh(vui); in test_core()
696 vul = vec_unpackl(vui); in test_core()
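
The splat/merge/pack/unpack references above round out test_core(). One common way those pieces fit together is widening 32-bit lanes to 64 bits, operating there, and narrowing back; a minimal sketch under the same assumptions (the helper name and the added bias are illustrative).

    #include <vecintrin.h>

    /* Widen 4 x u32 into two 2 x u64 halves, add a bias, truncate back. */
    vector unsigned int widen_add_narrow(vector unsigned int a) {
      vector unsigned long long hi = vec_unpackh(a); /* leftmost lanes 0..1  */
      vector unsigned long long lo = vec_unpackl(a); /* rightmost lanes 2..3 */
      vector unsigned long long bias = vec_splats(1ULL);
      hi += bias;                 /* plain operators work on zvector types */
      lo += bias;
      return vec_pack(hi, lo);    /* modular truncation back to 4 x u32 */
    }
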
723 vbl = vec_cmpeq(vul, vul); in test_compare()
744 vbl = vec_cmpge(vul, vul); in test_compare()
763 vbl = vec_cmpgt(vul, vul); in test_compare()
782 vbl = vec_cmple(vul, vul); in test_compare()
801 vbl = vec_cmplt(vul, vul); in test_compare()
854 idx = vec_all_eq(vul, vul); in test_compare()
856 idx = vec_all_eq(vul, vbl); in test_compare()
858 idx = vec_all_eq(vbl, vul); in test_compare()
913 idx = vec_all_ne(vul, vul); in test_compare()
915 idx = vec_all_ne(vul, vbl); in test_compare()
917 idx = vec_all_ne(vbl, vul); in test_compare()
972 idx = vec_all_ge(vul, vul); in test_compare()
974 idx = vec_all_ge(vul, vbl); in test_compare()
976 idx = vec_all_ge(vbl, vul); in test_compare()
1031 idx = vec_all_gt(vul, vul); in test_compare()
1033 idx = vec_all_gt(vul, vbl); in test_compare()
1035 idx = vec_all_gt(vbl, vul); in test_compare()
1090 idx = vec_all_le(vul, vul); in test_compare()
1092 idx = vec_all_le(vul, vbl); in test_compare()
1094 idx = vec_all_le(vbl, vul); in test_compare()
1149 idx = vec_all_lt(vul, vul); in test_compare()
1151 idx = vec_all_lt(vul, vbl); in test_compare()
1153 idx = vec_all_lt(vbl, vul); in test_compare()
1222 idx = vec_any_eq(vul, vul); in test_compare()
1224 idx = vec_any_eq(vul, vbl); in test_compare()
1226 idx = vec_any_eq(vbl, vul); in test_compare()
1281 idx = vec_any_ne(vul, vul); in test_compare()
1283 idx = vec_any_ne(vul, vbl); in test_compare()
1285 idx = vec_any_ne(vbl, vul); in test_compare()
1340 idx = vec_any_ge(vul, vul); in test_compare()
1342 idx = vec_any_ge(vul, vbl); in test_compare()
1344 idx = vec_any_ge(vbl, vul); in test_compare()
1399 idx = vec_any_gt(vul, vul); in test_compare()
1401 idx = vec_any_gt(vul, vbl); in test_compare()
1403 idx = vec_any_gt(vbl, vul); in test_compare()
1458 idx = vec_any_le(vul, vul); in test_compare()
1460 idx = vec_any_le(vul, vbl); in test_compare()
1462 idx = vec_any_le(vbl, vul); in test_compare()
1517 idx = vec_any_lt(vul, vul); in test_compare()
1519 idx = vec_any_lt(vul, vbl); in test_compare()
1521 idx = vec_any_lt(vbl, vul); in test_compare()
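
The comparison references fall into two groups: vec_cmp* return a per-lane vector bool long long mask, while vec_all_*/vec_any_* collapse the comparison into a single int. A minimal sketch of both uses under the same assumptions; clamp_to_limit, all_in_range and any_zero are illustrative helpers.

    #include <vecintrin.h>

    /* Per-lane: replace any lane greater than limit by limit. */
    vector unsigned long long clamp_to_limit(vector unsigned long long v,
                                             vector unsigned long long limit) {
      vector bool long long too_big = vec_cmpgt(v, limit); /* all-ones where v > limit */
      return vec_sel(v, limit, too_big);                   /* set mask bits pick limit */
    }

    /* Whole-vector predicates: a single int answer. */
    int all_in_range(vector unsigned long long v,
                     vector unsigned long long limit) {
      return vec_all_le(v, limit);             /* 1 only if every lane is <= */
    }

    int any_zero(vector unsigned long long v) {
      return vec_any_eq(v, vec_splats(0ULL));  /* 1 if at least one lane is 0 */
    }
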
1568 vul = vec_andc(vul, vul); in test_integer()
1569 vul = vec_andc(vul, vbl); in test_integer()
1570 vul = vec_andc(vbl, vul); in test_integer()
1600 vul = vec_nor(vul, vul); in test_integer()
1601 vul = vec_nor(vul, vbl); in test_integer()
1602 vul = vec_nor(vbl, vul); in test_integer()
1620 vul = vec_cntlz(vsl); in test_integer()
1622 vul = vec_cntlz(vul); in test_integer()
1637 vul = vec_cnttz(vsl); in test_integer()
1639 vul = vec_cnttz(vul); in test_integer()
1654 vul = vec_popcnt(vsl); in test_integer()
1656 vul = vec_popcnt(vul); in test_integer()
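
vec_cntlz, vec_cnttz and vec_popcnt above all return per-lane bit counts as vector unsigned long long. A minimal sketch; the helper name and the way the counts are combined are illustrative.

    #include <vecintrin.h>

    vector unsigned long long bit_stats(vector unsigned long long v) {
      vector unsigned long long lz  = vec_cntlz(v);  /* leading zeros per lane  */
      vector unsigned long long tz  = vec_cnttz(v);  /* trailing zeros per lane */
      vector unsigned long long pop = vec_popcnt(v); /* set bits per lane       */
      return lz + tz + pop;    /* combined only to keep all three results live */
    }
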
1671 vsl = vec_rl(vsl, vul); in test_integer()
1673 vul = vec_rl(vul, vul); in test_integer()
1690 vul = vec_rli(vul, ul); in test_integer()
1717 vsl = vec_rl_mask(vsl, vul, 0); in test_integer()
1719 vsl = vec_rl_mask(vsl, vul, 255); in test_integer()
1721 vul = vec_rl_mask(vul, vul, 0); in test_integer()
1723 vul = vec_rl_mask(vul, vul, 255); in test_integer()
1786 vul = vec_sll(vul, vuc); in test_integer()
1788 vul = vec_sll(vul, vus); in test_integer()
1790 vul = vec_sll(vul, vui); in test_integer()
1825 vsl = vec_slb(vsl, vul); in test_integer()
1827 vul = vec_slb(vul, vsl); in test_integer()
1829 vul = vec_slb(vul, vul); in test_integer()
1833 vd = vec_slb(vd, vul); in test_integer()
1864 vul = vec_sld(vul, vul, 0); in test_integer()
1866 vul = vec_sld(vul, vul, 15); in test_integer()
1901 vul = vec_sldw(vul, vul, 0); in test_integer()
1903 vul = vec_sldw(vul, vul, 3); in test_integer()
1970 vul = vec_sral(vul, vuc); in test_integer()
1972 vul = vec_sral(vul, vus); in test_integer()
1974 vul = vec_sral(vul, vui); in test_integer()
2009 vsl = vec_srab(vsl, vul); in test_integer()
2011 vul = vec_srab(vul, vsl); in test_integer()
2013 vul = vec_srab(vul, vul); in test_integer()
2017 vd = vec_srab(vd, vul); in test_integer()
2080 vul = vec_srl(vul, vuc); in test_integer()
2082 vul = vec_srl(vul, vus); in test_integer()
2084 vul = vec_srl(vul, vui); in test_integer()
2119 vsl = vec_srb(vsl, vul); in test_integer()
2121 vul = vec_srb(vul, vsl); in test_integer()
2123 vul = vec_srb(vul, vul); in test_integer()
2127 vd = vec_srb(vd, vul); in test_integer()
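
The shift references above cover bit shifts of the whole register (vec_sll/vec_srl), byte-granular shifts (vec_slb/vec_srb), arithmetic variants (vec_sral/vec_srab) and the concatenating byte shifts vec_sld/vec_sldw. The last of these is the easiest to show in isolation; a minimal sketch under the same assumptions.

    #include <vecintrin.h>

    /* vec_sld(a, b, n) takes 16 bytes starting at byte n of the 32-byte
     * concatenation a||b.  With the same vector on both sides and n == 8
     * this simply swaps the two doubleword lanes. */
    vector unsigned long long swap_lanes(vector unsigned long long v) {
      return vec_sld(v, v, 8);
    }
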
2156 vul = vec_max(vul, vul); in test_integer()
2157 vul = vec_max(vul, vbl); in test_integer()
2158 vul = vec_max(vbl, vul); in test_integer()
2182 vul = vec_min(vul, vul); in test_integer()
2183 vul = vec_min(vul, vbl); in test_integer()
2184 vul = vec_min(vbl, vul); in test_integer()
2193 vul = vec_addc(vul, vul); in test_integer()
2219 vul = vec_avg(vul, vul); in test_integer()
2229 vul = vec_gfmsum(vui, vui); in test_integer()
2231 vuc = vec_gfmsum_128(vul, vul); in test_integer()
2238 vul = vec_gfmsum_accum(vui, vui, vul); in test_integer()
2240 vuc = vec_gfmsum_accum_128(vul, vul, vuc); in test_integer()
2279 vul = vec_meadd(vui, vui, vul); in test_integer()
2292 vul = vec_moadd(vui, vui, vul); in test_integer()
2318 vul = vec_mule(vui, vui); in test_integer()
2331 vul = vec_mulo(vui, vui); in test_integer()
2340 vul = vec_subc(vul, vul); in test_integer()
2356 vul = vec_sum2(vus, vus); in test_integer()
2358 vul = vec_sum2(vui, vui); in test_integer()
2362 vuc = vec_sum_u128(vul, vul); in test_integer()
2377 idx = vec_test_mask(vsl, vul); in test_integer()
2379 idx = vec_test_mask(vul, vul); in test_integer()
2381 idx = vec_test_mask(vd, vul); in test_integer()
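
vec_test_mask closes out the test_integer() references: it ANDs its first operand with the mask and returns the resulting condition code (0 when no selected bit is set, 3 when all selected bits are set, 1 otherwise). A minimal sketch; the helper and the choice of mask are illustrative.

    #include <vecintrin.h>

    /* Report whether any lane of v has its low bit set. */
    int any_lane_odd(vector unsigned long long v) {
      vector unsigned long long mask = vec_splats(1ULL); /* select bit 0 of each lane */
      return vec_test_mask(v, mask) != 0;                /* cc 0 means no bit selected */
    }
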
2914 vd = vec_ctd(vul, 0); in test_float()
2919 vd = vec_ctd(vul, 1); in test_float()
2925 vd = vec_ctd(vul, 31); in test_float()
2931 vul = vec_ctul(vd, 0); in test_float()
2936 vul = vec_ctul(vd, 1); in test_float()
2942 vul = vec_ctul(vd, 31); in test_float()
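
The test_float() references use the scaled conversions: vec_ctd converts 64-bit integer lanes to double and divides by 2^scale, while vec_ctul multiplies by 2^scale before converting back to unsigned 64-bit lanes (scale 0..31, as the listed arguments suggest). A minimal sketch treating the low 31 bits as a fraction; the helper names are illustrative.

    #include <vecintrin.h>

    vector double fixed_to_double(vector unsigned long long fx) {
      return vec_ctd(fx, 31);   /* per lane: (double)fx / 2^31 */
    }

    vector unsigned long long double_to_fixed(vector double d) {
      return vec_ctul(d, 31);   /* per lane: (unsigned long long)(d * 2^31) */
    }
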