Lines Matching refs:vul
18 volatile vector unsigned long long vul; (variable definition)
118 ul = vec_extract(vul, idx); in test_core()
158 vul = vec_insert(ul, vul, idx); in test_core()
161 vul = vec_insert(ul, vbl, idx); in test_core()
189 vul = vec_promote(ul, idx); in test_core()
217 vul = vec_insert_and_zero(cptrul); in test_core()
254 vul = vec_perm(vul, vul, vuc); in test_core()
276 vul = vec_permi(vul, vul, 0); in test_core()
279 vul = vec_permi(vul, vul, 1); in test_core()
282 vul = vec_permi(vul, vul, 2); in test_core()
285 vul = vec_permi(vul, vul, 3); in test_core()
323 vul = vec_revb(vul); in test_core()
348 vul = vec_reve(vul); in test_core()
391 vsl = vec_sel(vsl, vsl, vul); in test_core()
395 vul = vec_sel(vul, vul, vul); in test_core()
397 vul = vec_sel(vul, vul, vbl); in test_core()
399 vbl = vec_sel(vbl, vbl, vul); in test_core()
403 vd = vec_sel(vd, vd, vul); in test_core()
432 vsl = vec_gather_element(vsl, vul, cptrsl, 0); in test_core()
434 vsl = vec_gather_element(vsl, vul, cptrsl, 1); in test_core()
436 vul = vec_gather_element(vul, vul, cptrul, 0); in test_core()
438 vul = vec_gather_element(vul, vul, cptrul, 1); in test_core()
440 vbl = vec_gather_element(vbl, vul, cptrul, 0); in test_core()
442 vbl = vec_gather_element(vbl, vul, cptrul, 1); in test_core()
444 vd = vec_gather_element(vd, vul, cptrd, 0); in test_core()
446 vd = vec_gather_element(vd, vul, cptrd, 1); in test_core()
473 vec_scatter_element(vsl, vul, ptrsl, 0); in test_core()
475 vec_scatter_element(vsl, vul, ptrsl, 1); in test_core()
477 vec_scatter_element(vul, vul, ptrul, 0); in test_core()
479 vec_scatter_element(vul, vul, ptrul, 1); in test_core()
481 vec_scatter_element(vbl, vul, ptrul, 0); in test_core()
483 vec_scatter_element(vbl, vul, ptrul, 1); in test_core()
485 vec_scatter_element(vd, vul, ptrd, 0); in test_core()
487 vec_scatter_element(vd, vul, ptrd, 1); in test_core()
504 vul = vec_xl(idx, cptrul); in test_core()
523 vul = vec_xld2(idx, cptrul); in test_core()
555 vec_xst(vul, idx, ptrul); in test_core()
574 vec_xstd2(vul, idx, ptrul); in test_core()
613 vul = vec_load_bndry(cptrul, 64); in test_core()
659 vul = vec_load_len(cptrul, idx); in test_core()
687 vec_store_len(vul, ptrul, idx); in test_core()
696 vul = vec_load_pair(ul + 1, ul - 1); in test_core()
724 vul = vec_genmasks_64(0, 63); in test_core()
726 vul = vec_genmasks_64(3, 40); in test_core()
728 vul = vec_genmasks_64(30, 11); in test_core()
791 vul = vec_splat(vul, 0); in test_core()
794 vul = vec_splat(vul, 1); in test_core()
838 vul = vec_splat_u64(-32768); in test_core()
840 vul = vec_splat_u64(32767); in test_core()
864 vul = vec_splats(ul); in test_core()
908 vul = vec_mergeh(vul, vul); in test_core()
948 vul = vec_mergel(vul, vul); in test_core()
979 vui = vec_pack(vul, vul); in test_core()
1001 vui = vec_packs(vul, vul); in test_core()
1020 vui = vec_packs_cc(vul, vul, &cc); in test_core()
1039 vui = vec_packsu(vul, vul); in test_core()
1049 vui = vec_packsu_cc(vul, vul, &cc); in test_core()
1074 vul = vec_unpackh(vui); in test_core()
1102 vul = vec_unpackl(vui); in test_core()
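
For orientation, a minimal standalone sketch (not taken from the indexed file; the helper name core_demo is illustrative only) of how the test_core() element, insert/extract and gather forms above operate on a vector unsigned long long. It assumes a SystemZ target with the vector language extension enabled, e.g. gcc -march=z13 -mzvector or clang -fzvector:

  #include <vecintrin.h>

  /* Build a vector from a scalar, read a lane, write a lane, then gather
     one lane from memory; vec_insert and vec_gather_element leave the
     other lane untouched. */
  static vector unsigned long long core_demo(unsigned long long x,
                                             const unsigned long long *base,
                                             vector unsigned long long off)
  {
    vector unsigned long long v = vec_splats(x);   /* both lanes = x          */
    unsigned long long lane0 = vec_extract(v, 0);  /* read lane 0             */
    v = vec_insert(lane0 + 1, v, 1);               /* replace lane 1          */
    v = vec_gather_element(v, off, base, 0);       /* load lane 0 from an
                                                      address derived from
                                                      base and off[0]         */
    return vec_mergeh(v, vec_splats(x));           /* result = {v[0], x}      */
  }
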
1143 vbl = vec_cmpeq(vul, vul); in test_compare()
1174 vbl = vec_cmpge(vul, vul); in test_compare()
1202 vbl = vec_cmpgt(vul, vul); in test_compare()
1230 vbl = vec_cmple(vul, vul); in test_compare()
1258 vbl = vec_cmplt(vul, vul); in test_compare()
1337 idx = vec_all_eq(vul, vul); in test_compare()
1340 idx = vec_all_eq(vul, vbl); in test_compare()
1343 idx = vec_all_eq(vbl, vul); in test_compare()
1425 idx = vec_all_ne(vul, vul); in test_compare()
1428 idx = vec_all_ne(vul, vbl); in test_compare()
1431 idx = vec_all_ne(vbl, vul); in test_compare()
1513 idx = vec_all_ge(vul, vul); in test_compare()
1516 idx = vec_all_ge(vul, vbl); in test_compare()
1519 idx = vec_all_ge(vbl, vul); in test_compare()
1601 idx = vec_all_gt(vul, vul); in test_compare()
1604 idx = vec_all_gt(vul, vbl); in test_compare()
1607 idx = vec_all_gt(vbl, vul); in test_compare()
1689 idx = vec_all_le(vul, vul); in test_compare()
1692 idx = vec_all_le(vul, vbl); in test_compare()
1695 idx = vec_all_le(vbl, vul); in test_compare()
1777 idx = vec_all_lt(vul, vul); in test_compare()
1780 idx = vec_all_lt(vul, vbl); in test_compare()
1783 idx = vec_all_lt(vbl, vul); in test_compare()
1885 idx = vec_any_eq(vul, vul); in test_compare()
1888 idx = vec_any_eq(vul, vbl); in test_compare()
1891 idx = vec_any_eq(vbl, vul); in test_compare()
1973 idx = vec_any_ne(vul, vul); in test_compare()
1976 idx = vec_any_ne(vul, vbl); in test_compare()
1979 idx = vec_any_ne(vbl, vul); in test_compare()
2061 idx = vec_any_ge(vul, vul); in test_compare()
2064 idx = vec_any_ge(vul, vbl); in test_compare()
2067 idx = vec_any_ge(vbl, vul); in test_compare()
2149 idx = vec_any_gt(vul, vul); in test_compare()
2152 idx = vec_any_gt(vul, vbl); in test_compare()
2155 idx = vec_any_gt(vbl, vul); in test_compare()
2237 idx = vec_any_le(vul, vul); in test_compare()
2240 idx = vec_any_le(vul, vbl); in test_compare()
2243 idx = vec_any_le(vbl, vul); in test_compare()
2325 idx = vec_any_lt(vul, vul); in test_compare()
2328 idx = vec_any_lt(vul, vbl); in test_compare()
2331 idx = vec_any_lt(vbl, vul); in test_compare()
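
Similarly, a small sketch (helper name illustrative, same build assumptions as above) of how the test_compare() forms pair up: the vec_cmp* intrinsics yield a per-lane vector bool long long mask, while the vec_all_*/vec_any_* predicates reduce the comparison to a scalar int:

  #include <vecintrin.h>

  /* Per-lane maximum built from a compare mask, guarded by the scalar
     predicates used throughout test_compare(). */
  static vector unsigned long long max_demo(vector unsigned long long a,
                                            vector unsigned long long b)
  {
    if (vec_all_eq(a, b))                        /* nonzero iff every lane equal */
      return a;
    if (!vec_any_gt(a, b))                       /* no lane of a exceeds b       */
      return b;
    vector bool long long gt = vec_cmpgt(a, b);  /* per-lane a > b mask          */
    return vec_sel(b, a, gt);                    /* take a's lane where mask set */
  }
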
2413 vul = vec_andc(vul, vul); in test_integer()
2415 vul = vec_andc(vul, vbl); in test_integer()
2417 vul = vec_andc(vbl, vul); in test_integer()
2476 vul = vec_nor(vul, vul); in test_integer()
2478 vul = vec_nor(vul, vbl); in test_integer()
2480 vul = vec_nor(vbl, vul); in test_integer()
2509 vul = vec_cntlz(vsl); in test_integer()
2512 vul = vec_cntlz(vul); in test_integer()
2534 vul = vec_cnttz(vsl); in test_integer()
2537 vul = vec_cnttz(vul); in test_integer()
2559 vul = vec_popcnt(vsl); in test_integer()
2562 vul = vec_popcnt(vul); in test_integer()
2584 vsl = vec_rl(vsl, vul); in test_integer()
2587 vul = vec_rl(vul, vul); in test_integer()
2612 vul = vec_rli(vul, ul); in test_integer()
2652 vsl = vec_rl_mask(vsl, vul, 0); in test_integer()
2655 vsl = vec_rl_mask(vsl, vul, 255); in test_integer()
2658 vul = vec_rl_mask(vul, vul, 0); in test_integer()
2661 vul = vec_rl_mask(vul, vul, 255); in test_integer()
2755 vul = vec_sll(vul, vuc); in test_integer()
2758 vul = vec_sll(vul, vus); in test_integer()
2761 vul = vec_sll(vul, vui); in test_integer()
2813 vsl = vec_slb(vsl, vul); in test_integer()
2816 vul = vec_slb(vul, vsl); in test_integer()
2819 vul = vec_slb(vul, vul); in test_integer()
2825 vd = vec_slb(vd, vul); in test_integer()
2889 vul = vec_sld(vul, vul, 0); in test_integer()
2892 vul = vec_sld(vul, vul, 15); in test_integer()
2950 vul = vec_sldw(vul, vul, 0); in test_integer()
2953 vul = vec_sldw(vul, vul, 3); in test_integer()
3053 vul = vec_sral(vul, vuc); in test_integer()
3056 vul = vec_sral(vul, vus); in test_integer()
3059 vul = vec_sral(vul, vui); in test_integer()
3111 vsl = vec_srab(vsl, vul); in test_integer()
3114 vul = vec_srab(vul, vsl); in test_integer()
3117 vul = vec_srab(vul, vul); in test_integer()
3123 vd = vec_srab(vd, vul); in test_integer()
3217 vul = vec_srl(vul, vuc); in test_integer()
3220 vul = vec_srl(vul, vus); in test_integer()
3223 vul = vec_srl(vul, vui); in test_integer()
3275 vsl = vec_srb(vsl, vul); in test_integer()
3278 vul = vec_srb(vul, vsl); in test_integer()
3281 vul = vec_srb(vul, vul); in test_integer()
3287 vd = vec_srb(vd, vul); in test_integer()
3342 vul = vec_max(vul, vul); in test_integer()
3344 vul = vec_max(vul, vbl); in test_integer()
3346 vul = vec_max(vbl, vul); in test_integer()
3393 vul = vec_min(vul, vul); in test_integer()
3395 vul = vec_min(vul, vbl); in test_integer()
3397 vul = vec_min(vbl, vul); in test_integer()
3411 vul = vec_addc(vul, vul); in test_integer()
3449 vul = vec_avg(vul, vul); in test_integer()
3463 vul = vec_gfmsum(vui, vui); in test_integer()
3466 vuc = vec_gfmsum_128(vul, vul); in test_integer()
3476 vul = vec_gfmsum_accum(vui, vui, vul); in test_integer()
3479 vuc = vec_gfmsum_accum_128(vul, vul, vuc); in test_integer()
3542 vul = vec_meadd(vui, vui, vul); in test_integer()
3561 vul = vec_moadd(vui, vui, vul); in test_integer()
3599 vul = vec_mule(vui, vui); in test_integer()
3618 vul = vec_mulo(vui, vui); in test_integer()
3631 vul = vec_subc(vul, vul); in test_integer()
3654 vul = vec_sum2(vus, vus); in test_integer()
3657 vul = vec_sum2(vui, vui); in test_integer()
3663 vuc = vec_sum_u128(vul, vul); in test_integer()
3685 idx = vec_test_mask(vsl, vul); in test_integer()
3688 idx = vec_test_mask(vul, vul); in test_integer()
3691 idx = vec_test_mask(vd, vul); in test_integer()
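
A sketch of a few of the bitwise, shift and arithmetic forms from test_integer(), combined with the ordinary operators the same language extension provides on vector types (helper name illustrative, same build assumptions as above):

  #include <vecintrin.h>

  /* Leading-zero count, per-lane max, add-carry and rotate, then a
     bitwise combination of the intermediate results. */
  static vector unsigned long long integer_demo(vector unsigned long long a,
                                                vector unsigned long long b)
  {
    vector unsigned long long lz  = vec_cntlz(a);   /* leading zeros per lane     */
    vector unsigned long long hi  = vec_max(a, b);  /* per-lane maximum           */
    vector unsigned long long cry = vec_addc(a, b); /* carry out of a + b         */
    vector unsigned long long rot = vec_rl(a, b);   /* rotate left by b, per lane */
    return vec_andc(hi, cry) ^ (lz + rot);          /* hi & ~cry, xor the rest    */
  }
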
4471 vd = vec_ctd(vul, 0); in test_float()
4478 vd = vec_ctd(vul, 1); in test_float()
4486 vd = vec_ctd(vul, 31); in test_float()
4494 vul = vec_ctul(vd, 0); in test_float()
4501 vul = vec_ctul(vd, 1); in test_float()
4509 vul = vec_ctul(vd, 31); in test_float()
4517 vd = vec_double(vul); in test_float()
4524 vul = vec_unsigned(vd); in test_float()
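
Finally, the test_float() lines round-trip between vector unsigned long long and vector double. A sketch under the same assumptions; the second argument of vec_ctd/vec_ctul is a power-of-two scaling exponent (0 meaning no scaling), while vec_double/vec_unsigned are the unscaled conversions:

  #include <vecintrin.h>

  /* uint64 lanes -> double and back. */
  static vector unsigned long long float_demo(vector unsigned long long v)
  {
    vector double d = vec_ctd(v, 0);   /* convert each lane to double       */
    d += vec_double(v);                /* unscaled form of the same convert */
    return vec_unsigned(d);            /* back to unsigned 64-bit lanes     */
  }
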