Lines Matching refs:addrspace (each line is prefixed with its line number in the source file; lines that do not mention addrspace are elided)

12 define void @constant_load_i8(i8 addrspace(1)* %out, i8 addrspace(2)* %in) #0 {
14 %ld = load i8, i8 addrspace(2)* %in
15 store i8 %ld, i8 addrspace(1)* %out
24 define void @constant_load_v2i8(<2 x i8> addrspace(1)* %out, <2 x i8> addrspace(2)* %in) #0 {
26 %ld = load <2 x i8>, <2 x i8> addrspace(2)* %in
27 store <2 x i8> %ld, <2 x i8> addrspace(1)* %out
35 define void @constant_load_v3i8(<3 x i8> addrspace(1)* %out, <3 x i8> addrspace(2)* %in) #0 {
37 %ld = load <3 x i8>, <3 x i8> addrspace(2)* %in
38 store <3 x i8> %ld, <3 x i8> addrspace(1)* %out
46 define void @constant_load_v4i8(<4 x i8> addrspace(1)* %out, <4 x i8> addrspace(2)* %in) #0 {
48 %ld = load <4 x i8>, <4 x i8> addrspace(2)* %in
49 store <4 x i8> %ld, <4 x i8> addrspace(1)* %out
57 define void @constant_load_v8i8(<8 x i8> addrspace(1)* %out, <8 x i8> addrspace(2)* %in) #0 {
59 %ld = load <8 x i8>, <8 x i8> addrspace(2)* %in
60 store <8 x i8> %ld, <8 x i8> addrspace(1)* %out
68 define void @constant_load_v16i8(<16 x i8> addrspace(1)* %out, <16 x i8> addrspace(2)* %in) #0 {
70 %ld = load <16 x i8>, <16 x i8> addrspace(2)* %in
71 store <16 x i8> %ld, <16 x i8> addrspace(1)* %out
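For context, a sketch of what one of these plain-load kernels presumably looks like in full; only the lines mentioning addrspace are listed above, so the entry label, the ret, and the closing brace are assumptions based on the usual shape of such tests.

define void @constant_load_i8(i8 addrspace(1)* %out, i8 addrspace(2)* %in) #0 {
entry:                                     ; assumed label, not shown in the listing
  %ld = load i8, i8 addrspace(2)* %in      ; load a byte from the constant address space (2)
  store i8 %ld, i8 addrspace(1)* %out      ; store it to the global address space (1)
  ret void                                 ; assumed terminator
}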
80 define void @constant_zextload_i8_to_i32(i32 addrspace(1)* %out, i8 addrspace(2)* %in) #0 {
81 %a = load i8, i8 addrspace(2)* %in
83 store i32 %ext, i32 addrspace(1)* %out
94 define void @constant_sextload_i8_to_i32(i32 addrspace(1)* %out, i8 addrspace(2)* %in) #0 {
95 %ld = load i8, i8 addrspace(2)* %in
97 store i32 %ext, i32 addrspace(1)* %out
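The extension instructions themselves do not contain addrspace and are therefore elided from the listing; a plausible reconstruction of the two scalar i8-to-i32 bodies, assuming the conventional zext/sext form for the missing line, is:

define void @constant_zextload_i8_to_i32(i32 addrspace(1)* %out, i8 addrspace(2)* %in) #0 {
  %a = load i8, i8 addrspace(2)* %in
  %ext = zext i8 %a to i32                 ; assumed: the elided line between load and store
  store i32 %ext, i32 addrspace(1)* %out
  ret void                                 ; assumed
}

define void @constant_sextload_i8_to_i32(i32 addrspace(1)* %out, i8 addrspace(2)* %in) #0 {
  %ld = load i8, i8 addrspace(2)* %in
  %ext = sext i8 %ld to i32                ; assumed: sign-extend rather than zero-extend
  store i32 %ext, i32 addrspace(1)* %out
  ret void                                 ; assumed
}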
102 define void @constant_zextload_v1i8_to_v1i32(<1 x i32> addrspace(1)* %out, <1 x i8> addrspace(2)* %in) #0 {
103 %load = load <1 x i8>, <1 x i8> addrspace(2)* %in
105 store <1 x i32> %ext, <1 x i32> addrspace(1)* %out
110 define void @constant_sextload_v1i8_to_v1i32(<1 x i32> addrspace(1)* %out, <1 x i8> addrspace(2)* %in) #0 {
111 %load = load <1 x i8>, <1 x i8> addrspace(2)* %in
113 store <1 x i32> %ext, <1 x i32> addrspace(1)* %out
122 define void @constant_zextload_v2i8_to_v2i32(<2 x i32> addrspace(1)* %out, <2 x i8> addrspace(2)* %in) #0 {
123 %load = load <2 x i8>, <2 x i8> addrspace(2)* %in
125 store <2 x i32> %ext, <2 x i32> addrspace(1)* %out
143 define void @constant_sextload_v2i8_to_v2i32(<2 x i32> addrspace(1)* %out, <2 x i8> addrspace(2)* %in) #0 {
144 %load = load <2 x i8>, <2 x i8> addrspace(2)* %in
146 store <2 x i32> %ext, <2 x i32> addrspace(1)* %out
156 define void @constant_zextload_v3i8_to_v3i32(<3 x i32> addrspace(1)* %out, <3 x i8> addrspace(2)* %in) #0 {
158 %ld = load <3 x i8>, <3 x i8> addrspace(2)* %in
160 store <3 x i32> %ext, <3 x i32> addrspace(1)* %out
170 define void @constant_sextload_v3i8_to_v3i32(<3 x i32> addrspace(1)* %out, <3 x i8> addrspace(2)* %in) #0 {
172 %ld = load <3 x i8>, <3 x i8> addrspace(2)* %in
174 store <3 x i32> %ext, <3 x i32> addrspace(1)* %out
187 define void @constant_zextload_v4i8_to_v4i32(<4 x i32> addrspace(1)* %out, <4 x i8> addrspace(2)* %in) #0 {
188 %load = load <4 x i8>, <4 x i8> addrspace(2)* %in
190 store <4 x i32> %ext, <4 x i32> addrspace(1)* %out
211 define void @constant_sextload_v4i8_to_v4i32(<4 x i32> addrspace(1)* %out, <4 x i8> addrspace(2)* %in) #0 {
212 %load = load <4 x i8>, <4 x i8> addrspace(2)* %in
214 store <4 x i32> %ext, <4 x i32> addrspace(1)* %out
222 define void @constant_zextload_v8i8_to_v8i32(<8 x i32> addrspace(1)* %out, <8 x i8> addrspace(2)* %in) #0 {
223 %load = load <8 x i8>, <8 x i8> addrspace(2)* %in
225 store <8 x i32> %ext, <8 x i32> addrspace(1)* %out
233 define void @constant_sextload_v8i8_to_v8i32(<8 x i32> addrspace(1)* %out, <8 x i8> addrspace(2)* %in) #0 {
234 %load = load <8 x i8>, <8 x i8> addrspace(2)* %in
236 store <8 x i32> %ext, <8 x i32> addrspace(1)* %out
241 define void @constant_zextload_v16i8_to_v16i32(<16 x i32> addrspace(1)* %out, <16 x i8> addrspace(2)* %in) #0 {
242 %load = load <16 x i8>, <16 x i8> addrspace(2)* %in
244 store <16 x i32> %ext, <16 x i32> addrspace(1)* %out
249 define void @constant_sextload_v16i8_to_v16i32(<16 x i32> addrspace(1)* %out, <16 x i8> addrspace(2)* %in) #0 {
250 %load = load <16 x i8>, <16 x i8> addrspace(2)* %in
252 store <16 x i32> %ext, <16 x i32> addrspace(1)* %out
257 define void @constant_zextload_v32i8_to_v32i32(<32 x i32> addrspace(1)* %out, <32 x i8> addrspace(2)* %in) #0 {
258 %load = load <32 x i8>, <32 x i8> addrspace(2)* %in
260 store <32 x i32> %ext, <32 x i32> addrspace(1)* %out
265 define void @constant_sextload_v32i8_to_v32i32(<32 x i32> addrspace(1)* %out, <32 x i8> addrspace(2)* %in) #0 {
266 %load = load <32 x i8>, <32 x i8> addrspace(2)* %in
268 store <32 x i32> %ext, <32 x i32> addrspace(1)* %out
273 define void @constant_zextload_v64i8_to_v64i32(<64 x i32> addrspace(1)* %out, <64 x i8> addrspace(2)* %in) #0 {
274 %load = load <64 x i8>, <64 x i8> addrspace(2)* %in
276 store <64 x i32> %ext, <64 x i32> addrspace(1)* %out
281 define void @constant_sextload_v64i8_to_v64i32(<64 x i32> addrspace(1)* %out, <64 x i8> addrspace(2)* %in) #0 {
282 %load = load <64 x i8>, <64 x i8> addrspace(2)* %in
284 store <64 x i32> %ext, <64 x i32> addrspace(1)* %out
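The vector variants follow the same pattern, with the elided line being a vector zext or sext; for example, the v4i8-to-v4i32 zero-extension case presumably reads:

define void @constant_zextload_v4i8_to_v4i32(<4 x i32> addrspace(1)* %out, <4 x i8> addrspace(2)* %in) #0 {
  %load = load <4 x i8>, <4 x i8> addrspace(2)* %in
  %ext = zext <4 x i8> %load to <4 x i32>  ; assumed: elementwise zero-extension of the vector
  store <4 x i32> %ext, <4 x i32> addrspace(1)* %out
  ret void                                 ; assumed
}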
296 define void @constant_zextload_i8_to_i64(i64 addrspace(1)* %out, i8 addrspace(2)* %in) #0 {
297 %a = load i8, i8 addrspace(2)* %in
299 store i64 %ext, i64 addrspace(1)* %out
310 define void @constant_sextload_i8_to_i64(i64 addrspace(1)* %out, i8 addrspace(2)* %in) #0 {
311 %a = load i8, i8 addrspace(2)* %in
313 store i64 %ext, i64 addrspace(1)* %out
318 define void @constant_zextload_v1i8_to_v1i64(<1 x i64> addrspace(1)* %out, <1 x i8> addrspace(2)* %in) #0 {
319 %load = load <1 x i8>, <1 x i8> addrspace(2)* %in
321 store <1 x i64> %ext, <1 x i64> addrspace(1)* %out
326 define void @constant_sextload_v1i8_to_v1i64(<1 x i64> addrspace(1)* %out, <1 x i8> addrspace(2)* %in) #0 {
327 %load = load <1 x i8>, <1 x i8> addrspace(2)* %in
329 store <1 x i64> %ext, <1 x i64> addrspace(1)* %out
334 define void @constant_zextload_v2i8_to_v2i64(<2 x i64> addrspace(1)* %out, <2 x i8> addrspace(2)* %in) #0 {
335 %load = load <2 x i8>, <2 x i8> addrspace(2)* %in
337 store <2 x i64> %ext, <2 x i64> addrspace(1)* %out
342 define void @constant_sextload_v2i8_to_v2i64(<2 x i64> addrspace(1)* %out, <2 x i8> addrspace(2)* %in) #0 {
343 %load = load <2 x i8>, <2 x i8> addrspace(2)* %in
345 store <2 x i64> %ext, <2 x i64> addrspace(1)* %out
350 define void @constant_zextload_v4i8_to_v4i64(<4 x i64> addrspace(1)* %out, <4 x i8> addrspace(2)* %in) #0 {
351 %load = load <4 x i8>, <4 x i8> addrspace(2)* %in
353 store <4 x i64> %ext, <4 x i64> addrspace(1)* %out
358 define void @constant_sextload_v4i8_to_v4i64(<4 x i64> addrspace(1)* %out, <4 x i8> addrspace(2)* %in) #0 {
359 %load = load <4 x i8>, <4 x i8> addrspace(2)* %in
361 store <4 x i64> %ext, <4 x i64> addrspace(1)* %out
366 define void @constant_zextload_v8i8_to_v8i64(<8 x i64> addrspace(1)* %out, <8 x i8> addrspace(2)* %in) #0 {
367 %load = load <8 x i8>, <8 x i8> addrspace(2)* %in
369 store <8 x i64> %ext, <8 x i64> addrspace(1)* %out
374 define void @constant_sextload_v8i8_to_v8i64(<8 x i64> addrspace(1)* %out, <8 x i8> addrspace(2)* %in) #0 {
375 %load = load <8 x i8>, <8 x i8> addrspace(2)* %in
377 store <8 x i64> %ext, <8 x i64> addrspace(1)* %out
382 define void @constant_zextload_v16i8_to_v16i64(<16 x i64> addrspace(1)* %out, <16 x i8> addrspace(2)* %in) #0 {
383 %load = load <16 x i8>, <16 x i8> addrspace(2)* %in
385 store <16 x i64> %ext, <16 x i64> addrspace(1)* %out
390 define void @constant_sextload_v16i8_to_v16i64(<16 x i64> addrspace(1)* %out, <16 x i8> addrspace(2)* %in) #0 {
391 %load = load <16 x i8>, <16 x i8> addrspace(2)* %in
393 store <16 x i64> %ext, <16 x i64> addrspace(1)* %out
398 define void @constant_zextload_v32i8_to_v32i64(<32 x i64> addrspace(1)* %out, <32 x i8> addrspace(2)* %in) #0 {
399 %load = load <32 x i8>, <32 x i8> addrspace(2)* %in
401 store <32 x i64> %ext, <32 x i64> addrspace(1)* %out
406 define void @constant_sextload_v32i8_to_v32i64(<32 x i64> addrspace(1)* %out, <32 x i8> addrspace(2)* %in) #0 {
407 %load = load <32 x i8>, <32 x i8> addrspace(2)* %in
409 store <32 x i64> %ext, <32 x i64> addrspace(1)* %out
414 ; define void @constant_zextload_v64i8_to_v64i64(<64 x i64> addrspace(1)* %out, <64 x i8> addrspace(2)* %in) #0 {
415 ; %load = load <64 x i8>, <64 x i8> addrspace(2)* %in
417 ; store <64 x i64> %ext, <64 x i64> addrspace(1)* %out
422 ; define void @constant_sextload_v64i8_to_v64i64(<64 x i64> addrspace(1)* %out, <64 x i8> addrspace(2)* %in) #0 {
423 ; %load = load <64 x i8>, <64 x i8> addrspace(2)* %in
425 ; store <64 x i64> %ext, <64 x i64> addrspace(1)* %out
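The i8-to-i64 tests differ only in the destination type, and the v64i8-to-v64i64 pair appears to be commented out in the source. A sketch of the scalar pair, with the elided extension lines assumed:

define void @constant_zextload_i8_to_i64(i64 addrspace(1)* %out, i8 addrspace(2)* %in) #0 {
  %a = load i8, i8 addrspace(2)* %in
  %ext = zext i8 %a to i64                 ; assumed: elided zero-extension to 64 bits
  store i64 %ext, i64 addrspace(1)* %out
  ret void                                 ; assumed
}

define void @constant_sextload_i8_to_i64(i64 addrspace(1)* %out, i8 addrspace(2)* %in) #0 {
  %a = load i8, i8 addrspace(2)* %in
  %ext = sext i8 %a to i64                 ; assumed: elided sign-extension to 64 bits
  store i64 %ext, i64 addrspace(1)* %out
  ret void                                 ; assumed
}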
435 define void @constant_zextload_i8_to_i16(i16 addrspace(1)* %out, i8 addrspace(2)* %in) #0 {
436 %a = load i8, i8 addrspace(2)* %in
438 store i16 %ext, i16 addrspace(1)* %out
448 define void @constant_sextload_i8_to_i16(i16 addrspace(1)* %out, i8 addrspace(2)* %in) #0 {
449 %a = load i8, i8 addrspace(2)* %in
451 store i16 %ext, i16 addrspace(1)* %out
456 define void @constant_zextload_v1i8_to_v1i16(<1 x i16> addrspace(1)* %out, <1 x i8> addrspace(2)* %in) #0 {
457 %load = load <1 x i8>, <1 x i8> addrspace(2)* %in
459 store <1 x i16> %ext, <1 x i16> addrspace(1)* %out
464 define void @constant_sextload_v1i8_to_v1i16(<1 x i16> addrspace(1)* %out, <1 x i8> addrspace(2)* %in) #0 {
465 %load = load <1 x i8>, <1 x i8> addrspace(2)* %in
467 store <1 x i16> %ext, <1 x i16> addrspace(1)* %out
472 define void @constant_zextload_v2i8_to_v2i16(<2 x i16> addrspace(1)* %out, <2 x i8> addrspace(2)* %in) #0 {
473 %load = load <2 x i8>, <2 x i8> addrspace(2)* %in
475 store <2 x i16> %ext, <2 x i16> addrspace(1)* %out
480 define void @constant_sextload_v2i8_to_v2i16(<2 x i16> addrspace(1)* %out, <2 x i8> addrspace(2)* %in) #0 {
481 %load = load <2 x i8>, <2 x i8> addrspace(2)* %in
483 store <2 x i16> %ext, <2 x i16> addrspace(1)* %out
488 define void @constant_zextload_v4i8_to_v4i16(<4 x i16> addrspace(1)* %out, <4 x i8> addrspace(2)* %in) #0 {
489 %load = load <4 x i8>, <4 x i8> addrspace(2)* %in
491 store <4 x i16> %ext, <4 x i16> addrspace(1)* %out
496 define void @constant_sextload_v4i8_to_v4i16(<4 x i16> addrspace(1)* %out, <4 x i8> addrspace(2)* %in) #0 {
497 %load = load <4 x i8>, <4 x i8> addrspace(2)* %in
499 store <4 x i16> %ext, <4 x i16> addrspace(1)* %out
504 define void @constant_zextload_v8i8_to_v8i16(<8 x i16> addrspace(1)* %out, <8 x i8> addrspace(2)* %in) #0 {
505 %load = load <8 x i8>, <8 x i8> addrspace(2)* %in
507 store <8 x i16> %ext, <8 x i16> addrspace(1)* %out
512 define void @constant_sextload_v8i8_to_v8i16(<8 x i16> addrspace(1)* %out, <8 x i8> addrspace(2)* %in) #0 {
513 %load = load <8 x i8>, <8 x i8> addrspace(2)* %in
515 store <8 x i16> %ext, <8 x i16> addrspace(1)* %out
520 define void @constant_zextload_v16i8_to_v16i16(<16 x i16> addrspace(1)* %out, <16 x i8> addrspace(2)* %in) #0 {
521 %load = load <16 x i8>, <16 x i8> addrspace(2)* %in
523 store <16 x i16> %ext, <16 x i16> addrspace(1)* %out
528 define void @constant_sextload_v16i8_to_v16i16(<16 x i16> addrspace(1)* %out, <16 x i8> addrspace(2)* %in) #0 {
529 %load = load <16 x i8>, <16 x i8> addrspace(2)* %in
531 store <16 x i16> %ext, <16 x i16> addrspace(1)* %out
536 define void @constant_zextload_v32i8_to_v32i16(<32 x i16> addrspace(1)* %out, <32 x i8> addrspace(2)* %in) #0 {
537 %load = load <32 x i8>, <32 x i8> addrspace(2)* %in
539 store <32 x i16> %ext, <32 x i16> addrspace(1)* %out
544 define void @constant_sextload_v32i8_to_v32i16(<32 x i16> addrspace(1)* %out, <32 x i8> addrspace(2)* %in) #0 {
545 %load = load <32 x i8>, <32 x i8> addrspace(2)* %in
547 store <32 x i16> %ext, <32 x i16> addrspace(1)* %out
552 ; define void @constant_zextload_v64i8_to_v64i16(<64 x i16> addrspace(1)* %out, <64 x i8> addrspace(2)* %in) #0 {
553 ; %load = load <64 x i8>, <64 x i8> addrspace(2)* %in
555 ; store <64 x i16> %ext, <64 x i16> addrspace(1)* %out
560 ; define void @constant_sextload_v64i8_to_v64i16(<64 x i16> addrspace(1)* %out, <64 x i8> addrspace(2)* %in) #0 {
561 ; %load = load <64 x i8>, <64 x i8> addrspace(2)* %in
563 ; store <64 x i16> %ext, <64 x i16> addrspace(1)* %out
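Finally, the i8-to-i16 group mirrors the i32 and i64 groups with 16-bit results, and its v64 pair is likewise commented out. A sketch of the scalar zero-extension case, with the elided line assumed:

define void @constant_zextload_i8_to_i16(i16 addrspace(1)* %out, i8 addrspace(2)* %in) #0 {
  %a = load i8, i8 addrspace(2)* %in
  %ext = zext i8 %a to i16                 ; assumed: the elided extension
  store i16 %ext, i16 addrspace(1)* %out
  ret void                                 ; assumed
}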