Lines Matching refs:addrspace

7 define void @atomic_add_i32_offset(i32 addrspace(1)* %out, i32 %in) {
9 %gep = getelementptr i32, i32 addrspace(1)* %out, i32 4
10 %0 = atomicrmw volatile add i32 addrspace(1)* %gep, i32 %in seq_cst
17 define void @atomic_add_i32_ret_offset(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in) {
19 %gep = getelementptr i32, i32 addrspace(1)* %out, i32 4
20 %0 = atomicrmw volatile add i32 addrspace(1)* %gep, i32 %in seq_cst
21 store i32 %0, i32 addrspace(1)* %out2
31 define void @atomic_add_i32_addr64_offset(i32 addrspace(1)* %out, i32 %in, i64 %index) {
33 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
34 %gep = getelementptr i32, i32 addrspace(1)* %ptr, i32 4
35 %0 = atomicrmw volatile add i32 addrspace(1)* %gep, i32 %in seq_cst
45 define void @atomic_add_i32_ret_addr64_offset(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in, i64 %index) {
47 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
48 %gep = getelementptr i32, i32 addrspace(1)* %ptr, i32 4
49 %0 = atomicrmw volatile add i32 addrspace(1)* %gep, i32 %in seq_cst
50 store i32 %0, i32 addrspace(1)* %out2
56 define void @atomic_add_i32(i32 addrspace(1)* %out, i32 %in) {
58 %0 = atomicrmw volatile add i32 addrspace(1)* %out, i32 %in seq_cst
65 define void @atomic_add_i32_ret(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in) {
67 %0 = atomicrmw volatile add i32 addrspace(1)* %out, i32 %in seq_cst
68 store i32 %0, i32 addrspace(1)* %out2
77 define void @atomic_add_i32_addr64(i32 addrspace(1)* %out, i32 %in, i64 %index) {
79 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
80 %0 = atomicrmw volatile add i32 addrspace(1)* %ptr, i32 %in seq_cst
90 define void @atomic_add_i32_ret_addr64(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in, i64 %index) {
92 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
93 %0 = atomicrmw volatile add i32 addrspace(1)* %ptr, i32 %in seq_cst
94 store i32 %0, i32 addrspace(1)* %out2
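
The matcher prints only lines containing addrspace, so each function's closing lines are elided. Filled out, the first add function above presumably reads as follows; the entry label, ret void, and closing brace are assumed here, since the search output omits them:

define void @atomic_add_i32_offset(i32 addrspace(1)* %out, i32 %in) {
entry:
  ; index 4 on an i32 pointer: address %out + 16 bytes
  %gep = getelementptr i32, i32 addrspace(1)* %out, i32 4
  ; volatile, sequentially consistent read-modify-write add
  %0 = atomicrmw volatile add i32 addrspace(1)* %gep, i32 %in seq_cst
  ret void
}
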
100 define void @atomic_and_i32_offset(i32 addrspace(1)* %out, i32 %in) {
102 %gep = getelementptr i32, i32 addrspace(1)* %out, i32 4
103 %0 = atomicrmw volatile and i32 addrspace(1)* %gep, i32 %in seq_cst
110 define void @atomic_and_i32_ret_offset(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in) {
112 %gep = getelementptr i32, i32 addrspace(1)* %out, i32 4
113 %0 = atomicrmw volatile and i32 addrspace(1)* %gep, i32 %in seq_cst
114 store i32 %0, i32 addrspace(1)* %out2
123 define void @atomic_and_i32_addr64_offset(i32 addrspace(1)* %out, i32 %in, i64 %index) {
125 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
126 %gep = getelementptr i32, i32 addrspace(1)* %ptr, i32 4
127 %0 = atomicrmw volatile and i32 addrspace(1)* %gep, i32 %in seq_cst
137 define void @atomic_and_i32_ret_addr64_offset(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in, i64 %index) {
139 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
140 %gep = getelementptr i32, i32 addrspace(1)* %ptr, i32 4
141 %0 = atomicrmw volatile and i32 addrspace(1)* %gep, i32 %in seq_cst
142 store i32 %0, i32 addrspace(1)* %out2
148 define void @atomic_and_i32(i32 addrspace(1)* %out, i32 %in) {
150 %0 = atomicrmw volatile and i32 addrspace(1)* %out, i32 %in seq_cst
157 define void @atomic_and_i32_ret(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in) {
159 %0 = atomicrmw volatile and i32 addrspace(1)* %out, i32 %in seq_cst
160 store i32 %0, i32 addrspace(1)* %out2
169 define void @atomic_and_i32_addr64(i32 addrspace(1)* %out, i32 %in, i64 %index) {
171 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
172 %0 = atomicrmw volatile and i32 addrspace(1)* %ptr, i32 %in seq_cst
182 define void @atomic_and_i32_ret_addr64(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in, i64 %index) {
184 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
185 %0 = atomicrmw volatile and i32 addrspace(1)* %ptr, i32 %in seq_cst
186 store i32 %0, i32 addrspace(1)* %out2
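
In the addr64_offset variants both getelementptrs index in i32 elements of 4 bytes each, so the final address is %out + 4*%index + 16. A hypothetical single-GEP rewrite (not in the file) with the same address arithmetic:

  %sum = add i64 %index, 4
  %gep = getelementptr i32, i32 addrspace(1)* %out, i64 %sum
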
192 define void @atomic_sub_i32_offset(i32 addrspace(1)* %out, i32 %in) {
194 %gep = getelementptr i32, i32 addrspace(1)* %out, i32 4
195 %0 = atomicrmw volatile sub i32 addrspace(1)* %gep, i32 %in seq_cst
202 define void @atomic_sub_i32_ret_offset(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in) {
204 %gep = getelementptr i32, i32 addrspace(1)* %out, i32 4
205 %0 = atomicrmw volatile sub i32 addrspace(1)* %gep, i32 %in seq_cst
206 store i32 %0, i32 addrspace(1)* %out2
215 define void @atomic_sub_i32_addr64_offset(i32 addrspace(1)* %out, i32 %in, i64 %index) {
217 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
218 %gep = getelementptr i32, i32 addrspace(1)* %ptr, i32 4
219 %0 = atomicrmw volatile sub i32 addrspace(1)* %gep, i32 %in seq_cst
229 define void @atomic_sub_i32_ret_addr64_offset(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in, i64 %index) {
231 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
232 %gep = getelementptr i32, i32 addrspace(1)* %ptr, i32 4
233 %0 = atomicrmw volatile sub i32 addrspace(1)* %gep, i32 %in seq_cst
234 store i32 %0, i32 addrspace(1)* %out2
240 define void @atomic_sub_i32(i32 addrspace(1)* %out, i32 %in) {
242 %0 = atomicrmw volatile sub i32 addrspace(1)* %out, i32 %in seq_cst
249 define void @atomic_sub_i32_ret(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in) {
251 %0 = atomicrmw volatile sub i32 addrspace(1)* %out, i32 %in seq_cst
252 store i32 %0, i32 addrspace(1)* %out2
261 define void @atomic_sub_i32_addr64(i32 addrspace(1)* %out, i32 %in, i64 %index) {
263 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
264 %0 = atomicrmw volatile sub i32 addrspace(1)* %ptr, i32 %in seq_cst
274 define void @atomic_sub_i32_ret_addr64(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in, i64 %index) {
276 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
277 %0 = atomicrmw volatile sub i32 addrspace(1)* %ptr, i32 %in seq_cst
278 store i32 %0, i32 addrspace(1)* %out2
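
atomicrmw yields the value the location held before the update, so every _ret variant stores the pre-operation value to %out2. With illustrative values: if %out holds 10 and %in is 3, @atomic_sub_i32_ret leaves 7 at %out and writes the old 10 to %out2.
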
284 define void @atomic_max_i32_offset(i32 addrspace(1)* %out, i32 %in) {
286 %gep = getelementptr i32, i32 addrspace(1)* %out, i32 4
287 %0 = atomicrmw volatile max i32 addrspace(1)* %gep, i32 %in seq_cst
294 define void @atomic_max_i32_ret_offset(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in) {
296 %gep = getelementptr i32, i32 addrspace(1)* %out, i32 4
297 %0 = atomicrmw volatile max i32 addrspace(1)* %gep, i32 %in seq_cst
298 store i32 %0, i32 addrspace(1)* %out2
307 define void @atomic_max_i32_addr64_offset(i32 addrspace(1)* %out, i32 %in, i64 %index) {
309 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
310 %gep = getelementptr i32, i32 addrspace(1)* %ptr, i32 4
311 %0 = atomicrmw volatile max i32 addrspace(1)* %gep, i32 %in seq_cst
321 define void @atomic_max_i32_ret_addr64_offset(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in, i64 %index) {
323 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
324 %gep = getelementptr i32, i32 addrspace(1)* %ptr, i32 4
325 %0 = atomicrmw volatile max i32 addrspace(1)* %gep, i32 %in seq_cst
326 store i32 %0, i32 addrspace(1)* %out2
332 define void @atomic_max_i32(i32 addrspace(1)* %out, i32 %in) {
334 %0 = atomicrmw volatile max i32 addrspace(1)* %out, i32 %in seq_cst
341 define void @atomic_max_i32_ret(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in) {
343 %0 = atomicrmw volatile max i32 addrspace(1)* %out, i32 %in seq_cst
344 store i32 %0, i32 addrspace(1)* %out2
353 define void @atomic_max_i32_addr64(i32 addrspace(1)* %out, i32 %in, i64 %index) {
355 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
356 %0 = atomicrmw volatile max i32 addrspace(1)* %ptr, i32 %in seq_cst
366 define void @atomic_max_i32_ret_addr64(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in, i64 %index) {
368 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
369 %0 = atomicrmw volatile max i32 addrspace(1)* %ptr, i32 %in seq_cst
370 store i32 %0, i32 addrspace(1)* %out2
376 define void @atomic_umax_i32_offset(i32 addrspace(1)* %out, i32 %in) {
378 %gep = getelementptr i32, i32 addrspace(1)* %out, i32 4
379 %0 = atomicrmw volatile umax i32 addrspace(1)* %gep, i32 %in seq_cst
386 define void @atomic_umax_i32_ret_offset(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in) {
388 %gep = getelementptr i32, i32 addrspace(1)* %out, i32 4
389 %0 = atomicrmw volatile umax i32 addrspace(1)* %gep, i32 %in seq_cst
390 store i32 %0, i32 addrspace(1)* %out2
399 define void @atomic_umax_i32_addr64_offset(i32 addrspace(1)* %out, i32 %in, i64 %index) {
401 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
402 %gep = getelementptr i32, i32 addrspace(1)* %ptr, i32 4
403 %0 = atomicrmw volatile umax i32 addrspace(1)* %gep, i32 %in seq_cst
413 define void @atomic_umax_i32_ret_addr64_offset(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in, i64 %index) {
415 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
416 %gep = getelementptr i32, i32 addrspace(1)* %ptr, i32 4
417 %0 = atomicrmw volatile umax i32 addrspace(1)* %gep, i32 %in seq_cst
418 store i32 %0, i32 addrspace(1)* %out2
424 define void @atomic_umax_i32(i32 addrspace(1)* %out, i32 %in) {
426 %0 = atomicrmw volatile umax i32 addrspace(1)* %out, i32 %in seq_cst
433 define void @atomic_umax_i32_ret(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in) {
435 %0 = atomicrmw volatile umax i32 addrspace(1)* %out, i32 %in seq_cst
436 store i32 %0, i32 addrspace(1)* %out2
445 define void @atomic_umax_i32_addr64(i32 addrspace(1)* %out, i32 %in, i64 %index) {
447 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
448 %0 = atomicrmw volatile umax i32 addrspace(1)* %ptr, i32 %in seq_cst
458 define void @atomic_umax_i32_ret_addr64(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in, i64 %index) {
460 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
461 %0 = atomicrmw volatile umax i32 addrspace(1)* %ptr, i32 %in seq_cst
462 store i32 %0, i32 addrspace(1)* %out2
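
max and umax run the same compare-and-store over different readings of the bits: max compares signed, umax unsigned. With a stored -1 (0xffffffff) and %in = 1 (values chosen for illustration), max replaces the value with 1, while umax keeps 0xffffffff.
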
468 define void @atomic_min_i32_offset(i32 addrspace(1)* %out, i32 %in) {
470 %gep = getelementptr i32, i32 addrspace(1)* %out, i32 4
471 %0 = atomicrmw volatile min i32 addrspace(1)* %gep, i32 %in seq_cst
478 define void @atomic_min_i32_ret_offset(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in) {
480 %gep = getelementptr i32, i32 addrspace(1)* %out, i32 4
481 %0 = atomicrmw volatile min i32 addrspace(1)* %gep, i32 %in seq_cst
482 store i32 %0, i32 addrspace(1)* %out2
491 define void @atomic_min_i32_addr64_offset(i32 addrspace(1)* %out, i32 %in, i64 %index) {
493 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
494 %gep = getelementptr i32, i32 addrspace(1)* %ptr, i32 4
495 %0 = atomicrmw volatile min i32 addrspace(1)* %gep, i32 %in seq_cst
505 define void @atomic_min_i32_ret_addr64_offset(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in, i64 %index) {
507 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
508 %gep = getelementptr i32, i32 addrspace(1)* %ptr, i32 4
509 %0 = atomicrmw volatile min i32 addrspace(1)* %gep, i32 %in seq_cst
510 store i32 %0, i32 addrspace(1)* %out2
516 define void @atomic_min_i32(i32 addrspace(1)* %out, i32 %in) {
518 %0 = atomicrmw volatile min i32 addrspace(1)* %out, i32 %in seq_cst
525 define void @atomic_min_i32_ret(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in) {
527 %0 = atomicrmw volatile min i32 addrspace(1)* %out, i32 %in seq_cst
528 store i32 %0, i32 addrspace(1)* %out2
537 define void @atomic_min_i32_addr64(i32 addrspace(1)* %out, i32 %in, i64 %index) {
539 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
540 %0 = atomicrmw volatile min i32 addrspace(1)* %ptr, i32 %in seq_cst
550 define void @atomic_min_i32_ret_addr64(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in, i64 %index) {
552 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
553 %0 = atomicrmw volatile min i32 addrspace(1)* %ptr, i32 %in seq_cst
554 store i32 %0, i32 addrspace(1)* %out2
560 define void @atomic_umin_i32_offset(i32 addrspace(1)* %out, i32 %in) {
562 %gep = getelementptr i32, i32 addrspace(1)* %out, i32 4
563 %0 = atomicrmw volatile umin i32 addrspace(1)* %gep, i32 %in seq_cst
570 define void @atomic_umin_i32_ret_offset(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in) {
572 %gep = getelementptr i32, i32 addrspace(1)* %out, i32 4
573 %0 = atomicrmw volatile umin i32 addrspace(1)* %gep, i32 %in seq_cst
574 store i32 %0, i32 addrspace(1)* %out2
583 define void @atomic_umin_i32_addr64_offset(i32 addrspace(1)* %out, i32 %in, i64 %index) {
585 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
586 %gep = getelementptr i32, i32 addrspace(1)* %ptr, i32 4
587 %0 = atomicrmw volatile umin i32 addrspace(1)* %gep, i32 %in seq_cst
597 define void @atomic_umin_i32_ret_addr64_offset(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in, i64 %index) {
599 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
600 %gep = getelementptr i32, i32 addrspace(1)* %ptr, i32 4
601 %0 = atomicrmw volatile umin i32 addrspace(1)* %gep, i32 %in seq_cst
602 store i32 %0, i32 addrspace(1)* %out2
608 define void @atomic_umin_i32(i32 addrspace(1)* %out, i32 %in) {
610 %0 = atomicrmw volatile umin i32 addrspace(1)* %out, i32 %in seq_cst
617 define void @atomic_umin_i32_ret(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in) {
619 %0 = atomicrmw volatile umin i32 addrspace(1)* %out, i32 %in seq_cst
620 store i32 %0, i32 addrspace(1)* %out2
629 define void @atomic_umin_i32_addr64(i32 addrspace(1)* %out, i32 %in, i64 %index) {
631 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
632 %0 = atomicrmw volatile umin i32 addrspace(1)* %ptr, i32 %in seq_cst
642 define void @atomic_umin_i32_ret_addr64(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in, i64 %index) {
644 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
645 %0 = atomicrmw volatile umin i32 addrspace(1)* %ptr, i32 %in seq_cst
646 store i32 %0, i32 addrspace(1)* %out2
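
Dually, min is the signed and umin the unsigned minimum: for a stored -1 (0xffffffff) and %in = 1, min keeps -1 while umin stores 1.
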
652 define void @atomic_or_i32_offset(i32 addrspace(1)* %out, i32 %in) {
654 %gep = getelementptr i32, i32 addrspace(1)* %out, i32 4
655 %0 = atomicrmw volatile or i32 addrspace(1)* %gep, i32 %in seq_cst
662 define void @atomic_or_i32_ret_offset(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in) {
664 %gep = getelementptr i32, i32 addrspace(1)* %out, i32 4
665 %0 = atomicrmw volatile or i32 addrspace(1)* %gep, i32 %in seq_cst
666 store i32 %0, i32 addrspace(1)* %out2
675 define void @atomic_or_i32_addr64_offset(i32 addrspace(1)* %out, i32 %in, i64 %index) {
677 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
678 %gep = getelementptr i32, i32 addrspace(1)* %ptr, i32 4
679 %0 = atomicrmw volatile or i32 addrspace(1)* %gep, i32 %in seq_cst
689 define void @atomic_or_i32_ret_addr64_offset(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in, i64 %index) {
691 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
692 %gep = getelementptr i32, i32 addrspace(1)* %ptr, i32 4
693 %0 = atomicrmw volatile or i32 addrspace(1)* %gep, i32 %in seq_cst
694 store i32 %0, i32 addrspace(1)* %out2
700 define void @atomic_or_i32(i32 addrspace(1)* %out, i32 %in) {
702 %0 = atomicrmw volatile or i32 addrspace(1)* %out, i32 %in seq_cst
709 define void @atomic_or_i32_ret(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in) {
711 %0 = atomicrmw volatile or i32 addrspace(1)* %out, i32 %in seq_cst
712 store i32 %0, i32 addrspace(1)* %out2
721 define void @atomic_or_i32_addr64(i32 addrspace(1)* %out, i32 %in, i64 %index) {
723 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
724 %0 = atomicrmw volatile or i32 addrspace(1)* %ptr, i32 %in seq_cst
734 define void @atomic_or_i32_ret_addr64(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in, i64 %index) {
736 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
737 %0 = atomicrmw volatile or i32 addrspace(1)* %ptr, i32 %in seq_cst
738 store i32 %0, i32 addrspace(1)* %out2
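
or atomically sets whichever bits are set in %in; for example, a stored 0b0101 or'd with %in = 0b0011 leaves 0b0111 (values chosen for illustration).
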
744 define void @atomic_xchg_i32_offset(i32 addrspace(1)* %out, i32 %in) {
746 %gep = getelementptr i32, i32 addrspace(1)* %out, i32 4
747 %0 = atomicrmw volatile xchg i32 addrspace(1)* %gep, i32 %in seq_cst
754 define void @atomic_xchg_i32_ret_offset(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in) {
756 %gep = getelementptr i32, i32 addrspace(1)* %out, i32 4
757 %0 = atomicrmw volatile xchg i32 addrspace(1)* %gep, i32 %in seq_cst
758 store i32 %0, i32 addrspace(1)* %out2
764 define void @atomic_xchg_i32_addr64_offset(i32 addrspace(1)* %out, i32 %in, i64 %index) {
766 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
767 %gep = getelementptr i32, i32 addrspace(1)* %ptr, i32 4
768 %0 = atomicrmw volatile xchg i32 addrspace(1)* %gep, i32 %in seq_cst
778 define void @atomic_xchg_i32_ret_addr64_offset(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in, i64 %index) {
780 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
781 %gep = getelementptr i32, i32 addrspace(1)* %ptr, i32 4
782 %0 = atomicrmw volatile xchg i32 addrspace(1)* %gep, i32 %in seq_cst
783 store i32 %0, i32 addrspace(1)* %out2
789 define void @atomic_xchg_i32(i32 addrspace(1)* %out, i32 %in) {
791 %0 = atomicrmw volatile xchg i32 addrspace(1)* %out, i32 %in seq_cst
798 define void @atomic_xchg_i32_ret(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in) {
800 %0 = atomicrmw volatile xchg i32 addrspace(1)* %out, i32 %in seq_cst
801 store i32 %0, i32 addrspace(1)* %out2
810 define void @atomic_xchg_i32_addr64(i32 addrspace(1)* %out, i32 %in, i64 %index) {
812 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
813 %0 = atomicrmw volatile xchg i32 addrspace(1)* %ptr, i32 %in seq_cst
823 define void @atomic_xchg_i32_ret_addr64(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in, i64 %index) {
825 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
826 %0 = atomicrmw volatile xchg i32 addrspace(1)* %ptr, i32 %in seq_cst
827 store i32 %0, i32 addrspace(1)* %out2
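
xchg stores %in unconditionally and yields the previous contents, making @atomic_xchg_i32_ret an atomic swap: with %out holding 10 and %in = 3 (illustrative values), %out ends up holding 3 and %out2 receives 10.
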
833 define void @atomic_xor_i32_offset(i32 addrspace(1)* %out, i32 %in) {
835 %gep = getelementptr i32, i32 addrspace(1)* %out, i32 4
836 %0 = atomicrmw volatile xor i32 addrspace(1)* %gep, i32 %in seq_cst
843 define void @atomic_xor_i32_ret_offset(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in) {
845 %gep = getelementptr i32, i32 addrspace(1)* %out, i32 4
846 %0 = atomicrmw volatile xor i32 addrspace(1)* %gep, i32 %in seq_cst
847 store i32 %0, i32 addrspace(1)* %out2
856 define void @atomic_xor_i32_addr64_offset(i32 addrspace(1)* %out, i32 %in, i64 %index) {
858 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
859 %gep = getelementptr i32, i32 addrspace(1)* %ptr, i32 4
860 %0 = atomicrmw volatile xor i32 addrspace(1)* %gep, i32 %in seq_cst
870 define void @atomic_xor_i32_ret_addr64_offset(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in, i64 %index) {
872 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
873 %gep = getelementptr i32, i32 addrspace(1)* %ptr, i32 4
874 %0 = atomicrmw volatile xor i32 addrspace(1)* %gep, i32 %in seq_cst
875 store i32 %0, i32 addrspace(1)* %out2
881 define void @atomic_xor_i32(i32 addrspace(1)* %out, i32 %in) {
883 %0 = atomicrmw volatile xor i32 addrspace(1)* %out, i32 %in seq_cst
890 define void @atomic_xor_i32_ret(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in) {
892 %0 = atomicrmw volatile xor i32 addrspace(1)* %out, i32 %in seq_cst
893 store i32 %0, i32 addrspace(1)* %out2
902 define void @atomic_xor_i32_addr64(i32 addrspace(1)* %out, i32 %in, i64 %index) {
904 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
905 %0 = atomicrmw volatile xor i32 addrspace(1)* %ptr, i32 %in seq_cst
915 define void @atomic_xor_i32_ret_addr64(i32 addrspace(1)* %out, i32 addrspace(1)* %out2, i32 %in, i64 %index) {
917 %ptr = getelementptr i32, i32 addrspace(1)* %out, i64 %index
918 %0 = atomicrmw volatile xor i32 addrspace(1)* %ptr, i32 %in seq_cst
919 store i32 %0, i32 addrspace(1)* %out2
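
xor atomically toggles the bits of %in; since x xor k xor k = x, running @atomic_xor_i32 twice with the same %in restores the original contents.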