Lines Matching refs:addrspace

14 define void @lds_atomic_xchg_ret_i32(i32 addrspace(1)* %out, i32 addrspace(3)* %ptr) nounwind {
15 %result = atomicrmw xchg i32 addrspace(3)* %ptr, i32 4 seq_cst
16 store i32 %result, i32 addrspace(1)* %out, align 4
24 define void @lds_atomic_xchg_ret_i32_offset(i32 addrspace(1)* %out, i32 addrspace(3)* %ptr) nounwind {
25 %gep = getelementptr i32, i32 addrspace(3)* %ptr, i32 4
26 %result = atomicrmw xchg i32 addrspace(3)* %gep, i32 4 seq_cst
27 store i32 %result, i32 addrspace(1)* %out, align 4
40 define void @lds_atomic_add_ret_i32(i32 addrspace(1)* %out, i32 addrspace(3)* %ptr) nounwind {
41 %result = atomicrmw add i32 addrspace(3)* %ptr, i32 4 seq_cst
42 store i32 %result, i32 addrspace(1)* %out, align 4
50 define void @lds_atomic_add_ret_i32_offset(i32 addrspace(1)* %out, i32 addrspace(3)* %ptr) nounwind {
51 %gep = getelementptr i32, i32 addrspace(3)* %ptr, i32 4
52 %result = atomicrmw add i32 addrspace(3)* %gep, i32 4 seq_cst
53 store i32 %result, i32 addrspace(1)* %out, align 4
62 define void @lds_atomic_add_ret_i32_bad_si_offset(i32 addrspace(1)* %out, i32 addrspace(3)* %ptr, i32 %a, i32 %b) nounwind {
65 %gep = getelementptr i32, i32 addrspace(3)* %ptr, i32 %add
66 %result = atomicrmw add i32 addrspace(3)* %gep, i32 4 seq_cst
67 store i32 %result, i32 addrspace(1)* %out, align 4
76 define void @lds_atomic_add1_ret_i32(i32 addrspace(1)* %out, i32 addrspace(3)* %ptr) nounwind {
77 %result = atomicrmw add i32 addrspace(3)* %ptr, i32 1 seq_cst
78 store i32 %result, i32 addrspace(1)* %out, align 4
87 define void @lds_atomic_add1_ret_i32_offset(i32 addrspace(1)* %out, i32 addrspace(3)* %ptr) nounwind {
88 %gep = getelementptr i32, i32 addrspace(3)* %ptr, i32 4
89 %result = atomicrmw add i32 addrspace(3)* %gep, i32 1 seq_cst
90 store i32 %result, i32 addrspace(1)* %out, align 4
99 define void @lds_atomic_add1_ret_i32_bad_si_offset(i32 addrspace(1)* %out, i32 addrspace(3)* %ptr, i32 %a, i32 %b) nounwind {
102 %gep = getelementptr i32, i32 addrspace(3)* %ptr, i32 %add
103 %result = atomicrmw add i32 addrspace(3)* %gep, i32 1 seq_cst
104 store i32 %result, i32 addrspace(1)* %out, align 4
112 define void @lds_atomic_sub_ret_i32(i32 addrspace(1)* %out, i32 addrspace(3)* %ptr) nounwind {
113 %result = atomicrmw sub i32 addrspace(3)* %ptr, i32 4 seq_cst
114 store i32 %result, i32 addrspace(1)* %out, align 4
122 define void @lds_atomic_sub_ret_i32_offset(i32 addrspace(1)* %out, i32 addrspace(3)* %ptr) nounwind {
123 %gep = getelementptr i32, i32 addrspace(3)* %ptr, i32 4
124 %result = atomicrmw sub i32 addrspace(3)* %gep, i32 4 seq_cst
125 store i32 %result, i32 addrspace(1)* %out, align 4
134 define void @lds_atomic_sub1_ret_i32(i32 addrspace(1)* %out, i32 addrspace(3)* %ptr) nounwind {
135 %result = atomicrmw sub i32 addrspace(3)* %ptr, i32 1 seq_cst
136 store i32 %result, i32 addrspace(1)* %out, align 4
145 define void @lds_atomic_sub1_ret_i32_offset(i32 addrspace(1)* %out, i32 addrspace(3)* %ptr) nounwind {
146 %gep = getelementptr i32, i32 addrspace(3)* %ptr, i32 4
147 %result = atomicrmw sub i32 addrspace(3)* %gep, i32 1 seq_cst
148 store i32 %result, i32 addrspace(1)* %out, align 4
156 define void @lds_atomic_and_ret_i32(i32 addrspace(1)* %out, i32 addrspace(3)* %ptr) nounwind {
157 %result = atomicrmw and i32 addrspace(3)* %ptr, i32 4 seq_cst
158 store i32 %result, i32 addrspace(1)* %out, align 4
166 define void @lds_atomic_and_ret_i32_offset(i32 addrspace(1)* %out, i32 addrspace(3)* %ptr) nounwind {
167 %gep = getelementptr i32, i32 addrspace(3)* %ptr, i32 4
168 %result = atomicrmw and i32 addrspace(3)* %gep, i32 4 seq_cst
169 store i32 %result, i32 addrspace(1)* %out, align 4
177 define void @lds_atomic_or_ret_i32(i32 addrspace(1)* %out, i32 addrspace(3)* %ptr) nounwind {
178 %result = atomicrmw or i32 addrspace(3)* %ptr, i32 4 seq_cst
179 store i32 %result, i32 addrspace(1)* %out, align 4
187 define void @lds_atomic_or_ret_i32_offset(i32 addrspace(1)* %out, i32 addrspace(3)* %ptr) nounwind {
188 %gep = getelementptr i32, i32 addrspace(3)* %ptr, i32 4
189 %result = atomicrmw or i32 addrspace(3)* %gep, i32 4 seq_cst
190 store i32 %result, i32 addrspace(1)* %out, align 4
198 define void @lds_atomic_xor_ret_i32(i32 addrspace(1)* %out, i32 addrspace(3)* %ptr) nounwind {
199 %result = atomicrmw xor i32 addrspace(3)* %ptr, i32 4 seq_cst
200 store i32 %result, i32 addrspace(1)* %out, align 4
208 define void @lds_atomic_xor_ret_i32_offset(i32 addrspace(1)* %out, i32 addrspace(3)* %ptr) nounwind {
209 %gep = getelementptr i32, i32 addrspace(3)* %ptr, i32 4
210 %result = atomicrmw xor i32 addrspace(3)* %gep, i32 4 seq_cst
211 store i32 %result, i32 addrspace(1)* %out, align 4
217 ; define void @lds_atomic_nand_ret_i32(i32 addrspace(1)* %out, i32 addrspace(3)* %ptr) nounwind {
218 ; %result = atomicrmw nand i32 addrspace(3)* %ptr, i32 4 seq_cst
219 ; store i32 %result, i32 addrspace(1)* %out, align 4
227 define void @lds_atomic_min_ret_i32(i32 addrspace(1)* %out, i32 addrspace(3)* %ptr) nounwind {
228 %result = atomicrmw min i32 addrspace(3)* %ptr, i32 4 seq_cst
229 store i32 %result, i32 addrspace(1)* %out, align 4
237 define void @lds_atomic_min_ret_i32_offset(i32 addrspace(1)* %out, i32 addrspace(3)* %ptr) nounwind {
238 %gep = getelementptr i32, i32 addrspace(3)* %ptr, i32 4
239 %result = atomicrmw min i32 addrspace(3)* %gep, i32 4 seq_cst
240 store i32 %result, i32 addrspace(1)* %out, align 4
248 define void @lds_atomic_max_ret_i32(i32 addrspace(1)* %out, i32 addrspace(3)* %ptr) nounwind {
249 %result = atomicrmw max i32 addrspace(3)* %ptr, i32 4 seq_cst
250 store i32 %result, i32 addrspace(1)* %out, align 4
258 define void @lds_atomic_max_ret_i32_offset(i32 addrspace(1)* %out, i32 addrspace(3)* %ptr) nounwind {
259 %gep = getelementptr i32, i32 addrspace(3)* %ptr, i32 4
260 %result = atomicrmw max i32 addrspace(3)* %gep, i32 4 seq_cst
261 store i32 %result, i32 addrspace(1)* %out, align 4
269 define void @lds_atomic_umin_ret_i32(i32 addrspace(1)* %out, i32 addrspace(3)* %ptr) nounwind {
270 %result = atomicrmw umin i32 addrspace(3)* %ptr, i32 4 seq_cst
271 store i32 %result, i32 addrspace(1)* %out, align 4
279 define void @lds_atomic_umin_ret_i32_offset(i32 addrspace(1)* %out, i32 addrspace(3)* %ptr) nounwind {
280 %gep = getelementptr i32, i32 addrspace(3)* %ptr, i32 4
281 %result = atomicrmw umin i32 addrspace(3)* %gep, i32 4 seq_cst
282 store i32 %result, i32 addrspace(1)* %out, align 4
290 define void @lds_atomic_umax_ret_i32(i32 addrspace(1)* %out, i32 addrspace(3)* %ptr) nounwind {
291 %result = atomicrmw umax i32 addrspace(3)* %ptr, i32 4 seq_cst
292 store i32 %result, i32 addrspace(1)* %out, align 4
300 define void @lds_atomic_umax_ret_i32_offset(i32 addrspace(1)* %out, i32 addrspace(3)* %ptr) nounwind {
301 %gep = getelementptr i32, i32 addrspace(3)* %ptr, i32 4
302 %result = atomicrmw umax i32 addrspace(3)* %gep, i32 4 seq_cst
303 store i32 %result, i32 addrspace(1)* %out, align 4
313 define void @lds_atomic_xchg_noret_i32(i32 addrspace(3)* %ptr) nounwind {
314 %result = atomicrmw xchg i32 addrspace(3)* %ptr, i32 4 seq_cst
321 define void @lds_atomic_xchg_noret_i32_offset(i32 addrspace(3)* %ptr) nounwind {
322 %gep = getelementptr i32, i32 addrspace(3)* %ptr, i32 4
323 %result = atomicrmw xchg i32 addrspace(3)* %gep, i32 4 seq_cst
333 define void @lds_atomic_add_noret_i32(i32 addrspace(3)* %ptr) nounwind {
334 %result = atomicrmw add i32 addrspace(3)* %ptr, i32 4 seq_cst
341 define void @lds_atomic_add_noret_i32_offset(i32 addrspace(3)* %ptr) nounwind {
342 %gep = getelementptr i32, i32 addrspace(3)* %ptr, i32 4
343 %result = atomicrmw add i32 addrspace(3)* %gep, i32 4 seq_cst
351 define void @lds_atomic_add_noret_i32_bad_si_offset(i32 addrspace(3)* %ptr, i32 %a, i32 %b) nounwind {
354 %gep = getelementptr i32, i32 addrspace(3)* %ptr, i32 %add
355 %result = atomicrmw add i32 addrspace(3)* %gep, i32 4 seq_cst
363 define void @lds_atomic_add1_noret_i32(i32 addrspace(3)* %ptr) nounwind {
364 %result = atomicrmw add i32 addrspace(3)* %ptr, i32 1 seq_cst
372 define void @lds_atomic_add1_noret_i32_offset(i32 addrspace(3)* %ptr) nounwind {
373 %gep = getelementptr i32, i32 addrspace(3)* %ptr, i32 4
374 %result = atomicrmw add i32 addrspace(3)* %gep, i32 1 seq_cst
382 define void @lds_atomic_add1_noret_i32_bad_si_offset(i32 addrspace(3)* %ptr, i32 %a, i32 %b) nounwind {
385 %gep = getelementptr i32, i32 addrspace(3)* %ptr, i32 %add
386 %result = atomicrmw add i32 addrspace(3)* %gep, i32 1 seq_cst
393 define void @lds_atomic_sub_noret_i32(i32 addrspace(3)* %ptr) nounwind {
394 %result = atomicrmw sub i32 addrspace(3)* %ptr, i32 4 seq_cst
401 define void @lds_atomic_sub_noret_i32_offset(i32 addrspace(3)* %ptr) nounwind {
402 %gep = getelementptr i32, i32 addrspace(3)* %ptr, i32 4
403 %result = atomicrmw sub i32 addrspace(3)* %gep, i32 4 seq_cst
411 define void @lds_atomic_sub1_noret_i32(i32 addrspace(3)* %ptr) nounwind {
412 %result = atomicrmw sub i32 addrspace(3)* %ptr, i32 1 seq_cst
420 define void @lds_atomic_sub1_noret_i32_offset(i32 addrspace(3)* %ptr) nounwind {
421 %gep = getelementptr i32, i32 addrspace(3)* %ptr, i32 4
422 %result = atomicrmw sub i32 addrspace(3)* %gep, i32 1 seq_cst
429 define void @lds_atomic_and_noret_i32(i32 addrspace(3)* %ptr) nounwind {
430 %result = atomicrmw and i32 addrspace(3)* %ptr, i32 4 seq_cst
437 define void @lds_atomic_and_noret_i32_offset(i32 addrspace(3)* %ptr) nounwind {
438 %gep = getelementptr i32, i32 addrspace(3)* %ptr, i32 4
439 %result = atomicrmw and i32 addrspace(3)* %gep, i32 4 seq_cst
446 define void @lds_atomic_or_noret_i32(i32 addrspace(3)* %ptr) nounwind {
447 %result = atomicrmw or i32 addrspace(3)* %ptr, i32 4 seq_cst
454 define void @lds_atomic_or_noret_i32_offset(i32 addrspace(3)* %ptr) nounwind {
455 %gep = getelementptr i32, i32 addrspace(3)* %ptr, i32 4
456 %result = atomicrmw or i32 addrspace(3)* %gep, i32 4 seq_cst
463 define void @lds_atomic_xor_noret_i32(i32 addrspace(3)* %ptr) nounwind {
464 %result = atomicrmw xor i32 addrspace(3)* %ptr, i32 4 seq_cst
471 define void @lds_atomic_xor_noret_i32_offset(i32 addrspace(3)* %ptr) nounwind {
472 %gep = getelementptr i32, i32 addrspace(3)* %ptr, i32 4
473 %result = atomicrmw xor i32 addrspace(3)* %gep, i32 4 seq_cst
479 ; define void @lds_atomic_nand_noret_i32(i32 addrspace(3)* %ptr) nounwind {
480 ; %result = atomicrmw nand i32 addrspace(3)* %ptr, i32 4 seq_cst
487 define void @lds_atomic_min_noret_i32(i32 addrspace(3)* %ptr) nounwind {
488 %result = atomicrmw min i32 addrspace(3)* %ptr, i32 4 seq_cst
495 define void @lds_atomic_min_noret_i32_offset(i32 addrspace(3)* %ptr) nounwind {
496 %gep = getelementptr i32, i32 addrspace(3)* %ptr, i32 4
497 %result = atomicrmw min i32 addrspace(3)* %gep, i32 4 seq_cst
504 define void @lds_atomic_max_noret_i32(i32 addrspace(3)* %ptr) nounwind {
505 %result = atomicrmw max i32 addrspace(3)* %ptr, i32 4 seq_cst
512 define void @lds_atomic_max_noret_i32_offset(i32 addrspace(3)* %ptr) nounwind {
513 %gep = getelementptr i32, i32 addrspace(3)* %ptr, i32 4
514 %result = atomicrmw max i32 addrspace(3)* %gep, i32 4 seq_cst
521 define void @lds_atomic_umin_noret_i32(i32 addrspace(3)* %ptr) nounwind {
522 %result = atomicrmw umin i32 addrspace(3)* %ptr, i32 4 seq_cst
529 define void @lds_atomic_umin_noret_i32_offset(i32 addrspace(3)* %ptr) nounwind {
530 %gep = getelementptr i32, i32 addrspace(3)* %ptr, i32 4
531 %result = atomicrmw umin i32 addrspace(3)* %gep, i32 4 seq_cst
538 define void @lds_atomic_umax_noret_i32(i32 addrspace(3)* %ptr) nounwind {
539 %result = atomicrmw umax i32 addrspace(3)* %ptr, i32 4 seq_cst
546 define void @lds_atomic_umax_noret_i32_offset(i32 addrspace(3)* %ptr) nounwind {
547 %gep = getelementptr i32, i32 addrspace(3)* %ptr, i32 4
548 %result = atomicrmw umax i32 addrspace(3)* %gep, i32 4 seq_cst
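
Every matched definition follows the same pattern: an atomicrmw on an LDS pointer (addrspace(3)), with the "ret" variants storing the returned old value to a global pointer (addrspace(1)). The matches only show the lines containing addrspace; a minimal self-contained sketch of that pattern, assuming the ret void and closing brace elided from the listing above, is:

define void @lds_atomic_xchg_ret_i32(i32 addrspace(1)* %out, i32 addrspace(3)* %ptr) nounwind {
  ; atomically exchange the LDS location with 4, yielding the old value
  %result = atomicrmw xchg i32 addrspace(3)* %ptr, i32 4 seq_cst
  ; write the old value out to global memory
  store i32 %result, i32 addrspace(1)* %out, align 4
  ret void
}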