Lines Matching refs:addrspace

14 define void @lds_atomic_xchg_ret_i32(i32 addrspace(1)* %out, i32 addrspace(3)* %ptr) nounwind {
15 %result = atomicrmw xchg i32 addrspace(3)* %ptr, i32 4 seq_cst
16 store i32 %result, i32 addrspace(1)* %out, align 4
24 define void @lds_atomic_xchg_ret_i32_offset(i32 addrspace(1)* %out, i32 addrspace(3)* %ptr) nounwind {
25 %gep = getelementptr i32, i32 addrspace(3)* %ptr, i32 4
26 %result = atomicrmw xchg i32 addrspace(3)* %gep, i32 4 seq_cst
27 store i32 %result, i32 addrspace(1)* %out, align 4
40 define void @lds_atomic_add_ret_i32(i32 addrspace(1)* %out, i32 addrspace(3)* %ptr) nounwind {
41 %result = atomicrmw add i32 addrspace(3)* %ptr, i32 4 seq_cst
42 store i32 %result, i32 addrspace(1)* %out, align 4
50 define void @lds_atomic_add_ret_i32_offset(i32 addrspace(1)* %out, i32 addrspace(3)* %ptr) nounwind {
51 %gep = getelementptr i32, i32 addrspace(3)* %ptr, i32 4
52 %result = atomicrmw add i32 addrspace(3)* %gep, i32 4 seq_cst
53 store i32 %result, i32 addrspace(1)* %out, align 4
62 define void @lds_atomic_add_ret_i32_bad_si_offset(i32 addrspace(1)* %out, i32 addrspace(3)* %ptr, i32 %a, i32 %b) nounwind {
65 %gep = getelementptr i32, i32 addrspace(3)* %ptr, i32 %add
66 %result = atomicrmw add i32 addrspace(3)* %gep, i32 4 seq_cst
67 store i32 %result, i32 addrspace(1)* %out, align 4
76 define void @lds_atomic_inc_ret_i32(i32 addrspace(1)* %out, i32 addrspace(3)* %ptr) nounwind {
77 %result = atomicrmw add i32 addrspace(3)* %ptr, i32 1 seq_cst
78 store i32 %result, i32 addrspace(1)* %out, align 4
87 define void @lds_atomic_inc_ret_i32_offset(i32 addrspace(1)* %out, i32 addrspace(3)* %ptr) nounwind {
88 %gep = getelementptr i32, i32 addrspace(3)* %ptr, i32 4
89 %result = atomicrmw add i32 addrspace(3)* %gep, i32 1 seq_cst
90 store i32 %result, i32 addrspace(1)* %out, align 4
99 define void @lds_atomic_inc_ret_i32_bad_si_offset(i32 addrspace(1)* %out, i32 addrspace(3)* %ptr, i32 %a, i32 %b) nounwind {
102 %gep = getelementptr i32, i32 addrspace(3)* %ptr, i32 %add
103 %result = atomicrmw add i32 addrspace(3)* %gep, i32 1 seq_cst
104 store i32 %result, i32 addrspace(1)* %out, align 4
112 define void @lds_atomic_sub_ret_i32(i32 addrspace(1)* %out, i32 addrspace(3)* %ptr) nounwind {
113 %result = atomicrmw sub i32 addrspace(3)* %ptr, i32 4 seq_cst
114 store i32 %result, i32 addrspace(1)* %out, align 4
122 define void @lds_atomic_sub_ret_i32_offset(i32 addrspace(1)* %out, i32 addrspace(3)* %ptr) nounwind {
123 %gep = getelementptr i32, i32 addrspace(3)* %ptr, i32 4
124 %result = atomicrmw sub i32 addrspace(3)* %gep, i32 4 seq_cst
125 store i32 %result, i32 addrspace(1)* %out, align 4
134 define void @lds_atomic_dec_ret_i32(i32 addrspace(1)* %out, i32 addrspace(3)* %ptr) nounwind {
135 %result = atomicrmw sub i32 addrspace(3)* %ptr, i32 1 seq_cst
136 store i32 %result, i32 addrspace(1)* %out, align 4
145 define void @lds_atomic_dec_ret_i32_offset(i32 addrspace(1)* %out, i32 addrspace(3)* %ptr) nounwind {
146 %gep = getelementptr i32, i32 addrspace(3)* %ptr, i32 4
147 %result = atomicrmw sub i32 addrspace(3)* %gep, i32 1 seq_cst
148 store i32 %result, i32 addrspace(1)* %out, align 4
156 define void @lds_atomic_and_ret_i32(i32 addrspace(1)* %out, i32 addrspace(3)* %ptr) nounwind {
157 %result = atomicrmw and i32 addrspace(3)* %ptr, i32 4 seq_cst
158 store i32 %result, i32 addrspace(1)* %out, align 4
166 define void @lds_atomic_and_ret_i32_offset(i32 addrspace(1)* %out, i32 addrspace(3)* %ptr) nounwind {
167 %gep = getelementptr i32, i32 addrspace(3)* %ptr, i32 4
168 %result = atomicrmw and i32 addrspace(3)* %gep, i32 4 seq_cst
169 store i32 %result, i32 addrspace(1)* %out, align 4
177 define void @lds_atomic_or_ret_i32(i32 addrspace(1)* %out, i32 addrspace(3)* %ptr) nounwind {
178 %result = atomicrmw or i32 addrspace(3)* %ptr, i32 4 seq_cst
179 store i32 %result, i32 addrspace(1)* %out, align 4
187 define void @lds_atomic_or_ret_i32_offset(i32 addrspace(1)* %out, i32 addrspace(3)* %ptr) nounwind {
188 %gep = getelementptr i32, i32 addrspace(3)* %ptr, i32 4
189 %result = atomicrmw or i32 addrspace(3)* %gep, i32 4 seq_cst
190 store i32 %result, i32 addrspace(1)* %out, align 4
198 define void @lds_atomic_xor_ret_i32(i32 addrspace(1)* %out, i32 addrspace(3)* %ptr) nounwind {
199 %result = atomicrmw xor i32 addrspace(3)* %ptr, i32 4 seq_cst
200 store i32 %result, i32 addrspace(1)* %out, align 4
208 define void @lds_atomic_xor_ret_i32_offset(i32 addrspace(1)* %out, i32 addrspace(3)* %ptr) nounwind {
209 %gep = getelementptr i32, i32 addrspace(3)* %ptr, i32 4
210 %result = atomicrmw xor i32 addrspace(3)* %gep, i32 4 seq_cst
211 store i32 %result, i32 addrspace(1)* %out, align 4
217 ; define void @lds_atomic_nand_ret_i32(i32 addrspace(1)* %out, i32 addrspace(3)* %ptr) nounwind {
218 ; %result = atomicrmw nand i32 addrspace(3)* %ptr, i32 4 seq_cst
219 ; store i32 %result, i32 addrspace(1)* %out, align 4
227 define void @lds_atomic_min_ret_i32(i32 addrspace(1)* %out, i32 addrspace(3)* %ptr) nounwind {
228 %result = atomicrmw min i32 addrspace(3)* %ptr, i32 4 seq_cst
229 store i32 %result, i32 addrspace(1)* %out, align 4
237 define void @lds_atomic_min_ret_i32_offset(i32 addrspace(1)* %out, i32 addrspace(3)* %ptr) nounwind {
238 %gep = getelementptr i32, i32 addrspace(3)* %ptr, i32 4
239 %result = atomicrmw min i32 addrspace(3)* %gep, i32 4 seq_cst
240 store i32 %result, i32 addrspace(1)* %out, align 4
248 define void @lds_atomic_max_ret_i32(i32 addrspace(1)* %out, i32 addrspace(3)* %ptr) nounwind {
249 %result = atomicrmw max i32 addrspace(3)* %ptr, i32 4 seq_cst
250 store i32 %result, i32 addrspace(1)* %out, align 4
258 define void @lds_atomic_max_ret_i32_offset(i32 addrspace(1)* %out, i32 addrspace(3)* %ptr) nounwind {
259 %gep = getelementptr i32, i32 addrspace(3)* %ptr, i32 4
260 %result = atomicrmw max i32 addrspace(3)* %gep, i32 4 seq_cst
261 store i32 %result, i32 addrspace(1)* %out, align 4
269 define void @lds_atomic_umin_ret_i32(i32 addrspace(1)* %out, i32 addrspace(3)* %ptr) nounwind {
270 %result = atomicrmw umin i32 addrspace(3)* %ptr, i32 4 seq_cst
271 store i32 %result, i32 addrspace(1)* %out, align 4
279 define void @lds_atomic_umin_ret_i32_offset(i32 addrspace(1)* %out, i32 addrspace(3)* %ptr) nounwind {
280 %gep = getelementptr i32, i32 addrspace(3)* %ptr, i32 4
281 %result = atomicrmw umin i32 addrspace(3)* %gep, i32 4 seq_cst
282 store i32 %result, i32 addrspace(1)* %out, align 4
290 define void @lds_atomic_umax_ret_i32(i32 addrspace(1)* %out, i32 addrspace(3)* %ptr) nounwind {
291 %result = atomicrmw umax i32 addrspace(3)* %ptr, i32 4 seq_cst
292 store i32 %result, i32 addrspace(1)* %out, align 4
300 define void @lds_atomic_umax_ret_i32_offset(i32 addrspace(1)* %out, i32 addrspace(3)* %ptr) nounwind {
301 %gep = getelementptr i32, i32 addrspace(3)* %ptr, i32 4
302 %result = atomicrmw umax i32 addrspace(3)* %gep, i32 4 seq_cst
303 store i32 %result, i32 addrspace(1)* %out, align 4
313 define void @lds_atomic_xchg_noret_i32(i32 addrspace(3)* %ptr) nounwind {
314 %result = atomicrmw xchg i32 addrspace(3)* %ptr, i32 4 seq_cst
321 define void @lds_atomic_xchg_noret_i32_offset(i32 addrspace(3)* %ptr) nounwind {
322 %gep = getelementptr i32, i32 addrspace(3)* %ptr, i32 4
323 %result = atomicrmw xchg i32 addrspace(3)* %gep, i32 4 seq_cst
334 define void @lds_atomic_add_noret_i32(i32 addrspace(3)* %ptr) nounwind {
335 %result = atomicrmw add i32 addrspace(3)* %ptr, i32 4 seq_cst
342 define void @lds_atomic_add_noret_i32_offset(i32 addrspace(3)* %ptr) nounwind {
343 %gep = getelementptr i32, i32 addrspace(3)* %ptr, i32 4
344 %result = atomicrmw add i32 addrspace(3)* %gep, i32 4 seq_cst
352 define void @lds_atomic_add_noret_i32_bad_si_offset(i32 addrspace(3)* %ptr, i32 %a, i32 %b) nounwind {
355 %gep = getelementptr i32, i32 addrspace(3)* %ptr, i32 %add
356 %result = atomicrmw add i32 addrspace(3)* %gep, i32 4 seq_cst
364 define void @lds_atomic_inc_noret_i32(i32 addrspace(3)* %ptr) nounwind {
365 %result = atomicrmw add i32 addrspace(3)* %ptr, i32 1 seq_cst
373 define void @lds_atomic_inc_noret_i32_offset(i32 addrspace(3)* %ptr) nounwind {
374 %gep = getelementptr i32, i32 addrspace(3)* %ptr, i32 4
375 %result = atomicrmw add i32 addrspace(3)* %gep, i32 1 seq_cst
383 define void @lds_atomic_inc_noret_i32_bad_si_offset(i32 addrspace(3)* %ptr, i32 %a, i32 %b) nounwind {
386 %gep = getelementptr i32, i32 addrspace(3)* %ptr, i32 %add
387 %result = atomicrmw add i32 addrspace(3)* %gep, i32 1 seq_cst
394 define void @lds_atomic_sub_noret_i32(i32 addrspace(3)* %ptr) nounwind {
395 %result = atomicrmw sub i32 addrspace(3)* %ptr, i32 4 seq_cst
402 define void @lds_atomic_sub_noret_i32_offset(i32 addrspace(3)* %ptr) nounwind {
403 %gep = getelementptr i32, i32 addrspace(3)* %ptr, i32 4
404 %result = atomicrmw sub i32 addrspace(3)* %gep, i32 4 seq_cst
412 define void @lds_atomic_dec_noret_i32(i32 addrspace(3)* %ptr) nounwind {
413 %result = atomicrmw sub i32 addrspace(3)* %ptr, i32 1 seq_cst
421 define void @lds_atomic_dec_noret_i32_offset(i32 addrspace(3)* %ptr) nounwind {
422 %gep = getelementptr i32, i32 addrspace(3)* %ptr, i32 4
423 %result = atomicrmw sub i32 addrspace(3)* %gep, i32 1 seq_cst
430 define void @lds_atomic_and_noret_i32(i32 addrspace(3)* %ptr) nounwind {
431 %result = atomicrmw and i32 addrspace(3)* %ptr, i32 4 seq_cst
438 define void @lds_atomic_and_noret_i32_offset(i32 addrspace(3)* %ptr) nounwind {
439 %gep = getelementptr i32, i32 addrspace(3)* %ptr, i32 4
440 %result = atomicrmw and i32 addrspace(3)* %gep, i32 4 seq_cst
447 define void @lds_atomic_or_noret_i32(i32 addrspace(3)* %ptr) nounwind {
448 %result = atomicrmw or i32 addrspace(3)* %ptr, i32 4 seq_cst
455 define void @lds_atomic_or_noret_i32_offset(i32 addrspace(3)* %ptr) nounwind {
456 %gep = getelementptr i32, i32 addrspace(3)* %ptr, i32 4
457 %result = atomicrmw or i32 addrspace(3)* %gep, i32 4 seq_cst
464 define void @lds_atomic_xor_noret_i32(i32 addrspace(3)* %ptr) nounwind {
465 %result = atomicrmw xor i32 addrspace(3)* %ptr, i32 4 seq_cst
472 define void @lds_atomic_xor_noret_i32_offset(i32 addrspace(3)* %ptr) nounwind {
473 %gep = getelementptr i32, i32 addrspace(3)* %ptr, i32 4
474 %result = atomicrmw xor i32 addrspace(3)* %gep, i32 4 seq_cst
480 ; define void @lds_atomic_nand_noret_i32(i32 addrspace(3)* %ptr) nounwind {
481 ; %result = atomicrmw nand i32 addrspace(3)* %ptr, i32 4 seq_cst
488 define void @lds_atomic_min_noret_i32(i32 addrspace(3)* %ptr) nounwind {
489 %result = atomicrmw min i32 addrspace(3)* %ptr, i32 4 seq_cst
496 define void @lds_atomic_min_noret_i32_offset(i32 addrspace(3)* %ptr) nounwind {
497 %gep = getelementptr i32, i32 addrspace(3)* %ptr, i32 4
498 %result = atomicrmw min i32 addrspace(3)* %gep, i32 4 seq_cst
505 define void @lds_atomic_max_noret_i32(i32 addrspace(3)* %ptr) nounwind {
506 %result = atomicrmw max i32 addrspace(3)* %ptr, i32 4 seq_cst
513 define void @lds_atomic_max_noret_i32_offset(i32 addrspace(3)* %ptr) nounwind {
514 %gep = getelementptr i32, i32 addrspace(3)* %ptr, i32 4
515 %result = atomicrmw max i32 addrspace(3)* %gep, i32 4 seq_cst
522 define void @lds_atomic_umin_noret_i32(i32 addrspace(3)* %ptr) nounwind {
523 %result = atomicrmw umin i32 addrspace(3)* %ptr, i32 4 seq_cst
530 define void @lds_atomic_umin_noret_i32_offset(i32 addrspace(3)* %ptr) nounwind {
531 %gep = getelementptr i32, i32 addrspace(3)* %ptr, i32 4
532 %result = atomicrmw umin i32 addrspace(3)* %gep, i32 4 seq_cst
539 define void @lds_atomic_umax_noret_i32(i32 addrspace(3)* %ptr) nounwind {
540 %result = atomicrmw umax i32 addrspace(3)* %ptr, i32 4 seq_cst
547 define void @lds_atomic_umax_noret_i32_offset(i32 addrspace(3)* %ptr) nounwind {
548 %gep = getelementptr i32, i32 addrspace(3)* %ptr, i32 4
549 %result = atomicrmw umax i32 addrspace(3)* %gep, i32 4 seq_cst