Lines Matching refs:seq_cst — every seq_cst (sequentially consistent) atomic operation in the file, each match prefixed with its line number. The matches run through the atomicrmw operations (add, and, sub, max, umax, min, umin, or, xchg, xor), then cmpxchg, then atomic loads and stores, all on i32 values through addrspace(4) pointers in the old typed-pointer IR syntax.

9 %val = atomicrmw volatile add i32 addrspace(4)* %gep, i32 %in seq_cst
19 %val = atomicrmw volatile add i32 addrspace(4)* %gep, i32 %in seq_cst
30 %val = atomicrmw volatile add i32 addrspace(4)* %gep, i32 %in seq_cst
41 %val = atomicrmw volatile add i32 addrspace(4)* %gep, i32 %in seq_cst
50 %val = atomicrmw volatile add i32 addrspace(4)* %out, i32 %in seq_cst
59 %val = atomicrmw volatile add i32 addrspace(4)* %out, i32 %in seq_cst
69 %val = atomicrmw volatile add i32 addrspace(4)* %ptr, i32 %in seq_cst
79 %val = atomicrmw volatile add i32 addrspace(4)* %ptr, i32 %in seq_cst
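
All eight "add" matches above share one shape, and the later atomicrmw groups (and, sub, max, umax, min, umin, or, xchg, xor) repeat it with a different operation. As a minimal sketch, a self-contained function around a match like the one at line 9 could look as follows; the function name, the element offset of 4, and the surrounding structure are illustrative assumptions, not lines taken from the searched file:

define void @atomic_add_i32_offset(i32 addrspace(4)* %out, i32 %in) {
entry:
  ; address a word a fixed number of i32 elements past %out (offset assumed)
  %gep = getelementptr i32, i32 addrspace(4)* %out, i32 4
  ; read-modify-write: *%gep += %in, sequentially consistent; volatile keeps
  ; the operation from being elided or reordered by the optimizer
  %val = atomicrmw volatile add i32 addrspace(4)* %gep, i32 %in seq_cst
  ret void
}

The %out and %ptr matches in each group presumably hit the incoming pointer directly rather than a GEP, with and without a later use of the returned old value %val.
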
89 %val = atomicrmw volatile and i32 addrspace(4)* %gep, i32 %in seq_cst
99 %val = atomicrmw volatile and i32 addrspace(4)* %gep, i32 %in seq_cst
110 %val = atomicrmw volatile and i32 addrspace(4)* %gep, i32 %in seq_cst
121 %val = atomicrmw volatile and i32 addrspace(4)* %gep, i32 %in seq_cst
130 %val = atomicrmw volatile and i32 addrspace(4)* %out, i32 %in seq_cst
139 %val = atomicrmw volatile and i32 addrspace(4)* %out, i32 %in seq_cst
149 %val = atomicrmw volatile and i32 addrspace(4)* %ptr, i32 %in seq_cst
159 %val = atomicrmw volatile and i32 addrspace(4)* %ptr, i32 %in seq_cst

169 %val = atomicrmw volatile sub i32 addrspace(4)* %gep, i32 %in seq_cst
179 %val = atomicrmw volatile sub i32 addrspace(4)* %gep, i32 %in seq_cst
190 %val = atomicrmw volatile sub i32 addrspace(4)* %gep, i32 %in seq_cst
201 %val = atomicrmw volatile sub i32 addrspace(4)* %gep, i32 %in seq_cst
210 %val = atomicrmw volatile sub i32 addrspace(4)* %out, i32 %in seq_cst
219 %val = atomicrmw volatile sub i32 addrspace(4)* %out, i32 %in seq_cst
229 %val = atomicrmw volatile sub i32 addrspace(4)* %ptr, i32 %in seq_cst
239 %val = atomicrmw volatile sub i32 addrspace(4)* %ptr, i32 %in seq_cst

249 %val = atomicrmw volatile max i32 addrspace(4)* %gep, i32 %in seq_cst
259 %val = atomicrmw volatile max i32 addrspace(4)* %gep, i32 %in seq_cst
270 %val = atomicrmw volatile max i32 addrspace(4)* %gep, i32 %in seq_cst
281 %val = atomicrmw volatile max i32 addrspace(4)* %gep, i32 %in seq_cst
290 %val = atomicrmw volatile max i32 addrspace(4)* %out, i32 %in seq_cst
299 %val = atomicrmw volatile max i32 addrspace(4)* %out, i32 %in seq_cst
309 %val = atomicrmw volatile max i32 addrspace(4)* %ptr, i32 %in seq_cst
319 %val = atomicrmw volatile max i32 addrspace(4)* %ptr, i32 %in seq_cst

329 %val = atomicrmw volatile umax i32 addrspace(4)* %gep, i32 %in seq_cst
339 %val = atomicrmw volatile umax i32 addrspace(4)* %gep, i32 %in seq_cst
350 %val = atomicrmw volatile umax i32 addrspace(4)* %gep, i32 %in seq_cst
361 %val = atomicrmw volatile umax i32 addrspace(4)* %gep, i32 %in seq_cst
370 %val = atomicrmw volatile umax i32 addrspace(4)* %out, i32 %in seq_cst
379 %val = atomicrmw volatile umax i32 addrspace(4)* %out, i32 %in seq_cst
389 %val = atomicrmw volatile umax i32 addrspace(4)* %ptr, i32 %in seq_cst
399 %val = atomicrmw volatile umax i32 addrspace(4)* %ptr, i32 %in seq_cst

409 %val = atomicrmw volatile min i32 addrspace(4)* %gep, i32 %in seq_cst
419 %val = atomicrmw volatile min i32 addrspace(4)* %gep, i32 %in seq_cst
430 %val = atomicrmw volatile min i32 addrspace(4)* %gep, i32 %in seq_cst
441 %val = atomicrmw volatile min i32 addrspace(4)* %gep, i32 %in seq_cst
450 %val = atomicrmw volatile min i32 addrspace(4)* %out, i32 %in seq_cst
459 %val = atomicrmw volatile min i32 addrspace(4)* %out, i32 %in seq_cst
469 %val = atomicrmw volatile min i32 addrspace(4)* %ptr, i32 %in seq_cst
479 %val = atomicrmw volatile min i32 addrspace(4)* %ptr, i32 %in seq_cst

489 %val = atomicrmw volatile umin i32 addrspace(4)* %gep, i32 %in seq_cst
499 %val = atomicrmw volatile umin i32 addrspace(4)* %gep, i32 %in seq_cst
510 %val = atomicrmw volatile umin i32 addrspace(4)* %gep, i32 %in seq_cst
521 %val = atomicrmw volatile umin i32 addrspace(4)* %gep, i32 %in seq_cst
530 %val = atomicrmw volatile umin i32 addrspace(4)* %out, i32 %in seq_cst
539 %val = atomicrmw volatile umin i32 addrspace(4)* %out, i32 %in seq_cst
549 %val = atomicrmw volatile umin i32 addrspace(4)* %ptr, i32 %in seq_cst
559 %val = atomicrmw volatile umin i32 addrspace(4)* %ptr, i32 %in seq_cst
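
The four min/max groups above differ only in signedness: max and min order their operands as signed integers, umax and umin as unsigned. Expressed non-atomically, the value that atomicrmw umax writes back amounts to an unsigned compare plus a select (an illustration with assumed value names, not a line from the file):

  %cmp = icmp ugt i32 %old, %in             ; unsigned greater-than
  %new = select i1 %cmp, i32 %old, i32 %in  ; keep the larger value

For max and min the compare would instead be icmp sgt and icmp slt.
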
569 %val = atomicrmw volatile or i32 addrspace(4)* %gep, i32 %in seq_cst
579 %val = atomicrmw volatile or i32 addrspace(4)* %gep, i32 %in seq_cst
590 %val = atomicrmw volatile or i32 addrspace(4)* %gep, i32 %in seq_cst
601 %val = atomicrmw volatile or i32 addrspace(4)* %gep, i32 %in seq_cst
610 %val = atomicrmw volatile or i32 addrspace(4)* %out, i32 %in seq_cst
619 %val = atomicrmw volatile or i32 addrspace(4)* %out, i32 %in seq_cst
629 %val = atomicrmw volatile or i32 addrspace(4)* %ptr, i32 %in seq_cst
639 %val = atomicrmw volatile or i32 addrspace(4)* %ptr, i32 %in seq_cst

649 %val = atomicrmw volatile xchg i32 addrspace(4)* %gep, i32 %in seq_cst
659 %val = atomicrmw volatile xchg i32 addrspace(4)* %gep, i32 %in seq_cst
670 %val = atomicrmw volatile xchg i32 addrspace(4)* %gep, i32 %in seq_cst
681 %val = atomicrmw volatile xchg i32 addrspace(4)* %gep, i32 %in seq_cst
690 %val = atomicrmw volatile xchg i32 addrspace(4)* %out, i32 %in seq_cst
699 %val = atomicrmw volatile xchg i32 addrspace(4)* %out, i32 %in seq_cst
709 %val = atomicrmw volatile xchg i32 addrspace(4)* %ptr, i32 %in seq_cst
719 %val = atomicrmw volatile xchg i32 addrspace(4)* %ptr, i32 %in seq_cst

731 %val = cmpxchg volatile i32 addrspace(4)* %gep, i32 %old, i32 %in seq_cst seq_cst
741 %val = cmpxchg volatile i32 addrspace(4)* %gep, i32 %old, i32 %in seq_cst seq_cst
753 %val = cmpxchg volatile i32 addrspace(4)* %gep, i32 %old, i32 %in seq_cst seq_cst
764 %val = cmpxchg volatile i32 addrspace(4)* %gep, i32 %old, i32 %in seq_cst seq_cst
774 %val = cmpxchg volatile i32 addrspace(4)* %out, i32 %old, i32 %in seq_cst seq_cst
783 %val = cmpxchg volatile i32 addrspace(4)* %out, i32 %old, i32 %in seq_cst seq_cst
794 %val = cmpxchg volatile i32 addrspace(4)* %ptr, i32 %old, i32 %in seq_cst seq_cst
804 %val = cmpxchg volatile i32 addrspace(4)* %ptr, i32 %old, i32 %in seq_cst seq_cst
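
Unlike atomicrmw, cmpxchg takes two value operands (the expected value %old and the replacement %in) and two orderings (success, then failure), and since LLVM 3.5 it returns a { i32, i1 } pair rather than a bare i32, so %val in these matches is a struct. A sketch of a complete function around one of them, with assumed names and an assumed store of the loaded value:

define void @atomic_cmpxchg_i32_ret(i32 addrspace(4)* %out, i32 addrspace(4)* %out2, i32 %old, i32 %in) {
entry:
  ; compare *%out against %old; if equal, write %in; seq_cst on both paths
  %val = cmpxchg volatile i32 addrspace(4)* %out, i32 %old, i32 %in seq_cst seq_cst
  ; field 0 is the value loaded from memory, field 1 the success flag
  %loaded = extractvalue { i32, i1 } %val, 0
  store i32 %loaded, i32 addrspace(4)* %out2
  ret void
}
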
815 %val = atomicrmw volatile xor i32 addrspace(4)* %gep, i32 %in seq_cst
825 %val = atomicrmw volatile xor i32 addrspace(4)* %gep, i32 %in seq_cst
836 %val = atomicrmw volatile xor i32 addrspace(4)* %gep, i32 %in seq_cst
847 %val = atomicrmw volatile xor i32 addrspace(4)* %gep, i32 %in seq_cst
856 %val = atomicrmw volatile xor i32 addrspace(4)* %out, i32 %in seq_cst
865 %val = atomicrmw volatile xor i32 addrspace(4)* %out, i32 %in seq_cst
875 %val = atomicrmw volatile xor i32 addrspace(4)* %ptr, i32 %in seq_cst
885 %val = atomicrmw volatile xor i32 addrspace(4)* %ptr, i32 %in seq_cst

896 %val = load atomic i32, i32 addrspace(4)* %gep seq_cst, align 4
906 %val = load atomic i32, i32 addrspace(4)* %in seq_cst, align 4
918 %val = load atomic i32, i32 addrspace(4)* %gep seq_cst, align 4
929 %val = load atomic i32, i32 addrspace(4)* %ptr seq_cst, align 4
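
Atomic loads must carry an explicit alignment at least as large as the loaded type, hence the "align 4" on every i32 match. A minimal wrapper around the match at line 906, with assumed names:

define void @atomic_load_i32(i32 addrspace(4)* %in, i32 addrspace(4)* %out) {
entry:
  %val = load atomic i32, i32 addrspace(4)* %in seq_cst, align 4
  store i32 %val, i32 addrspace(4)* %out
  ret void
}
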
939 store atomic i32 %in, i32 addrspace(4)* %gep seq_cst, align 4
947 store atomic i32 %in, i32 addrspace(4)* %out seq_cst, align 4
957 store atomic i32 %in, i32 addrspace(4)* %gep seq_cst, align 4
966 store atomic i32 %in, i32 addrspace(4)* %ptr seq_cst, align 4
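
A matching wrapper around the atomic stores, again with assumed names:

define void @atomic_store_i32(i32 %in, i32 addrspace(4)* %out) {
entry:
  store atomic i32 %in, i32 addrspace(4)* %out seq_cst, align 4
  ret void
}

All of the matches use the old typed-pointer syntax (i32 addrspace(4)*); under current opaque-pointer LLVM the same store would read: store atomic i32 %in, ptr addrspace(4) %out seq_cst, align 4.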