/*
 * linux/include/asm-arm/atomic.h
 *
 * Copyright (c) 1996 Russell King.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * Changelog:
 *   27-06-1996	RMK	Created
 *   13-04-1997	RMK	Made functions atomic!
 *   07-12-1997	RMK	Upgraded for v2.1.
 *   26-08-1998	PJB	Added #ifdef __KERNEL__
 */
#ifndef __ASM_ARM_ATOMIC_H
#define __ASM_ARM_ATOMIC_H

#ifdef CONFIG_SMP
#error SMP not supported
#endif

23 typedef struct { volatile int counter; } atomic_t;
24 #if BITS_PER_LONG == 32
25 typedef struct { volatile long long counter; } atomic64_t;
26 #else /* BIT_PER_LONG == 32 */
27 typedef struct { volatile long counter; } atomic64_t;
28 #endif
29
30 #define ATOMIC_INIT(i) { (i) }
31
#ifdef __KERNEL__
#include <asm/proc-armv/system.h>

35 #define atomic_read(v) ((v)->counter)
36 #define atomic_set(v, i) (((v)->counter) = (i))
37 #define atomic64_read(v) atomic_read(v)
38 #define atomic64_set(v, i) atomic_set(v, i)
39
atomic_add(int i,volatile atomic_t * v)40 static inline void atomic_add(int i, volatile atomic_t *v)
41 {
42 unsigned long flags = 0;
43
44 local_irq_save(flags);
45 v->counter += i;
46 local_irq_restore(flags);
47 }
48
atomic_sub(int i,volatile atomic_t * v)49 static inline void atomic_sub(int i, volatile atomic_t *v)
50 {
51 unsigned long flags = 0;
52
53 local_irq_save(flags);
54 v->counter -= i;
55 local_irq_restore(flags);
56 }
57
atomic_inc(volatile atomic_t * v)58 static inline void atomic_inc(volatile atomic_t *v)
59 {
60 unsigned long flags = 0;
61
62 local_irq_save(flags);
63 v->counter += 1;
64 local_irq_restore(flags);
65 }
66
atomic_dec(volatile atomic_t * v)67 static inline void atomic_dec(volatile atomic_t *v)
68 {
69 unsigned long flags = 0;
70
71 local_irq_save(flags);
72 v->counter -= 1;
73 local_irq_restore(flags);
74 }
75
atomic_dec_and_test(volatile atomic_t * v)76 static inline int atomic_dec_and_test(volatile atomic_t *v)
77 {
78 unsigned long flags = 0;
79 int val;
80
81 local_irq_save(flags);
82 val = v->counter;
83 v->counter = val -= 1;
84 local_irq_restore(flags);
85
86 return val == 0;
87 }
88
atomic_add_negative(int i,volatile atomic_t * v)89 static inline int atomic_add_negative(int i, volatile atomic_t *v)
90 {
91 unsigned long flags = 0;
92 int val;
93
94 local_irq_save(flags);
95 val = v->counter;
96 v->counter = val += i;
97 local_irq_restore(flags);
98
99 return val < 0;
100 }
101
atomic_clear_mask(unsigned long mask,unsigned long * addr)102 static inline void atomic_clear_mask(unsigned long mask, unsigned long *addr)
103 {
104 unsigned long flags = 0;
105
106 local_irq_save(flags);
107 *addr &= ~mask;
108 local_irq_restore(flags);
109 }
110
#if BITS_PER_LONG == 32

atomic64_add(long long i,volatile atomic64_t * v)113 static inline void atomic64_add(long long i, volatile atomic64_t *v)
114 {
115 unsigned long flags = 0;
116
117 local_irq_save(flags);
118 v->counter += i;
119 local_irq_restore(flags);
120 }
121
atomic64_sub(long long i,volatile atomic64_t * v)122 static inline void atomic64_sub(long long i, volatile atomic64_t *v)
123 {
124 unsigned long flags = 0;
125
126 local_irq_save(flags);
127 v->counter -= i;
128 local_irq_restore(flags);
129 }
130
#else /* BITS_PER_LONG != 32 */

atomic64_add(long i,volatile atomic64_t * v)133 static inline void atomic64_add(long i, volatile atomic64_t *v)
134 {
135 unsigned long flags = 0;
136
137 local_irq_save(flags);
138 v->counter += i;
139 local_irq_restore(flags);
140 }
141
atomic64_sub(long i,volatile atomic64_t * v)142 static inline void atomic64_sub(long i, volatile atomic64_t *v)
143 {
144 unsigned long flags = 0;
145
146 local_irq_save(flags);
147 v->counter -= i;
148 local_irq_restore(flags);
149 }
#endif /* BITS_PER_LONG == 32 */

atomic64_inc(volatile atomic64_t * v)152 static inline void atomic64_inc(volatile atomic64_t *v)
153 {
154 unsigned long flags = 0;
155
156 local_irq_save(flags);
157 v->counter += 1;
158 local_irq_restore(flags);
159 }
160
atomic64_dec(volatile atomic64_t * v)161 static inline void atomic64_dec(volatile atomic64_t *v)
162 {
163 unsigned long flags = 0;
164
165 local_irq_save(flags);
166 v->counter -= 1;
167 local_irq_restore(flags);
168 }
169
170 /* Atomic operations are already serializing on ARM */
171 #define smp_mb__before_atomic_dec() barrier()
172 #define smp_mb__after_atomic_dec() barrier()
173 #define smp_mb__before_atomic_inc() barrier()
174 #define smp_mb__after_atomic_inc() barrier()
175
#endif /* __KERNEL__ */
#endif /* __ASM_ARM_ATOMIC_H */