#ifndef __ARCH_I386_ATOMIC__
#define __ARCH_I386_ATOMIC__

#include <linux/config.h>
#include <linux/compiler.h>
#include <asm/processor.h>

/*
 * Atomic operations that C can't guarantee us. Useful for
 * resource counting etc.
 */

/*
 * Make sure gcc doesn't try to be clever and move things around
 * on us. We need to use _exactly_ the address the user gave us,
 * not some alias that contains the same information.
 */
typedef struct { volatile int counter; } atomic_t;

#define ATOMIC_INIT(i)	{ (i) }

/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)		((v)->counter)

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v,i)		(((v)->counter) = (i))

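/*
 * Illustrative sketch, not part of the original header: typical
 * declaration and use of the accessors above. The name page_users
 * is hypothetical.
 *
 *	static atomic_t page_users = ATOMIC_INIT(0);
 *
 *	atomic_set(&page_users, 1);
 *	BUG_ON(atomic_read(&page_users) != 1);
 */
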
/**
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic_add(int i, atomic_t *v)
{
	__asm__ __volatile__(
		LOCK_PREFIX "addl %1,%0"
		:"=m" (v->counter)
		:"ir" (i), "m" (v->counter));
}

/**
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic_sub(int i, atomic_t *v)
{
	__asm__ __volatile__(
		LOCK_PREFIX "subl %1,%0"
		:"=m" (v->counter)
		:"ir" (i), "m" (v->counter));
}

/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static __inline__ int atomic_sub_and_test(int i, atomic_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK_PREFIX "subl %2,%0; sete %1"
		:"=m" (v->counter), "=qm" (c)
		:"ir" (i), "m" (v->counter) : "memory");
	return c;
}

/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
static __inline__ void atomic_inc(atomic_t *v)
{
	__asm__ __volatile__(
		LOCK_PREFIX "incl %0"
		:"=m" (v->counter)
		:"m" (v->counter));
}

/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
static __inline__ void atomic_dec(atomic_t *v)
{
	__asm__ __volatile__(
		LOCK_PREFIX "decl %0"
		:"=m" (v->counter)
		:"m" (v->counter));
}

/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static __inline__ int atomic_dec_and_test(atomic_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK_PREFIX "decl %0; sete %1"
		:"=m" (v->counter), "=qm" (c)
		:"m" (v->counter) : "memory");
	return c != 0;
}
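
/*
 * Illustrative sketch, not part of the original header: the classic
 * reference-count release built on atomic_dec_and_test(). Exactly one
 * caller sees the 1 -> 0 transition, so exactly one caller frees the
 * object. struct foo, foo_destroy() and foo_put() are hypothetical.
 *
 *	struct foo { atomic_t refcnt; };
 *
 *	static void foo_put(struct foo *f)
 *	{
 *		if (atomic_dec_and_test(&f->refcnt))
 *			foo_destroy(f);
 *	}
 */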

/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static __inline__ int atomic_inc_and_test(atomic_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK_PREFIX "incl %0; sete %1"
		:"=m" (v->counter), "=qm" (c)
		:"m" (v->counter) : "memory");
	return c != 0;
}

/**
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static __inline__ int atomic_add_negative(int i, atomic_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK_PREFIX "addl %2,%0; sets %1"
		:"=m" (v->counter), "=qm" (c)
		:"ir" (i), "m" (v->counter) : "memory");
	return c;
}

/**
 * atomic_add_return - add and return
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static __inline__ int atomic_add_return(int i, atomic_t *v)
{
	int __i;
#ifdef CONFIG_M386
	unsigned long flags;
	if (unlikely(boot_cpu_data.x86 == 3))
		goto no_xadd;
#endif
	/* Modern 486+ processor */
	__i = i;
	__asm__ __volatile__(
		LOCK_PREFIX "xaddl %0, %1;"
		:"=r"(i)
		:"m"(v->counter), "0"(i));
	return i + __i;

#ifdef CONFIG_M386
no_xadd: /* Legacy 386 processor */
	local_irq_save(flags);
	__i = atomic_read(v);
	atomic_set(v, i + __i);
	local_irq_restore(flags);
	return i + __i;
#endif
}

static __inline__ int atomic_sub_return(int i, atomic_t *v)
{
	return atomic_add_return(-i, v);
}
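
/*
 * Illustrative sketch, not part of the original header: unlike
 * atomic_add(), atomic_add_return() hands back the new value, which
 * makes it usable as a sequence/ticket allocator. next_seq and
 * alloc_seq() are hypothetical.
 *
 *	static atomic_t next_seq = ATOMIC_INIT(0);
 *
 *	static int alloc_seq(void)
 *	{
 *		return atomic_add_return(1, &next_seq);
 *	}
 */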

#define atomic_cmpxchg(v, old, new) ((int)cmpxchg(&((v)->counter), old, new))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

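/*
 * Illustrative sketch, not part of the original header: the usual
 * compare-and-swap retry loop built on atomic_cmpxchg(); this is the
 * same shape atomic_add_unless() below expands to. atomic_raise() is
 * a hypothetical helper that lifts v to at least i.
 *
 *	static __inline__ void atomic_raise(atomic_t *v, int i)
 *	{
 *		int old, c = atomic_read(v);
 *		while (c < i && (old = atomic_cmpxchg(v, c, i)) != c)
 *			c = old;
 *	}
 */
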
/**
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
#define atomic_add_unless(v, a, u)			\
({							\
	int c, old;					\
	c = atomic_read(v);				\
	for (;;) {					\
		if (unlikely(c == (u)))			\
			break;				\
		old = atomic_cmpxchg((v), c, c + (a));	\
		if (likely(old == c))			\
			break;				\
		c = old;				\
	}						\
	c != (u);					\
})
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
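
/*
 * Illustrative sketch, not part of the original header:
 * atomic_inc_not_zero() is the lookup side of the refcount pattern
 * above: take a reference only if the count has not already hit zero
 * (i.e. the object is not being torn down). foo_tryget() is
 * hypothetical.
 *
 *	static struct foo *foo_tryget(struct foo *f)
 *	{
 *		return atomic_inc_not_zero(&f->refcnt) ? f : NULL;
 *	}
 */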

#define atomic_inc_return(v)	(atomic_add_return(1, v))
#define atomic_dec_return(v)	(atomic_sub_return(1, v))

/* These are x86-specific, used by some header files */
#define atomic_clear_mask(mask, addr) \
__asm__ __volatile__(LOCK_PREFIX "andl %0,%1" \
: : "r" (~(mask)),"m" (*(addr)) : "memory")

#define atomic_set_mask(mask, addr) \
__asm__ __volatile__(LOCK_PREFIX "orl %0,%1" \
: : "r" (mask),"m" (*(addr)) : "memory")

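/*
 * Illustrative sketch, not part of the original header: unlike the
 * atomic_t operations, the mask helpers act on a plain 32-bit word in
 * memory, e.g. a status word shared between CPUs. pending and
 * EVENT_BIT are hypothetical.
 *
 *	static unsigned long pending;
 *	#define EVENT_BIT 0x1
 *
 *	atomic_set_mask(EVENT_BIT, &pending);
 *	atomic_clear_mask(EVENT_BIT, &pending);
 */
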
/* Atomic operations are already serializing on x86 */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()

#include <asm-generic/atomic.h>
#endif