#ifndef __ARCH_X86_64_ATOMIC__
#define __ARCH_X86_64_ATOMIC__

#include <linux/config.h>

/* atomic_t should be a 32 bit signed type */

/*
 * Atomic operations that C can't guarantee us. Useful for
 * resource counting etc.
 */

#ifdef CONFIG_SMP
#define LOCK "lock ; "
#else
#define LOCK ""
#endif

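/*
 * Illustrative note (not in the original header): on an SMP build the
 * LOCK macro prepends the x86 "lock" prefix, so e.g.
 *
 *	LOCK "addl %1,%0"
 *
 * assembles to "lock ; addl %1,%0", an atomic read-modify-write across
 * all CPUs. On UP kernels the prefix is omitted because a single CPU
 * cannot race against itself within one instruction.
 */
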
/*
 * Make sure gcc doesn't try to be clever and move things around
 * on us. We need to use _exactly_ the address the user gave us,
 * not some alias that contains the same information.
 */
typedef struct { volatile int counter; } atomic_t;

#define ATOMIC_INIT(i)	{ (i) }

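/*
 * Example (an illustrative sketch, not part of this header): a
 * hypothetical counter declared with ATOMIC_INIT and manipulated only
 * through the accessors below, never through ->counter directly:
 *
 *	static atomic_t example_count = ATOMIC_INIT(0);
 *
 *	atomic_inc(&example_count);
 *	atomic_add(5, &example_count);
 *	if (atomic_read(&example_count) > 0)
 *		atomic_set(&example_count, 0);
 */
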
/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)		((v)->counter)

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v,i)		(((v)->counter) = (i))

/**
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic_add(int i, atomic_t *v)
{
	__asm__ __volatile__(
		LOCK "addl %1,%0"
		:"=m" (v->counter)
		:"ir" (i), "m" (v->counter));
}

/**
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic_sub(int i, atomic_t *v)
{
	__asm__ __volatile__(
		LOCK "subl %1,%0"
		:"=m" (v->counter)
		:"ir" (i), "m" (v->counter));
}

/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static __inline__ int atomic_sub_and_test(int i, atomic_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK "subl %2,%0; sete %1"
		:"=m" (v->counter), "=qm" (c)
		:"ir" (i), "m" (v->counter) : "memory");
	return c;
}

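/*
 * Example (an illustrative sketch, not part of this header): dropping
 * several references at once from a hypothetical object and freeing it
 * only if that removed the last one:
 *
 *	if (atomic_sub_and_test(nr_refs, &obj->refcnt))
 *		example_free(obj);
 */
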
/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
static __inline__ void atomic_inc(atomic_t *v)
{
	__asm__ __volatile__(
		LOCK "incl %0"
		:"=m" (v->counter)
		:"m" (v->counter));
}

/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
static __inline__ void atomic_dec(atomic_t *v)
{
	__asm__ __volatile__(
		LOCK "decl %0"
		:"=m" (v->counter)
		:"m" (v->counter));
}

/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static __inline__ int atomic_dec_and_test(atomic_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK "decl %0; sete %1"
		:"=m" (v->counter), "=qm" (c)
		:"m" (v->counter) : "memory");
	return c != 0;
}

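/*
 * Example (an illustrative sketch, not part of this header): the
 * canonical reference-count "put", freeing a hypothetical object only
 * when the last reference is dropped:
 *
 *	static void example_put(struct example_obj *obj)
 *	{
 *		if (atomic_dec_and_test(&obj->refcnt))
 *			kfree(obj);
 *	}
 */
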
/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static __inline__ int atomic_inc_and_test(atomic_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK "incl %0; sete %1"
		:"=m" (v->counter), "=qm" (c)
		:"m" (v->counter) : "memory");
	return c != 0;
}

/**
 * atomic_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * the result is greater than or equal to zero.
 */
static __inline__ int atomic_add_negative(int i, atomic_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK "addl %2,%0; sets %1"
		:"=m" (v->counter), "=qm" (c)
		:"ir" (i), "m" (v->counter) : "memory");
	return c;
}

/**
 * atomic_add_return - add and return
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static __inline__ int atomic_add_return(int i, atomic_t *v)
{
	int __i = i;
	__asm__ __volatile__(
		LOCK "xaddl %0, %1;"
		:"=r"(i)
		:"m"(v->counter), "0"(i));
	return i + __i;
}

static __inline__ int atomic_sub_return(int i, atomic_t *v)
{
	return atomic_add_return(-i,v);
}

#define atomic_inc_return(v)  (atomic_add_return(1,v))
#define atomic_dec_return(v)  (atomic_sub_return(1,v))

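/*
 * Illustrative note (not in the original header): xadd exchanges its
 * operands and then adds, so after the instruction the register holds
 * the counter's old value. Returning i + __i therefore yields the new
 * value without a second (racy) read. A hypothetical sketch of using
 * the returned value to hand out unique ids:
 *
 *	static atomic_t example_next_id = ATOMIC_INIT(0);
 *
 *	int example_alloc_id(void)
 *	{
 *		return atomic_inc_return(&example_next_id);
 *	}
 */
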
/* A 64-bit atomic type */

typedef struct { volatile long counter; } atomic64_t;

#define ATOMIC64_INIT(i)	{ (i) }

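/*
 * Illustrative note (not in the original header): the atomic64_*
 * operations mirror the 32-bit API above, but act on a long via the
 * 64-bit instruction forms (addq, subq, incq, ...). A hypothetical
 * sketch of a 64-bit statistics counter:
 *
 *	static atomic64_t example_bytes = ATOMIC64_INIT(0);
 *
 *	atomic64_add(len, &example_bytes);
 *	printk("%ld bytes\n", atomic64_read(&example_bytes));
 */
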
/**
 * atomic64_read - read atomic64 variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 * Doesn't imply a read memory barrier.
 */
#define atomic64_read(v)		((v)->counter)

/**
 * atomic64_set - set atomic64 variable
 * @v: pointer to type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic64_set(v,i)		(((v)->counter) = (i))

/**
 * atomic64_add - add integer to atomic64 variable
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic64_add(long i, atomic64_t *v)
{
	__asm__ __volatile__(
		LOCK "addq %1,%0"
		:"=m" (v->counter)
		:"ir" (i), "m" (v->counter));
}

/**
 * atomic64_sub - subtract integer from atomic64 variable
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic64_sub(long i, atomic64_t *v)
{
	__asm__ __volatile__(
		LOCK "subq %1,%0"
		:"=m" (v->counter)
		:"ir" (i), "m" (v->counter));
}

/**
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static __inline__ int atomic64_sub_and_test(long i, atomic64_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK "subq %2,%0; sete %1"
		:"=m" (v->counter), "=qm" (c)
		:"ir" (i), "m" (v->counter) : "memory");
	return c;
}

/**
 * atomic64_inc - increment atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1.
 */
static __inline__ void atomic64_inc(atomic64_t *v)
{
	__asm__ __volatile__(
		LOCK "incq %0"
		:"=m" (v->counter)
		:"m" (v->counter));
}

/**
 * atomic64_dec - decrement atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
static __inline__ void atomic64_dec(atomic64_t *v)
{
	__asm__ __volatile__(
		LOCK "decq %0"
		:"=m" (v->counter)
		:"m" (v->counter));
}

/**
 * atomic64_dec_and_test - decrement and test
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static __inline__ int atomic64_dec_and_test(atomic64_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK "decq %0; sete %1"
		:"=m" (v->counter), "=qm" (c)
		:"m" (v->counter) : "memory");
	return c != 0;
}

/**
 * atomic64_inc_and_test - increment and test
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static __inline__ int atomic64_inc_and_test(atomic64_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK "incq %0; sete %1"
		:"=m" (v->counter), "=qm" (c)
		:"m" (v->counter) : "memory");
	return c != 0;
}

/**
 * atomic64_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * the result is greater than or equal to zero.
 */
static __inline__ int atomic64_add_negative(long i, atomic64_t *v)
{
	unsigned char c;

	__asm__ __volatile__(
		LOCK "addq %2,%0; sets %1"
		:"=m" (v->counter), "=qm" (c)
		:"ir" (i), "m" (v->counter) : "memory");
	return c;
}

/**
 * atomic64_add_return - add and return
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static __inline__ long atomic64_add_return(long i, atomic64_t *v)
{
	long __i = i;
	__asm__ __volatile__(
		LOCK "xaddq %0, %1;"
		:"=r"(i)
		:"m"(v->counter), "0"(i));
	return i + __i;
}

static __inline__ long atomic64_sub_return(long i, atomic64_t *v)
{
	return atomic64_add_return(-i,v);
}

#define atomic64_inc_return(v)  (atomic64_add_return(1,v))
#define atomic64_dec_return(v)  (atomic64_sub_return(1,v))

#define atomic_cmpxchg(v, old, new) ((int)cmpxchg(&((v)->counter), old, new))

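/*
 * Example (an illustrative sketch, not part of this header): a classic
 * compare-and-swap loop built on atomic_cmpxchg(), here a hypothetical
 * "store maximum" update that retries while other CPUs race with us:
 *
 *	static void example_store_max(atomic_t *v, int new)
 *	{
 *		int cur = atomic_read(v);
 *		while (cur < new) {
 *			int old = atomic_cmpxchg(v, cur, new);
 *			if (old == cur)
 *				break;	// we won the race
 *			cur = old;	// lost; retry against the new value
 *		}
 *	}
 */
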
/**
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
#define atomic_add_unless(v, a, u)				\
({								\
	int c, old;						\
	c = atomic_read(v);					\
	while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
		c = old;					\
	c != (u);						\
})
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)

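/*
 * Example (an illustrative sketch, not part of this header):
 * atomic_inc_not_zero() is the usual building block for "take a
 * reference only if the object is still live" lookups, e.g. under a
 * lock or an RCU-style scheme protecting a hypothetical object:
 *
 *	struct example_obj *example_get(struct example_obj *obj)
 *	{
 *		if (!atomic_inc_not_zero(&obj->refcnt))
 *			return NULL;	// already on its way to being freed
 *		return obj;
 *	}
 */
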
/* These are x86-specific, used by some header files */
#define atomic_clear_mask(mask, addr) \
__asm__ __volatile__(LOCK "andl %0,%1" \
: : "r" (~(mask)),"m" (*addr) : "memory")

#define atomic_set_mask(mask, addr) \
__asm__ __volatile__(LOCK "orl %0,%1" \
: : "r" ((unsigned)mask),"m" (*(addr)) : "memory")

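/*
 * Example (an illustrative sketch, not part of this header): the mask
 * helpers atomically set or clear flag bits in a plain int, e.g. for a
 * hypothetical flags word:
 *
 *	#define EXAMPLE_F_BUSY	0x01
 *	static int example_flags;
 *
 *	atomic_set_mask(EXAMPLE_F_BUSY, &example_flags);
 *	...
 *	atomic_clear_mask(EXAMPLE_F_BUSY, &example_flags);
 */
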
/* Atomic operations are already serializing on x86 */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()

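/*
 * Illustrative note (not in the original header): a locked x86
 * instruction is already a full memory barrier, so these helpers only
 * need a compiler barrier here. Portable code still calls them so that
 * architectures with weaker atomics can insert a real fence, e.g.:
 *
 *	smp_mb__before_atomic_dec();
 *	atomic_dec(&obj->pending);
 */
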
#endif