x86: Fix ASM_X86__ header guards
[linux-2.6-block.git] / arch/x86/include/asm/atomic_64.h
#ifndef _ASM_X86_ATOMIC_64_H
#define _ASM_X86_ATOMIC_64_H

#include <asm/alternative.h>
#include <asm/cmpxchg.h>

/* atomic_t should be a 32-bit signed type */

/*
 * Atomic operations that C can't guarantee us. Useful for
 * resource counting etc..
 */

/*
 * Make sure gcc doesn't try to be clever and move things around
 * on us. We need to use _exactly_ the address the user gave us,
 * not some alias that contains the same information.
 */
typedef struct {
	int counter;
} atomic_t;

#define ATOMIC_INIT(i)	{ (i) }

/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)		((v)->counter)

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v, i)	(((v)->counter) = (i))
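
/*
 * Illustrative usage (a hedged sketch, not part of the original header;
 * nr_events and the helpers are hypothetical): basic atomic_t handling.
 * Note that atomic_read()/atomic_set() compile to plain loads/stores;
 * only the read-modify-write operations below use LOCK_PREFIX.
 *
 *	static atomic_t nr_events = ATOMIC_INIT(0);
 *
 *	static void reset_events(void)
 *	{
 *		atomic_set(&nr_events, 0);
 *	}
 *
 *	static int events_seen(void)
 *	{
 *		return atomic_read(&nr_events);
 *	}
 */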

/**
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static inline void atomic_add(int i, atomic_t *v)
{
	asm volatile(LOCK_PREFIX "addl %1,%0"
		     : "=m" (v->counter)
		     : "ir" (i), "m" (v->counter));
}

/**
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static inline void atomic_sub(int i, atomic_t *v)
{
	asm volatile(LOCK_PREFIX "subl %1,%0"
		     : "=m" (v->counter)
		     : "ir" (i), "m" (v->counter));
}

/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic_sub_and_test(int i, atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "subl %2,%0; sete %1"
		     : "=m" (v->counter), "=qm" (c)
		     : "ir" (i), "m" (v->counter) : "memory");
	return c;
}

/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
static inline void atomic_inc(atomic_t *v)
{
	asm volatile(LOCK_PREFIX "incl %0"
		     : "=m" (v->counter)
		     : "m" (v->counter));
}

/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
static inline void atomic_dec(atomic_t *v)
{
	asm volatile(LOCK_PREFIX "decl %0"
		     : "=m" (v->counter)
		     : "m" (v->counter));
}

/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static inline int atomic_dec_and_test(atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "decl %0; sete %1"
		     : "=m" (v->counter), "=qm" (c)
		     : "m" (v->counter) : "memory");
	return c != 0;
}
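
/*
 * Illustrative usage (a hedged sketch; struct foo and foo_destroy() are
 * hypothetical, not kernel APIs): the classic refcount-release pattern.
 * Only the thread that drops the count to zero sees a true return from
 * atomic_dec_and_test() and may free the object.
 *
 *	struct foo {
 *		atomic_t refcnt;
 *	};
 *
 *	static void foo_put(struct foo *f)
 *	{
 *		if (atomic_dec_and_test(&f->refcnt))
 *			foo_destroy(f);
 *	}
 */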

/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic_inc_and_test(atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "incl %0; sete %1"
		     : "=m" (v->counter), "=qm" (c)
		     : "m" (v->counter) : "memory");
	return c != 0;
}

/**
 * atomic_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static inline int atomic_add_negative(int i, atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "addl %2,%0; sets %1"
		     : "=m" (v->counter), "=qm" (c)
		     : "ir" (i), "m" (v->counter) : "memory");
	return c;
}
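
/*
 * Illustrative usage (a hedged sketch; the budget counter and debit()
 * are hypothetical): atomic_add_negative() tests the sign of the result
 * inside the same locked instruction, so a debit can detect overdraw
 * without a separate racy read. On overdraw this sketch simply rolls
 * the debit back and fails.
 *
 *	static atomic_t budget = ATOMIC_INIT(100);
 *
 *	static int debit(int cost)
 *	{
 *		if (atomic_add_negative(-cost, &budget)) {
 *			atomic_add(cost, &budget);
 *			return -1;
 *		}
 *		return 0;
 *	}
 */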

/**
 * atomic_add_return - add and return
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static inline int atomic_add_return(int i, atomic_t *v)
{
	int __i = i;
	asm volatile(LOCK_PREFIX "xaddl %0, %1"
		     : "+r" (i), "+m" (v->counter)
		     : : "memory");
	return i + __i;
}

static inline int atomic_sub_return(int i, atomic_t *v)
{
	return atomic_add_return(-i, v);
}

#define atomic_inc_return(v)	(atomic_add_return(1, v))
#define atomic_dec_return(v)	(atomic_sub_return(1, v))
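
/*
 * Illustrative usage (a hedged sketch; take_ticket() is hypothetical):
 * because XADD returns the old value atomically with the add,
 * atomic_inc_return() can hand out unique, monotonically increasing
 * tickets with no lock.
 *
 *	static atomic_t next_ticket = ATOMIC_INIT(0);
 *
 *	static int take_ticket(void)
 *	{
 *		return atomic_inc_return(&next_ticket) - 1;
 *	}
 */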

/* A 64-bit atomic type */

typedef struct {
	long counter;
} atomic64_t;

#define ATOMIC64_INIT(i)	{ (i) }

/**
 * atomic64_read - read atomic64 variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 * Doesn't imply a read memory barrier.
 */
#define atomic64_read(v)	((v)->counter)

/**
 * atomic64_set - set atomic64 variable
 * @v: pointer to type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic64_set(v, i)	(((v)->counter) = (i))

/**
 * atomic64_add - add integer to atomic64 variable
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static inline void atomic64_add(long i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "addq %1,%0"
		     : "=m" (v->counter)
		     : "er" (i), "m" (v->counter));
}
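
/*
 * Illustrative usage (a hedged sketch; bytes_rx and account_rx() are
 * hypothetical): a 64-bit statistics counter. Unlike the 32-bit
 * atomic_t, an atomic64_t byte counter will not wrap in any realistic
 * uptime.
 *
 *	static atomic64_t bytes_rx = ATOMIC64_INIT(0);
 *
 *	static void account_rx(long len)
 *	{
 *		atomic64_add(len, &bytes_rx);
 *	}
 */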

/**
 * atomic64_sub - subtract integer from atomic64 variable
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static inline void atomic64_sub(long i, atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "subq %1,%0"
		     : "=m" (v->counter)
		     : "er" (i), "m" (v->counter));
}

/**
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer to type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic64_sub_and_test(long i, atomic64_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "subq %2,%0; sete %1"
		     : "=m" (v->counter), "=qm" (c)
		     : "er" (i), "m" (v->counter) : "memory");
	return c;
}

/**
 * atomic64_inc - increment atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1.
 */
static inline void atomic64_inc(atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "incq %0"
		     : "=m" (v->counter)
		     : "m" (v->counter));
}

/**
 * atomic64_dec - decrement atomic64 variable
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
static inline void atomic64_dec(atomic64_t *v)
{
	asm volatile(LOCK_PREFIX "decq %0"
		     : "=m" (v->counter)
		     : "m" (v->counter));
}

/**
 * atomic64_dec_and_test - decrement and test
 * @v: pointer to type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static inline int atomic64_dec_and_test(atomic64_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "decq %0; sete %1"
		     : "=m" (v->counter), "=qm" (c)
		     : "m" (v->counter) : "memory");
	return c != 0;
}

/**
 * atomic64_inc_and_test - increment and test
 * @v: pointer to type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic64_inc_and_test(atomic64_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "incq %0; sete %1"
		     : "=m" (v->counter), "=qm" (c)
		     : "m" (v->counter) : "memory");
	return c != 0;
}

/**
 * atomic64_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static inline int atomic64_add_negative(long i, atomic64_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "addq %2,%0; sets %1"
		     : "=m" (v->counter), "=qm" (c)
		     : "er" (i), "m" (v->counter) : "memory");
	return c;
}

/**
 * atomic64_add_return - add and return
 * @i: integer value to add
 * @v: pointer to type atomic64_t
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static inline long atomic64_add_return(long i, atomic64_t *v)
{
	long __i = i;
	asm volatile(LOCK_PREFIX "xaddq %0, %1;"
		     : "+r" (i), "+m" (v->counter)
		     : : "memory");
	return i + __i;
}

static inline long atomic64_sub_return(long i, atomic64_t *v)
{
	return atomic64_add_return(-i, v);
}

#define atomic64_inc_return(v)	(atomic64_add_return(1, (v)))
#define atomic64_dec_return(v)	(atomic64_sub_return(1, (v)))

#define atomic64_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), (old), (new)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), (new)))

#define atomic_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), (old), (new)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))
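
/*
 * Illustrative usage (a hedged sketch; atomic_max() is hypothetical):
 * atomic_cmpxchg() as the building block of an arbitrary read-modify-
 * write loop. The store succeeds only if the value is still the one we
 * read; on a lost race we retry with the freshly observed value.
 *
 *	static void atomic_max(atomic_t *v, int new)
 *	{
 *		int old = atomic_read(v);
 *
 *		while (old < new) {
 *			int seen = atomic_cmpxchg(v, old, new);
 *			if (seen == old)
 *				break;
 *			old = seen;
 *		}
 *	}
 */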

/**
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;
	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}

#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
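
/*
 * Illustrative usage (a hedged sketch; struct foo is the hypothetical
 * refcounted object from the example above): atomic_inc_not_zero() takes
 * a reference only while the object is still live, the standard pattern
 * for lockless lookups racing against a final atomic_dec_and_test().
 *
 *	static struct foo *foo_tryget(struct foo *f)
 *	{
 *		if (f && !atomic_inc_not_zero(&f->refcnt))
 *			return NULL;
 *		return f;
 *	}
 */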

/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static inline int atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long c, old;
	c = atomic64_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic64_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}

/**
 * atomic_inc_short - increment of a short integer
 * @v: pointer to type short int
 *
 * Atomically adds 1 to @v
 * Returns the new value of @v
 */
static inline short int atomic_inc_short(short int *v)
{
	asm(LOCK_PREFIX "addw $1, %0" : "+m" (*v));
	return *v;
}

/**
 * atomic_or_long - OR of two long integers
 * @v1: pointer to type unsigned long
 * @v2: value to OR into *@v1
 *
 * Atomically ORs @v2 into *@v1.
 */
static inline void atomic_or_long(unsigned long *v1, unsigned long v2)
{
	asm(LOCK_PREFIX "orq %1, %0" : "+m" (*v1) : "r" (v2));
}

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)

/* These are x86-specific, used by some header files */
#define atomic_clear_mask(mask, addr)					\
	asm volatile(LOCK_PREFIX "andl %0,%1"				\
		     : : "r" (~(mask)), "m" (*(addr)) : "memory")

#define atomic_set_mask(mask, addr)					\
	asm volatile(LOCK_PREFIX "orl %0,%1"				\
		     : : "r" ((unsigned)(mask)), "m" (*(addr))		\
		     : "memory")
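
/*
 * Illustrative usage (a hedged sketch; MY_BUSY and my_flags are
 * hypothetical): unlike the atomic_t helpers, these mask macros operate
 * on a raw unsigned int, setting or clearing bits under LOCK_PREFIX.
 *
 *	#define MY_BUSY	0x1
 *	static unsigned int my_flags;
 *
 *	atomic_set_mask(MY_BUSY, &my_flags);
 *	atomic_clear_mask(MY_BUSY, &my_flags);
 */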

/* Atomic operations are already serializing on x86 */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()
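
/*
 * Illustrative note (a hedged sketch; pending is hypothetical):
 * LOCK-prefixed RMW instructions are already full barriers on x86, so
 * these hooks reduce to compiler barriers here, but portable code must
 * still spell them out around the atomic op it wants ordered:
 *
 *	smp_mb__before_atomic_dec();
 *	atomic_dec(&pending);
 */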

#include <asm-generic/atomic.h>
#endif /* _ASM_X86_ATOMIC_64_H */