locking/atomic: Introduce atomic_try_cmpxchg()
include/linux/atomic.h
/* Atomic operations usable in machine independent code */
#ifndef _LINUX_ATOMIC_H
#define _LINUX_ATOMIC_H
#include <asm/atomic.h>
#include <asm/barrier.h>

/*
 * Relaxed variants of xchg, cmpxchg and some atomic operations.
 *
 * We support four variants:
 *
 * - Fully ordered: The default implementation, no suffix required.
 * - Acquire: Provides ACQUIRE semantics, _acquire suffix.
 * - Release: Provides RELEASE semantics, _release suffix.
 * - Relaxed: No ordering guarantees, _relaxed suffix.
 *
 * For compound atomics performing both a load and a store, ACQUIRE
 * semantics apply only to the load and RELEASE semantics only to the
 * store portion of the operation. Note that a failed cmpxchg_acquire
 * does -not- imply any memory ordering constraints.
 *
 * See Documentation/memory-barriers.txt for ACQUIRE/RELEASE definitions.
 */

#ifndef atomic_read_acquire
#define  atomic_read_acquire(v)         smp_load_acquire(&(v)->counter)
#endif

#ifndef atomic_set_release
#define  atomic_set_release(v, i)       smp_store_release(&(v)->counter, (i))
#endif

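/*
 * Usage sketch (illustrative only; example_produce(), example_consume(),
 * 'ready' and 'payload' are hypothetical, not kernel API): a one-shot
 * producer/consumer handoff built from the accessors above. The RELEASE
 * store orders the payload write before the flag update; the ACQUIRE load
 * orders the flag read before the payload read, so a consumer that
 * observes ready == 1 also observes payload == 42.
 *
 *      static int payload;
 *      static atomic_t ready = ATOMIC_INIT(0);
 *
 *      static void example_produce(void)
 *      {
 *              payload = 42;
 *              atomic_set_release(&ready, 1);
 *      }
 *
 *      static int example_consume(void)
 *      {
 *              if (atomic_read_acquire(&ready))
 *                      return payload;
 *              return -1;
 *      }
 */
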
/*
 * The idea here is to build acquire/release variants by adding explicit
 * barriers on top of the relaxed variant. In the case where the relaxed
 * variant is already fully ordered, no additional barriers are needed.
 *
 * Additionally, if an architecture has a special barrier for acquire/release,
 * it can implement its own __atomic_op_* and use the same framework for
 * building variants.
 */
#ifndef __atomic_op_acquire
#define __atomic_op_acquire(op, args...)                                \
({                                                                      \
        typeof(op##_relaxed(args)) __ret  = op##_relaxed(args);         \
        smp_mb__after_atomic();                                         \
        __ret;                                                          \
})
#endif

#ifndef __atomic_op_release
#define __atomic_op_release(op, args...)                                \
({                                                                      \
        smp_mb__before_atomic();                                        \
        op##_relaxed(args);                                             \
})
#endif

#ifndef __atomic_op_fence
#define __atomic_op_fence(op, args...)                                  \
({                                                                      \
        typeof(op##_relaxed(args)) __ret;                               \
        smp_mb__before_atomic();                                        \
        __ret = op##_relaxed(args);                                     \
        smp_mb__after_atomic();                                         \
        __ret;                                                          \
})
#endif
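
/*
 * For illustration: on an architecture that supplies only
 * atomic_add_return_relaxed(), the fully ordered atomic_add_return()
 * generated below via __atomic_op_fence() expands to roughly:
 *
 *      ({
 *              int __ret;
 *              smp_mb__before_atomic();
 *              __ret = atomic_add_return_relaxed(i, v);
 *              smp_mb__after_atomic();
 *              __ret;
 *      })
 *
 * i.e. a full barrier on either side of the relaxed operation.
 */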

/* atomic_add_return_relaxed */
#ifndef atomic_add_return_relaxed
#define  atomic_add_return_relaxed      atomic_add_return
#define  atomic_add_return_acquire      atomic_add_return
#define  atomic_add_return_release      atomic_add_return

#else /* atomic_add_return_relaxed */

#ifndef atomic_add_return_acquire
#define  atomic_add_return_acquire(...)                                 \
        __atomic_op_acquire(atomic_add_return, __VA_ARGS__)
#endif

#ifndef atomic_add_return_release
#define  atomic_add_return_release(...)                                 \
        __atomic_op_release(atomic_add_return, __VA_ARGS__)
#endif

#ifndef atomic_add_return
#define  atomic_add_return(...)                                         \
        __atomic_op_fence(atomic_add_return, __VA_ARGS__)
#endif
#endif /* atomic_add_return_relaxed */

/* atomic_inc_return_relaxed */
#ifndef atomic_inc_return_relaxed
#define  atomic_inc_return_relaxed      atomic_inc_return
#define  atomic_inc_return_acquire      atomic_inc_return
#define  atomic_inc_return_release      atomic_inc_return

#else /* atomic_inc_return_relaxed */

#ifndef atomic_inc_return_acquire
#define  atomic_inc_return_acquire(...)                                 \
        __atomic_op_acquire(atomic_inc_return, __VA_ARGS__)
#endif

#ifndef atomic_inc_return_release
#define  atomic_inc_return_release(...)                                 \
        __atomic_op_release(atomic_inc_return, __VA_ARGS__)
#endif

#ifndef atomic_inc_return
#define  atomic_inc_return(...)                                         \
        __atomic_op_fence(atomic_inc_return, __VA_ARGS__)
#endif
#endif /* atomic_inc_return_relaxed */

/* atomic_sub_return_relaxed */
#ifndef atomic_sub_return_relaxed
#define  atomic_sub_return_relaxed      atomic_sub_return
#define  atomic_sub_return_acquire      atomic_sub_return
#define  atomic_sub_return_release      atomic_sub_return

#else /* atomic_sub_return_relaxed */

#ifndef atomic_sub_return_acquire
#define  atomic_sub_return_acquire(...)                                 \
        __atomic_op_acquire(atomic_sub_return, __VA_ARGS__)
#endif

#ifndef atomic_sub_return_release
#define  atomic_sub_return_release(...)                                 \
        __atomic_op_release(atomic_sub_return, __VA_ARGS__)
#endif

#ifndef atomic_sub_return
#define  atomic_sub_return(...)                                         \
        __atomic_op_fence(atomic_sub_return, __VA_ARGS__)
#endif
#endif /* atomic_sub_return_relaxed */

/* atomic_dec_return_relaxed */
#ifndef atomic_dec_return_relaxed
#define  atomic_dec_return_relaxed      atomic_dec_return
#define  atomic_dec_return_acquire      atomic_dec_return
#define  atomic_dec_return_release      atomic_dec_return

#else /* atomic_dec_return_relaxed */

#ifndef atomic_dec_return_acquire
#define  atomic_dec_return_acquire(...)                                 \
        __atomic_op_acquire(atomic_dec_return, __VA_ARGS__)
#endif

#ifndef atomic_dec_return_release
#define  atomic_dec_return_release(...)                                 \
        __atomic_op_release(atomic_dec_return, __VA_ARGS__)
#endif

#ifndef atomic_dec_return
#define  atomic_dec_return(...)                                         \
        __atomic_op_fence(atomic_dec_return, __VA_ARGS__)
#endif
#endif /* atomic_dec_return_relaxed */


/* atomic_fetch_add_relaxed */
#ifndef atomic_fetch_add_relaxed
#define atomic_fetch_add_relaxed        atomic_fetch_add
#define atomic_fetch_add_acquire        atomic_fetch_add
#define atomic_fetch_add_release        atomic_fetch_add

#else /* atomic_fetch_add_relaxed */

#ifndef atomic_fetch_add_acquire
#define atomic_fetch_add_acquire(...)                                   \
        __atomic_op_acquire(atomic_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic_fetch_add_release
#define atomic_fetch_add_release(...)                                   \
        __atomic_op_release(atomic_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic_fetch_add
#define atomic_fetch_add(...)                                           \
        __atomic_op_fence(atomic_fetch_add, __VA_ARGS__)
#endif
#endif /* atomic_fetch_add_relaxed */

/* atomic_fetch_inc_relaxed */
#ifndef atomic_fetch_inc_relaxed

#ifndef atomic_fetch_inc
#define atomic_fetch_inc(v)             atomic_fetch_add(1, (v))
#define atomic_fetch_inc_relaxed(v)     atomic_fetch_add_relaxed(1, (v))
#define atomic_fetch_inc_acquire(v)     atomic_fetch_add_acquire(1, (v))
#define atomic_fetch_inc_release(v)     atomic_fetch_add_release(1, (v))
#else /* atomic_fetch_inc */
#define atomic_fetch_inc_relaxed        atomic_fetch_inc
#define atomic_fetch_inc_acquire        atomic_fetch_inc
#define atomic_fetch_inc_release        atomic_fetch_inc
#endif /* atomic_fetch_inc */

#else /* atomic_fetch_inc_relaxed */

#ifndef atomic_fetch_inc_acquire
#define atomic_fetch_inc_acquire(...)                                   \
        __atomic_op_acquire(atomic_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic_fetch_inc_release
#define atomic_fetch_inc_release(...)                                   \
        __atomic_op_release(atomic_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic_fetch_inc
#define atomic_fetch_inc(...)                                           \
        __atomic_op_fence(atomic_fetch_inc, __VA_ARGS__)
#endif
#endif /* atomic_fetch_inc_relaxed */

/* atomic_fetch_sub_relaxed */
#ifndef atomic_fetch_sub_relaxed
#define atomic_fetch_sub_relaxed        atomic_fetch_sub
#define atomic_fetch_sub_acquire        atomic_fetch_sub
#define atomic_fetch_sub_release        atomic_fetch_sub

#else /* atomic_fetch_sub_relaxed */

#ifndef atomic_fetch_sub_acquire
#define atomic_fetch_sub_acquire(...)                                   \
        __atomic_op_acquire(atomic_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic_fetch_sub_release
#define atomic_fetch_sub_release(...)                                   \
        __atomic_op_release(atomic_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic_fetch_sub
#define atomic_fetch_sub(...)                                           \
        __atomic_op_fence(atomic_fetch_sub, __VA_ARGS__)
#endif
#endif /* atomic_fetch_sub_relaxed */

/* atomic_fetch_dec_relaxed */
#ifndef atomic_fetch_dec_relaxed

#ifndef atomic_fetch_dec
#define atomic_fetch_dec(v)             atomic_fetch_sub(1, (v))
#define atomic_fetch_dec_relaxed(v)     atomic_fetch_sub_relaxed(1, (v))
#define atomic_fetch_dec_acquire(v)     atomic_fetch_sub_acquire(1, (v))
#define atomic_fetch_dec_release(v)     atomic_fetch_sub_release(1, (v))
#else /* atomic_fetch_dec */
#define atomic_fetch_dec_relaxed        atomic_fetch_dec
#define atomic_fetch_dec_acquire        atomic_fetch_dec
#define atomic_fetch_dec_release        atomic_fetch_dec
#endif /* atomic_fetch_dec */

#else /* atomic_fetch_dec_relaxed */

#ifndef atomic_fetch_dec_acquire
#define atomic_fetch_dec_acquire(...)                                   \
        __atomic_op_acquire(atomic_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic_fetch_dec_release
#define atomic_fetch_dec_release(...)                                   \
        __atomic_op_release(atomic_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic_fetch_dec
#define atomic_fetch_dec(...)                                           \
        __atomic_op_fence(atomic_fetch_dec, __VA_ARGS__)
#endif
#endif /* atomic_fetch_dec_relaxed */

/* atomic_fetch_or_relaxed */
#ifndef atomic_fetch_or_relaxed
#define atomic_fetch_or_relaxed atomic_fetch_or
#define atomic_fetch_or_acquire atomic_fetch_or
#define atomic_fetch_or_release atomic_fetch_or

#else /* atomic_fetch_or_relaxed */

#ifndef atomic_fetch_or_acquire
#define atomic_fetch_or_acquire(...)                                    \
        __atomic_op_acquire(atomic_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic_fetch_or_release
#define atomic_fetch_or_release(...)                                    \
        __atomic_op_release(atomic_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic_fetch_or
#define atomic_fetch_or(...)                                            \
        __atomic_op_fence(atomic_fetch_or, __VA_ARGS__)
#endif
#endif /* atomic_fetch_or_relaxed */

/* atomic_fetch_and_relaxed */
#ifndef atomic_fetch_and_relaxed
#define atomic_fetch_and_relaxed        atomic_fetch_and
#define atomic_fetch_and_acquire        atomic_fetch_and
#define atomic_fetch_and_release        atomic_fetch_and

#else /* atomic_fetch_and_relaxed */

#ifndef atomic_fetch_and_acquire
#define atomic_fetch_and_acquire(...)                                   \
        __atomic_op_acquire(atomic_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic_fetch_and_release
#define atomic_fetch_and_release(...)                                   \
        __atomic_op_release(atomic_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic_fetch_and
#define atomic_fetch_and(...)                                           \
        __atomic_op_fence(atomic_fetch_and, __VA_ARGS__)
#endif
#endif /* atomic_fetch_and_relaxed */

#ifdef atomic_andnot
/* atomic_fetch_andnot_relaxed */
#ifndef atomic_fetch_andnot_relaxed
#define atomic_fetch_andnot_relaxed     atomic_fetch_andnot
#define atomic_fetch_andnot_acquire     atomic_fetch_andnot
#define atomic_fetch_andnot_release     atomic_fetch_andnot

#else /* atomic_fetch_andnot_relaxed */

#ifndef atomic_fetch_andnot_acquire
#define atomic_fetch_andnot_acquire(...)                                        \
        __atomic_op_acquire(atomic_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic_fetch_andnot_release
#define atomic_fetch_andnot_release(...)                                        \
        __atomic_op_release(atomic_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic_fetch_andnot
#define atomic_fetch_andnot(...)                                                \
        __atomic_op_fence(atomic_fetch_andnot, __VA_ARGS__)
#endif
#endif /* atomic_fetch_andnot_relaxed */
#endif /* atomic_andnot */

/* atomic_fetch_xor_relaxed */
#ifndef atomic_fetch_xor_relaxed
#define atomic_fetch_xor_relaxed        atomic_fetch_xor
#define atomic_fetch_xor_acquire        atomic_fetch_xor
#define atomic_fetch_xor_release        atomic_fetch_xor

#else /* atomic_fetch_xor_relaxed */

#ifndef atomic_fetch_xor_acquire
#define atomic_fetch_xor_acquire(...)                                   \
        __atomic_op_acquire(atomic_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic_fetch_xor_release
#define atomic_fetch_xor_release(...)                                   \
        __atomic_op_release(atomic_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic_fetch_xor
#define atomic_fetch_xor(...)                                           \
        __atomic_op_fence(atomic_fetch_xor, __VA_ARGS__)
#endif
#endif /* atomic_fetch_xor_relaxed */


/* atomic_xchg_relaxed */
#ifndef atomic_xchg_relaxed
#define  atomic_xchg_relaxed            atomic_xchg
#define  atomic_xchg_acquire            atomic_xchg
#define  atomic_xchg_release            atomic_xchg

#else /* atomic_xchg_relaxed */

#ifndef atomic_xchg_acquire
#define  atomic_xchg_acquire(...)                                       \
        __atomic_op_acquire(atomic_xchg, __VA_ARGS__)
#endif

#ifndef atomic_xchg_release
#define  atomic_xchg_release(...)                                       \
        __atomic_op_release(atomic_xchg, __VA_ARGS__)
#endif

#ifndef atomic_xchg
#define  atomic_xchg(...)                                               \
        __atomic_op_fence(atomic_xchg, __VA_ARGS__)
#endif
#endif /* atomic_xchg_relaxed */

/* atomic_cmpxchg_relaxed */
#ifndef atomic_cmpxchg_relaxed
#define  atomic_cmpxchg_relaxed         atomic_cmpxchg
#define  atomic_cmpxchg_acquire         atomic_cmpxchg
#define  atomic_cmpxchg_release         atomic_cmpxchg

#else /* atomic_cmpxchg_relaxed */

#ifndef atomic_cmpxchg_acquire
#define  atomic_cmpxchg_acquire(...)                                    \
        __atomic_op_acquire(atomic_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic_cmpxchg_release
#define  atomic_cmpxchg_release(...)                                    \
        __atomic_op_release(atomic_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic_cmpxchg
#define  atomic_cmpxchg(...)                                            \
        __atomic_op_fence(atomic_cmpxchg, __VA_ARGS__)
#endif
#endif /* atomic_cmpxchg_relaxed */

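/*
 * atomic_try_cmpxchg(): variant of atomic_cmpxchg() that returns a boolean
 * success indication instead of the observed value. On failure, the value
 * just observed is written back through the 'old' pointer (_po), so a
 * retry loop does not need to re-read the variable.
 */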
#ifndef atomic_try_cmpxchg

#define __atomic_try_cmpxchg(type, _p, _po, _n)                         \
({                                                                      \
        typeof(_po) __po = (_po);                                       \
        typeof(*(_po)) __o = *__po;                                     \
        *__po = atomic_cmpxchg##type((_p), __o, (_n));                  \
        (*__po == __o);                                                 \
})

#define atomic_try_cmpxchg(_p, _po, _n)         __atomic_try_cmpxchg(, _p, _po, _n)
#define atomic_try_cmpxchg_relaxed(_p, _po, _n) __atomic_try_cmpxchg(_relaxed, _p, _po, _n)
#define atomic_try_cmpxchg_acquire(_p, _po, _n) __atomic_try_cmpxchg(_acquire, _p, _po, _n)
#define atomic_try_cmpxchg_release(_p, _po, _n) __atomic_try_cmpxchg(_release, _p, _po, _n)

#else /* atomic_try_cmpxchg */
#define atomic_try_cmpxchg_relaxed      atomic_try_cmpxchg
#define atomic_try_cmpxchg_acquire      atomic_try_cmpxchg
#define atomic_try_cmpxchg_release      atomic_try_cmpxchg
#endif /* atomic_try_cmpxchg */
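
/*
 * Usage sketch (illustrative; example_add() is hypothetical): a classic
 * cmpxchg() retry loop,
 *
 *      static void example_add(atomic_t *v, int a)
 *      {
 *              int old = atomic_read(v), tmp;
 *
 *              for (;;) {
 *                      tmp = atomic_cmpxchg(v, old, old + a);
 *                      if (tmp == old)
 *                              break;
 *                      old = tmp;
 *              }
 *      }
 *
 * becomes, with the try_cmpxchg() form:
 *
 *      static void example_add(atomic_t *v, int a)
 *      {
 *              int old = atomic_read(v);
 *
 *              do {
 *              } while (!atomic_try_cmpxchg(v, &old, old + a));
 *      }
 *
 * because a failed atomic_try_cmpxchg() refreshes 'old' with the value it
 * observed. On some architectures (e.g. x86) this can also generate better
 * code, since the success test can reuse the flags set by the cmpxchg
 * instruction rather than re-comparing the values.
 */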

/* cmpxchg_relaxed */
#ifndef cmpxchg_relaxed
#define  cmpxchg_relaxed                cmpxchg
#define  cmpxchg_acquire                cmpxchg
#define  cmpxchg_release                cmpxchg

#else /* cmpxchg_relaxed */

#ifndef cmpxchg_acquire
#define  cmpxchg_acquire(...)                                           \
        __atomic_op_acquire(cmpxchg, __VA_ARGS__)
#endif

#ifndef cmpxchg_release
#define  cmpxchg_release(...)                                           \
        __atomic_op_release(cmpxchg, __VA_ARGS__)
#endif

#ifndef cmpxchg
#define  cmpxchg(...)                                                   \
        __atomic_op_fence(cmpxchg, __VA_ARGS__)
#endif
#endif /* cmpxchg_relaxed */

/* cmpxchg64_relaxed */
#ifndef cmpxchg64_relaxed
#define  cmpxchg64_relaxed              cmpxchg64
#define  cmpxchg64_acquire              cmpxchg64
#define  cmpxchg64_release              cmpxchg64

#else /* cmpxchg64_relaxed */

#ifndef cmpxchg64_acquire
#define  cmpxchg64_acquire(...)                                         \
        __atomic_op_acquire(cmpxchg64, __VA_ARGS__)
#endif

#ifndef cmpxchg64_release
#define  cmpxchg64_release(...)                                         \
        __atomic_op_release(cmpxchg64, __VA_ARGS__)
#endif

#ifndef cmpxchg64
#define  cmpxchg64(...)                                                 \
        __atomic_op_fence(cmpxchg64, __VA_ARGS__)
#endif
#endif /* cmpxchg64_relaxed */

/* xchg_relaxed */
#ifndef xchg_relaxed
#define  xchg_relaxed                   xchg
#define  xchg_acquire                   xchg
#define  xchg_release                   xchg

#else /* xchg_relaxed */

#ifndef xchg_acquire
#define  xchg_acquire(...)              __atomic_op_acquire(xchg, __VA_ARGS__)
#endif

#ifndef xchg_release
#define  xchg_release(...)              __atomic_op_release(xchg, __VA_ARGS__)
#endif

#ifndef xchg
#define  xchg(...)                      __atomic_op_fence(xchg, __VA_ARGS__)
#endif
#endif /* xchg_relaxed */

/**
 * atomic_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
        return __atomic_add_unless(v, a, u) != u;
}

/**
 * atomic_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1, so long as @v is non-zero.
 * Returns non-zero if @v was non-zero, and zero otherwise.
 */
#ifndef atomic_inc_not_zero
#define atomic_inc_not_zero(v)          atomic_add_unless((v), 1, 0)
#endif
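
/*
 * Usage sketch (illustrative; struct example_obj and example_get() are
 * hypothetical): atomic_inc_not_zero() is the usual building block for
 * looking up an object whose reference count may concurrently drop to
 * zero. Taking a reference only succeeds while at least one other
 * reference keeps the object alive:
 *
 *      struct example_obj {
 *              atomic_t refs;
 *      };
 *
 *      static bool example_get(struct example_obj *obj)
 *      {
 *              return atomic_inc_not_zero(&obj->refs) != 0;
 *      }
 */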

#ifndef atomic_andnot
static inline void atomic_andnot(int i, atomic_t *v)
{
        atomic_and(~i, v);
}

static inline int atomic_fetch_andnot(int i, atomic_t *v)
{
        return atomic_fetch_and(~i, v);
}

static inline int atomic_fetch_andnot_relaxed(int i, atomic_t *v)
{
        return atomic_fetch_and_relaxed(~i, v);
}

static inline int atomic_fetch_andnot_acquire(int i, atomic_t *v)
{
        return atomic_fetch_and_acquire(~i, v);
}

static inline int atomic_fetch_andnot_release(int i, atomic_t *v)
{
        return atomic_fetch_and_release(~i, v);
}
#endif
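
/*
 * Usage sketch (illustrative; EXAMPLE_FLAG_BUSY and example_clear_busy()
 * are hypothetical): atomic_andnot() clears the bits set in its first
 * argument, so a flag word can drop one flag without touching the others:
 *
 *      #define EXAMPLE_FLAG_BUSY       0x1
 *
 *      static void example_clear_busy(atomic_t *flags)
 *      {
 *              atomic_andnot(EXAMPLE_FLAG_BUSY, flags);
 *      }
 */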

/**
 * atomic_inc_not_zero_hint - increment if not zero
 * @v: pointer of type atomic_t
 * @hint: probable value of the atomic before the increment
 *
 * This version of atomic_inc_not_zero() gives a hint of the probable
 * value of the atomic. This helps the processor avoid reading the memory
 * location before the atomic read/modify/write cycle, lowering the number
 * of bus transactions on some arches.
 *
 * Returns: 0 if the increment was not done, 1 otherwise.
 */
#ifndef atomic_inc_not_zero_hint
static inline int atomic_inc_not_zero_hint(atomic_t *v, int hint)
{
        int val, c = hint;

        /* sanity test, should be removed by compiler if hint is a constant */
        if (!hint)
                return atomic_inc_not_zero(v);

        do {
                val = atomic_cmpxchg(v, c, c + 1);
                if (val == c)
                        return 1;
                c = val;
        } while (c);

        return 0;
}
#endif

#ifndef atomic_inc_unless_negative
static inline int atomic_inc_unless_negative(atomic_t *p)
{
        int v, v1;
        for (v = 0; v >= 0; v = v1) {
                v1 = atomic_cmpxchg(p, v, v + 1);
                if (likely(v1 == v))
                        return 1;
        }
        return 0;
}
#endif

#ifndef atomic_dec_unless_positive
static inline int atomic_dec_unless_positive(atomic_t *p)
{
        int v, v1;
        for (v = 0; v <= 0; v = v1) {
                v1 = atomic_cmpxchg(p, v, v - 1);
                if (likely(v1 == v))
                        return 1;
        }
        return 0;
}
#endif

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 *
 * The function returns the old value of *v minus 1, even if
 * the atomic variable, v, was not decremented.
 */
#ifndef atomic_dec_if_positive
static inline int atomic_dec_if_positive(atomic_t *v)
{
        int c, old, dec;
        c = atomic_read(v);
        for (;;) {
                dec = c - 1;
                if (unlikely(dec < 0))
                        break;
                old = atomic_cmpxchg((v), c, dec);
                if (likely(old == c))
                        break;
                c = old;
        }
        return dec;
}
#endif
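
/*
 * Usage sketch (illustrative; example_trydown() is hypothetical): a
 * semaphore-style trylock built on atomic_dec_if_positive(), which only
 * decrements while the counter stays non-negative. A return value >= 0
 * means the decrement happened:
 *
 *      static bool example_trydown(atomic_t *count)
 *      {
 *              return atomic_dec_if_positive(count) >= 0;
 *      }
 */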

#ifdef CONFIG_GENERIC_ATOMIC64
#include <asm-generic/atomic64.h>
#endif

#ifndef atomic64_read_acquire
#define  atomic64_read_acquire(v)       smp_load_acquire(&(v)->counter)
#endif

#ifndef atomic64_set_release
#define  atomic64_set_release(v, i)     smp_store_release(&(v)->counter, (i))
#endif

/* atomic64_add_return_relaxed */
#ifndef atomic64_add_return_relaxed
#define  atomic64_add_return_relaxed    atomic64_add_return
#define  atomic64_add_return_acquire    atomic64_add_return
#define  atomic64_add_return_release    atomic64_add_return

#else /* atomic64_add_return_relaxed */

#ifndef atomic64_add_return_acquire
#define  atomic64_add_return_acquire(...)                               \
        __atomic_op_acquire(atomic64_add_return, __VA_ARGS__)
#endif

#ifndef atomic64_add_return_release
#define  atomic64_add_return_release(...)                               \
        __atomic_op_release(atomic64_add_return, __VA_ARGS__)
#endif

#ifndef atomic64_add_return
#define  atomic64_add_return(...)                                       \
        __atomic_op_fence(atomic64_add_return, __VA_ARGS__)
#endif
#endif /* atomic64_add_return_relaxed */

/* atomic64_inc_return_relaxed */
#ifndef atomic64_inc_return_relaxed
#define  atomic64_inc_return_relaxed    atomic64_inc_return
#define  atomic64_inc_return_acquire    atomic64_inc_return
#define  atomic64_inc_return_release    atomic64_inc_return

#else /* atomic64_inc_return_relaxed */

#ifndef atomic64_inc_return_acquire
#define  atomic64_inc_return_acquire(...)                               \
        __atomic_op_acquire(atomic64_inc_return, __VA_ARGS__)
#endif

#ifndef atomic64_inc_return_release
#define  atomic64_inc_return_release(...)                               \
        __atomic_op_release(atomic64_inc_return, __VA_ARGS__)
#endif

#ifndef atomic64_inc_return
#define  atomic64_inc_return(...)                                       \
        __atomic_op_fence(atomic64_inc_return, __VA_ARGS__)
#endif
#endif /* atomic64_inc_return_relaxed */


/* atomic64_sub_return_relaxed */
#ifndef atomic64_sub_return_relaxed
#define  atomic64_sub_return_relaxed    atomic64_sub_return
#define  atomic64_sub_return_acquire    atomic64_sub_return
#define  atomic64_sub_return_release    atomic64_sub_return

#else /* atomic64_sub_return_relaxed */

#ifndef atomic64_sub_return_acquire
#define  atomic64_sub_return_acquire(...)                               \
        __atomic_op_acquire(atomic64_sub_return, __VA_ARGS__)
#endif

#ifndef atomic64_sub_return_release
#define  atomic64_sub_return_release(...)                               \
        __atomic_op_release(atomic64_sub_return, __VA_ARGS__)
#endif

#ifndef atomic64_sub_return
#define  atomic64_sub_return(...)                                       \
        __atomic_op_fence(atomic64_sub_return, __VA_ARGS__)
#endif
#endif /* atomic64_sub_return_relaxed */

/* atomic64_dec_return_relaxed */
#ifndef atomic64_dec_return_relaxed
#define  atomic64_dec_return_relaxed    atomic64_dec_return
#define  atomic64_dec_return_acquire    atomic64_dec_return
#define  atomic64_dec_return_release    atomic64_dec_return

#else /* atomic64_dec_return_relaxed */

#ifndef atomic64_dec_return_acquire
#define  atomic64_dec_return_acquire(...)                               \
        __atomic_op_acquire(atomic64_dec_return, __VA_ARGS__)
#endif

#ifndef atomic64_dec_return_release
#define  atomic64_dec_return_release(...)                               \
        __atomic_op_release(atomic64_dec_return, __VA_ARGS__)
#endif

#ifndef atomic64_dec_return
#define  atomic64_dec_return(...)                                       \
        __atomic_op_fence(atomic64_dec_return, __VA_ARGS__)
#endif
#endif /* atomic64_dec_return_relaxed */


/* atomic64_fetch_add_relaxed */
#ifndef atomic64_fetch_add_relaxed
#define atomic64_fetch_add_relaxed      atomic64_fetch_add
#define atomic64_fetch_add_acquire      atomic64_fetch_add
#define atomic64_fetch_add_release      atomic64_fetch_add

#else /* atomic64_fetch_add_relaxed */

#ifndef atomic64_fetch_add_acquire
#define atomic64_fetch_add_acquire(...)                                 \
        __atomic_op_acquire(atomic64_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_add_release
#define atomic64_fetch_add_release(...)                                 \
        __atomic_op_release(atomic64_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_add
#define atomic64_fetch_add(...)                                         \
        __atomic_op_fence(atomic64_fetch_add, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_add_relaxed */

/* atomic64_fetch_inc_relaxed */
#ifndef atomic64_fetch_inc_relaxed

#ifndef atomic64_fetch_inc
#define atomic64_fetch_inc(v)           atomic64_fetch_add(1, (v))
#define atomic64_fetch_inc_relaxed(v)   atomic64_fetch_add_relaxed(1, (v))
#define atomic64_fetch_inc_acquire(v)   atomic64_fetch_add_acquire(1, (v))
#define atomic64_fetch_inc_release(v)   atomic64_fetch_add_release(1, (v))
#else /* atomic64_fetch_inc */
#define atomic64_fetch_inc_relaxed      atomic64_fetch_inc
#define atomic64_fetch_inc_acquire      atomic64_fetch_inc
#define atomic64_fetch_inc_release      atomic64_fetch_inc
#endif /* atomic64_fetch_inc */

#else /* atomic64_fetch_inc_relaxed */

#ifndef atomic64_fetch_inc_acquire
#define atomic64_fetch_inc_acquire(...)                                 \
        __atomic_op_acquire(atomic64_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_inc_release
#define atomic64_fetch_inc_release(...)                                 \
        __atomic_op_release(atomic64_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_inc
#define atomic64_fetch_inc(...)                                         \
        __atomic_op_fence(atomic64_fetch_inc, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_inc_relaxed */

/* atomic64_fetch_sub_relaxed */
#ifndef atomic64_fetch_sub_relaxed
#define atomic64_fetch_sub_relaxed      atomic64_fetch_sub
#define atomic64_fetch_sub_acquire      atomic64_fetch_sub
#define atomic64_fetch_sub_release      atomic64_fetch_sub

#else /* atomic64_fetch_sub_relaxed */

#ifndef atomic64_fetch_sub_acquire
#define atomic64_fetch_sub_acquire(...)                                 \
        __atomic_op_acquire(atomic64_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_sub_release
#define atomic64_fetch_sub_release(...)                                 \
        __atomic_op_release(atomic64_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_sub
#define atomic64_fetch_sub(...)                                         \
        __atomic_op_fence(atomic64_fetch_sub, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_sub_relaxed */

/* atomic64_fetch_dec_relaxed */
#ifndef atomic64_fetch_dec_relaxed

#ifndef atomic64_fetch_dec
#define atomic64_fetch_dec(v)           atomic64_fetch_sub(1, (v))
#define atomic64_fetch_dec_relaxed(v)   atomic64_fetch_sub_relaxed(1, (v))
#define atomic64_fetch_dec_acquire(v)   atomic64_fetch_sub_acquire(1, (v))
#define atomic64_fetch_dec_release(v)   atomic64_fetch_sub_release(1, (v))
#else /* atomic64_fetch_dec */
#define atomic64_fetch_dec_relaxed      atomic64_fetch_dec
#define atomic64_fetch_dec_acquire      atomic64_fetch_dec
#define atomic64_fetch_dec_release      atomic64_fetch_dec
#endif /* atomic64_fetch_dec */

#else /* atomic64_fetch_dec_relaxed */

#ifndef atomic64_fetch_dec_acquire
#define atomic64_fetch_dec_acquire(...)                                 \
        __atomic_op_acquire(atomic64_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_dec_release
#define atomic64_fetch_dec_release(...)                                 \
        __atomic_op_release(atomic64_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_dec
#define atomic64_fetch_dec(...)                                         \
        __atomic_op_fence(atomic64_fetch_dec, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_dec_relaxed */

/* atomic64_fetch_or_relaxed */
#ifndef atomic64_fetch_or_relaxed
#define atomic64_fetch_or_relaxed       atomic64_fetch_or
#define atomic64_fetch_or_acquire       atomic64_fetch_or
#define atomic64_fetch_or_release       atomic64_fetch_or

#else /* atomic64_fetch_or_relaxed */

#ifndef atomic64_fetch_or_acquire
#define atomic64_fetch_or_acquire(...)                                  \
        __atomic_op_acquire(atomic64_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_or_release
#define atomic64_fetch_or_release(...)                                  \
        __atomic_op_release(atomic64_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_or
#define atomic64_fetch_or(...)                                          \
        __atomic_op_fence(atomic64_fetch_or, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_or_relaxed */

/* atomic64_fetch_and_relaxed */
#ifndef atomic64_fetch_and_relaxed
#define atomic64_fetch_and_relaxed      atomic64_fetch_and
#define atomic64_fetch_and_acquire      atomic64_fetch_and
#define atomic64_fetch_and_release      atomic64_fetch_and

#else /* atomic64_fetch_and_relaxed */

#ifndef atomic64_fetch_and_acquire
#define atomic64_fetch_and_acquire(...)                                 \
        __atomic_op_acquire(atomic64_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_and_release
#define atomic64_fetch_and_release(...)                                 \
        __atomic_op_release(atomic64_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_and
#define atomic64_fetch_and(...)                                         \
        __atomic_op_fence(atomic64_fetch_and, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_and_relaxed */

#ifdef atomic64_andnot
/* atomic64_fetch_andnot_relaxed */
#ifndef atomic64_fetch_andnot_relaxed
#define atomic64_fetch_andnot_relaxed   atomic64_fetch_andnot
#define atomic64_fetch_andnot_acquire   atomic64_fetch_andnot
#define atomic64_fetch_andnot_release   atomic64_fetch_andnot

#else /* atomic64_fetch_andnot_relaxed */

#ifndef atomic64_fetch_andnot_acquire
#define atomic64_fetch_andnot_acquire(...)                                      \
        __atomic_op_acquire(atomic64_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_andnot_release
#define atomic64_fetch_andnot_release(...)                                      \
        __atomic_op_release(atomic64_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_andnot
#define atomic64_fetch_andnot(...)                                              \
        __atomic_op_fence(atomic64_fetch_andnot, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_andnot_relaxed */
#endif /* atomic64_andnot */

/* atomic64_fetch_xor_relaxed */
#ifndef atomic64_fetch_xor_relaxed
#define atomic64_fetch_xor_relaxed      atomic64_fetch_xor
#define atomic64_fetch_xor_acquire      atomic64_fetch_xor
#define atomic64_fetch_xor_release      atomic64_fetch_xor

#else /* atomic64_fetch_xor_relaxed */

#ifndef atomic64_fetch_xor_acquire
#define atomic64_fetch_xor_acquire(...)                                 \
        __atomic_op_acquire(atomic64_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_xor_release
#define atomic64_fetch_xor_release(...)                                 \
        __atomic_op_release(atomic64_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_xor
#define atomic64_fetch_xor(...)                                         \
        __atomic_op_fence(atomic64_fetch_xor, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_xor_relaxed */


/* atomic64_xchg_relaxed */
#ifndef atomic64_xchg_relaxed
#define  atomic64_xchg_relaxed          atomic64_xchg
#define  atomic64_xchg_acquire          atomic64_xchg
#define  atomic64_xchg_release          atomic64_xchg

#else /* atomic64_xchg_relaxed */

#ifndef atomic64_xchg_acquire
#define  atomic64_xchg_acquire(...)                                     \
        __atomic_op_acquire(atomic64_xchg, __VA_ARGS__)
#endif

#ifndef atomic64_xchg_release
#define  atomic64_xchg_release(...)                                     \
        __atomic_op_release(atomic64_xchg, __VA_ARGS__)
#endif

#ifndef atomic64_xchg
#define  atomic64_xchg(...)                                             \
        __atomic_op_fence(atomic64_xchg, __VA_ARGS__)
#endif
#endif /* atomic64_xchg_relaxed */

/* atomic64_cmpxchg_relaxed */
#ifndef atomic64_cmpxchg_relaxed
#define  atomic64_cmpxchg_relaxed       atomic64_cmpxchg
#define  atomic64_cmpxchg_acquire       atomic64_cmpxchg
#define  atomic64_cmpxchg_release       atomic64_cmpxchg

#else /* atomic64_cmpxchg_relaxed */

#ifndef atomic64_cmpxchg_acquire
#define  atomic64_cmpxchg_acquire(...)                                  \
        __atomic_op_acquire(atomic64_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic64_cmpxchg_release
#define  atomic64_cmpxchg_release(...)                                  \
        __atomic_op_release(atomic64_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic64_cmpxchg
#define  atomic64_cmpxchg(...)                                          \
        __atomic_op_fence(atomic64_cmpxchg, __VA_ARGS__)
#endif
#endif /* atomic64_cmpxchg_relaxed */

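/*
 * atomic64_try_cmpxchg(): 64-bit counterpart of atomic_try_cmpxchg()
 * above; returns a boolean success indication and, on failure, refreshes
 * the 'old' value through the _po pointer.
 */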
#ifndef atomic64_try_cmpxchg

#define __atomic64_try_cmpxchg(type, _p, _po, _n)                       \
({                                                                      \
        typeof(_po) __po = (_po);                                       \
        typeof(*(_po)) __o = *__po;                                     \
        *__po = atomic64_cmpxchg##type((_p), __o, (_n));                \
        (*__po == __o);                                                 \
})

#define atomic64_try_cmpxchg(_p, _po, _n)               __atomic64_try_cmpxchg(, _p, _po, _n)
#define atomic64_try_cmpxchg_relaxed(_p, _po, _n)       __atomic64_try_cmpxchg(_relaxed, _p, _po, _n)
#define atomic64_try_cmpxchg_acquire(_p, _po, _n)       __atomic64_try_cmpxchg(_acquire, _p, _po, _n)
#define atomic64_try_cmpxchg_release(_p, _po, _n)       __atomic64_try_cmpxchg(_release, _p, _po, _n)

#else /* atomic64_try_cmpxchg */
#define atomic64_try_cmpxchg_relaxed    atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_acquire    atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_release    atomic64_try_cmpxchg
#endif /* atomic64_try_cmpxchg */
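
/*
 * Usage sketch (illustrative; example_add64() is hypothetical): the
 * 64-bit try_cmpxchg form follows the same retry idiom as the 32-bit
 * example shown earlier:
 *
 *      static void example_add64(atomic64_t *v, long long a)
 *      {
 *              long long old = atomic64_read(v), new;
 *
 *              do {
 *                      new = old + a;
 *              } while (!atomic64_try_cmpxchg(v, &old, new));
 *      }
 */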

#ifndef atomic64_andnot
static inline void atomic64_andnot(long long i, atomic64_t *v)
{
        atomic64_and(~i, v);
}

static inline long long atomic64_fetch_andnot(long long i, atomic64_t *v)
{
        return atomic64_fetch_and(~i, v);
}

static inline long long atomic64_fetch_andnot_relaxed(long long i, atomic64_t *v)
{
        return atomic64_fetch_and_relaxed(~i, v);
}

static inline long long atomic64_fetch_andnot_acquire(long long i, atomic64_t *v)
{
        return atomic64_fetch_and_acquire(~i, v);
}

static inline long long atomic64_fetch_andnot_release(long long i, atomic64_t *v)
{
        return atomic64_fetch_and_release(~i, v);
}
#endif

#include <asm-generic/atomic-long.h>

#endif /* _LINUX_ATOMIC_H */