1 /* Atomic operations usable in machine independent code */
2 #ifndef _LINUX_ATOMIC_H
3 #define _LINUX_ATOMIC_H
4 #include <asm/atomic.h>
5 #include <asm/barrier.h>
/*
 * Relaxed variants of xchg, cmpxchg and some atomic operations.
 *
 * We support four variants:
 *
 * - Fully ordered: The default implementation, no suffix required.
 * - Acquire: Provides ACQUIRE semantics, _acquire suffix.
 * - Release: Provides RELEASE semantics, _release suffix.
 * - Relaxed: No ordering guarantees, _relaxed suffix.
 *
 * For compound atomics performing both a load and a store, ACQUIRE
 * semantics apply only to the load and RELEASE semantics only to the
 * store portion of the operation. Note that a failed cmpxchg_acquire
 * does -not- imply any memory ordering constraints.
 *
 * See Documentation/memory-barriers.txt for ACQUIRE/RELEASE definitions.
 */
#ifndef atomic_read_acquire
#define  atomic_read_acquire(v)		smp_load_acquire(&(v)->counter)
#endif

#ifndef atomic_set_release
#define  atomic_set_release(v, i)	smp_store_release(&(v)->counter, (i))
#endif

/*
 * The idea here is to build acquire/release variants by adding explicit
 * barriers on top of the relaxed variant. In the case where the relaxed
 * variant is already fully ordered, no additional barriers are needed.
 *
 * Besides, if an arch has a special barrier for acquire/release, it could
 * implement its own __atomic_op_* and use the same framework for building
 * variants
 */
#ifndef __atomic_op_acquire
#define __atomic_op_acquire(op, args...)				\
({									\
	typeof(op##_relaxed(args)) __ret  = op##_relaxed(args);		\
	smp_mb__after_atomic();						\
	__ret;								\
})
#endif

#ifndef __atomic_op_release
#define __atomic_op_release(op, args...)				\
({									\
	smp_mb__before_atomic();					\
	op##_relaxed(args);						\
})
#endif

#ifndef __atomic_op_fence
#define __atomic_op_fence(op, args...)					\
({									\
	typeof(op##_relaxed(args)) __ret;				\
	smp_mb__before_atomic();					\
	__ret = op##_relaxed(args);					\
	smp_mb__after_atomic();						\
	__ret;								\
})
#endif
/*
 * Ordering-variant fallbacks: if the arch supplies no _relaxed form, the
 * fully ordered op serves for every ordering; otherwise the acquire,
 * release and fenced forms are built from the relaxed op via the
 * __atomic_op_*() wrappers.
 */

/* atomic_add_return_relaxed */
#ifndef atomic_add_return_relaxed
#define  atomic_add_return_relaxed	atomic_add_return
#define  atomic_add_return_acquire	atomic_add_return
#define  atomic_add_return_release	atomic_add_return

#else /* atomic_add_return_relaxed */

#ifndef atomic_add_return_acquire
#define  atomic_add_return_acquire(...)					\
	__atomic_op_acquire(atomic_add_return, __VA_ARGS__)
#endif

#ifndef atomic_add_return_release
#define  atomic_add_return_release(...)					\
	__atomic_op_release(atomic_add_return, __VA_ARGS__)
#endif

#ifndef atomic_add_return
#define  atomic_add_return(...)						\
	__atomic_op_fence(atomic_add_return, __VA_ARGS__)
#endif
#endif /* atomic_add_return_relaxed */

/* atomic_inc_return_relaxed */
#ifndef atomic_inc_return_relaxed
#define  atomic_inc_return_relaxed	atomic_inc_return
#define  atomic_inc_return_acquire	atomic_inc_return
#define  atomic_inc_return_release	atomic_inc_return

#else /* atomic_inc_return_relaxed */

#ifndef atomic_inc_return_acquire
#define  atomic_inc_return_acquire(...)					\
	__atomic_op_acquire(atomic_inc_return, __VA_ARGS__)
#endif

#ifndef atomic_inc_return_release
#define  atomic_inc_return_release(...)					\
	__atomic_op_release(atomic_inc_return, __VA_ARGS__)
#endif

#ifndef atomic_inc_return
#define  atomic_inc_return(...)						\
	__atomic_op_fence(atomic_inc_return, __VA_ARGS__)
#endif
#endif /* atomic_inc_return_relaxed */

/* atomic_sub_return_relaxed */
#ifndef atomic_sub_return_relaxed
#define  atomic_sub_return_relaxed	atomic_sub_return
#define  atomic_sub_return_acquire	atomic_sub_return
#define  atomic_sub_return_release	atomic_sub_return

#else /* atomic_sub_return_relaxed */

#ifndef atomic_sub_return_acquire
#define  atomic_sub_return_acquire(...)					\
	__atomic_op_acquire(atomic_sub_return, __VA_ARGS__)
#endif

#ifndef atomic_sub_return_release
#define  atomic_sub_return_release(...)					\
	__atomic_op_release(atomic_sub_return, __VA_ARGS__)
#endif

#ifndef atomic_sub_return
#define  atomic_sub_return(...)						\
	__atomic_op_fence(atomic_sub_return, __VA_ARGS__)
#endif
#endif /* atomic_sub_return_relaxed */

/* atomic_dec_return_relaxed */
#ifndef atomic_dec_return_relaxed
#define  atomic_dec_return_relaxed	atomic_dec_return
#define  atomic_dec_return_acquire	atomic_dec_return
#define  atomic_dec_return_release	atomic_dec_return

#else /* atomic_dec_return_relaxed */

#ifndef atomic_dec_return_acquire
#define  atomic_dec_return_acquire(...)					\
	__atomic_op_acquire(atomic_dec_return, __VA_ARGS__)
#endif

#ifndef atomic_dec_return_release
#define  atomic_dec_return_release(...)					\
	__atomic_op_release(atomic_dec_return, __VA_ARGS__)
#endif

#ifndef atomic_dec_return
#define  atomic_dec_return(...)						\
	__atomic_op_fence(atomic_dec_return, __VA_ARGS__)
#endif
#endif /* atomic_dec_return_relaxed */
/*
 * atomic_fetch_* ordering fallbacks. The inc/dec forms additionally fall
 * back to fetch_add/fetch_sub of 1 when the arch provides no fetch_inc /
 * fetch_dec at all.
 */

/* atomic_fetch_add_relaxed */
#ifndef atomic_fetch_add_relaxed
#define atomic_fetch_add_relaxed	atomic_fetch_add
#define atomic_fetch_add_acquire	atomic_fetch_add
#define atomic_fetch_add_release	atomic_fetch_add

#else /* atomic_fetch_add_relaxed */

#ifndef atomic_fetch_add_acquire
#define atomic_fetch_add_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic_fetch_add_release
#define atomic_fetch_add_release(...)					\
	__atomic_op_release(atomic_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic_fetch_add
#define atomic_fetch_add(...)						\
	__atomic_op_fence(atomic_fetch_add, __VA_ARGS__)
#endif
#endif /* atomic_fetch_add_relaxed */

/* atomic_fetch_inc_relaxed */
#ifndef atomic_fetch_inc_relaxed

#ifndef atomic_fetch_inc
#define atomic_fetch_inc(v)		atomic_fetch_add(1, (v))
#define atomic_fetch_inc_relaxed(v)	atomic_fetch_add_relaxed(1, (v))
#define atomic_fetch_inc_acquire(v)	atomic_fetch_add_acquire(1, (v))
#define atomic_fetch_inc_release(v)	atomic_fetch_add_release(1, (v))
#else /* atomic_fetch_inc */
#define atomic_fetch_inc_relaxed	atomic_fetch_inc
#define atomic_fetch_inc_acquire	atomic_fetch_inc
#define atomic_fetch_inc_release	atomic_fetch_inc
#endif /* atomic_fetch_inc */

#else /* atomic_fetch_inc_relaxed */

#ifndef atomic_fetch_inc_acquire
#define atomic_fetch_inc_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic_fetch_inc_release
#define atomic_fetch_inc_release(...)					\
	__atomic_op_release(atomic_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic_fetch_inc
#define atomic_fetch_inc(...)						\
	__atomic_op_fence(atomic_fetch_inc, __VA_ARGS__)
#endif
#endif /* atomic_fetch_inc_relaxed */

/* atomic_fetch_sub_relaxed */
#ifndef atomic_fetch_sub_relaxed
#define atomic_fetch_sub_relaxed	atomic_fetch_sub
#define atomic_fetch_sub_acquire	atomic_fetch_sub
#define atomic_fetch_sub_release	atomic_fetch_sub

#else /* atomic_fetch_sub_relaxed */

#ifndef atomic_fetch_sub_acquire
#define atomic_fetch_sub_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic_fetch_sub_release
#define atomic_fetch_sub_release(...)					\
	__atomic_op_release(atomic_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic_fetch_sub
#define atomic_fetch_sub(...)						\
	__atomic_op_fence(atomic_fetch_sub, __VA_ARGS__)
#endif
#endif /* atomic_fetch_sub_relaxed */

/* atomic_fetch_dec_relaxed */
#ifndef atomic_fetch_dec_relaxed

#ifndef atomic_fetch_dec
#define atomic_fetch_dec(v)		atomic_fetch_sub(1, (v))
#define atomic_fetch_dec_relaxed(v)	atomic_fetch_sub_relaxed(1, (v))
#define atomic_fetch_dec_acquire(v)	atomic_fetch_sub_acquire(1, (v))
#define atomic_fetch_dec_release(v)	atomic_fetch_sub_release(1, (v))
#else /* atomic_fetch_dec */
#define atomic_fetch_dec_relaxed	atomic_fetch_dec
#define atomic_fetch_dec_acquire	atomic_fetch_dec
#define atomic_fetch_dec_release	atomic_fetch_dec
#endif /* atomic_fetch_dec */

#else /* atomic_fetch_dec_relaxed */

#ifndef atomic_fetch_dec_acquire
#define atomic_fetch_dec_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic_fetch_dec_release
#define atomic_fetch_dec_release(...)					\
	__atomic_op_release(atomic_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic_fetch_dec
#define atomic_fetch_dec(...)						\
	__atomic_op_fence(atomic_fetch_dec, __VA_ARGS__)
#endif
#endif /* atomic_fetch_dec_relaxed */
/* atomic_fetch_or_relaxed */
#ifndef atomic_fetch_or_relaxed
#define atomic_fetch_or_relaxed	atomic_fetch_or
#define atomic_fetch_or_acquire	atomic_fetch_or
#define atomic_fetch_or_release	atomic_fetch_or

#else /* atomic_fetch_or_relaxed */

#ifndef atomic_fetch_or_acquire
#define atomic_fetch_or_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic_fetch_or_release
#define atomic_fetch_or_release(...)					\
	__atomic_op_release(atomic_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic_fetch_or
#define atomic_fetch_or(...)						\
	__atomic_op_fence(atomic_fetch_or, __VA_ARGS__)
#endif
#endif /* atomic_fetch_or_relaxed */

/* atomic_fetch_and_relaxed */
#ifndef atomic_fetch_and_relaxed
#define atomic_fetch_and_relaxed	atomic_fetch_and
#define atomic_fetch_and_acquire	atomic_fetch_and
#define atomic_fetch_and_release	atomic_fetch_and

#else /* atomic_fetch_and_relaxed */

#ifndef atomic_fetch_and_acquire
#define atomic_fetch_and_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic_fetch_and_release
#define atomic_fetch_and_release(...)					\
	__atomic_op_release(atomic_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic_fetch_and
#define atomic_fetch_and(...)						\
	__atomic_op_fence(atomic_fetch_and, __VA_ARGS__)
#endif
#endif /* atomic_fetch_and_relaxed */

/*
 * atomic_fetch_andnot only has ordering fallbacks when the arch defines
 * atomic_andnot itself; otherwise the generic fallbacks later in this
 * file provide all variants in terms of atomic_fetch_and().
 */
#ifdef atomic_andnot
/* atomic_fetch_andnot_relaxed */
#ifndef atomic_fetch_andnot_relaxed
#define atomic_fetch_andnot_relaxed	atomic_fetch_andnot
#define atomic_fetch_andnot_acquire	atomic_fetch_andnot
#define atomic_fetch_andnot_release	atomic_fetch_andnot

#else /* atomic_fetch_andnot_relaxed */

#ifndef atomic_fetch_andnot_acquire
#define atomic_fetch_andnot_acquire(...)				\
	__atomic_op_acquire(atomic_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic_fetch_andnot_release
#define atomic_fetch_andnot_release(...)				\
	__atomic_op_release(atomic_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic_fetch_andnot
#define atomic_fetch_andnot(...)					\
	__atomic_op_fence(atomic_fetch_andnot, __VA_ARGS__)
#endif
#endif /* atomic_fetch_andnot_relaxed */
#endif /* atomic_andnot */

/* atomic_fetch_xor_relaxed */
#ifndef atomic_fetch_xor_relaxed
#define atomic_fetch_xor_relaxed	atomic_fetch_xor
#define atomic_fetch_xor_acquire	atomic_fetch_xor
#define atomic_fetch_xor_release	atomic_fetch_xor

#else /* atomic_fetch_xor_relaxed */

#ifndef atomic_fetch_xor_acquire
#define atomic_fetch_xor_acquire(...)					\
	__atomic_op_acquire(atomic_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic_fetch_xor_release
#define atomic_fetch_xor_release(...)					\
	__atomic_op_release(atomic_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic_fetch_xor
#define atomic_fetch_xor(...)						\
	__atomic_op_fence(atomic_fetch_xor, __VA_ARGS__)
#endif
#endif /* atomic_fetch_xor_relaxed */
/* atomic_xchg_relaxed */
#ifndef atomic_xchg_relaxed
#define  atomic_xchg_relaxed		atomic_xchg
#define  atomic_xchg_acquire		atomic_xchg
#define  atomic_xchg_release		atomic_xchg

#else /* atomic_xchg_relaxed */

#ifndef atomic_xchg_acquire
#define  atomic_xchg_acquire(...)					\
	__atomic_op_acquire(atomic_xchg, __VA_ARGS__)
#endif

#ifndef atomic_xchg_release
#define  atomic_xchg_release(...)					\
	__atomic_op_release(atomic_xchg, __VA_ARGS__)
#endif

#ifndef atomic_xchg
#define  atomic_xchg(...)						\
	__atomic_op_fence(atomic_xchg, __VA_ARGS__)
#endif
#endif /* atomic_xchg_relaxed */

/* atomic_cmpxchg_relaxed */
#ifndef atomic_cmpxchg_relaxed
#define  atomic_cmpxchg_relaxed		atomic_cmpxchg
#define  atomic_cmpxchg_acquire		atomic_cmpxchg
#define  atomic_cmpxchg_release		atomic_cmpxchg

#else /* atomic_cmpxchg_relaxed */

#ifndef atomic_cmpxchg_acquire
#define  atomic_cmpxchg_acquire(...)					\
	__atomic_op_acquire(atomic_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic_cmpxchg_release
#define  atomic_cmpxchg_release(...)					\
	__atomic_op_release(atomic_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic_cmpxchg
#define  atomic_cmpxchg(...)						\
	__atomic_op_fence(atomic_cmpxchg, __VA_ARGS__)
#endif
#endif /* atomic_cmpxchg_relaxed */

#ifndef atomic_try_cmpxchg

/*
 * cmpxchg with a boolean success result; on failure the "old" value at
 * *_po is updated to the value actually observed, so callers can retry
 * without an extra load.
 */
#define __atomic_try_cmpxchg(type, _p, _po, _n)				\
({									\
	typeof(_po) __po = (_po);					\
	typeof(*(_po)) __o = *__po;					\
	*__po = atomic_cmpxchg##type((_p), __o, (_n));			\
	(*__po == __o);							\
})

#define atomic_try_cmpxchg(_p, _po, _n)		__atomic_try_cmpxchg(, _p, _po, _n)
#define atomic_try_cmpxchg_relaxed(_p, _po, _n)	__atomic_try_cmpxchg(_relaxed, _p, _po, _n)
#define atomic_try_cmpxchg_acquire(_p, _po, _n)	__atomic_try_cmpxchg(_acquire, _p, _po, _n)
#define atomic_try_cmpxchg_release(_p, _po, _n)	__atomic_try_cmpxchg(_release, _p, _po, _n)

#else /* atomic_try_cmpxchg */
#define atomic_try_cmpxchg_relaxed	atomic_try_cmpxchg
#define atomic_try_cmpxchg_acquire	atomic_try_cmpxchg
#define atomic_try_cmpxchg_release	atomic_try_cmpxchg
#endif /* atomic_try_cmpxchg */
/* cmpxchg_relaxed */
#ifndef cmpxchg_relaxed
#define  cmpxchg_relaxed		cmpxchg
#define  cmpxchg_acquire		cmpxchg
#define  cmpxchg_release		cmpxchg

#else /* cmpxchg_relaxed */

#ifndef cmpxchg_acquire
#define  cmpxchg_acquire(...)						\
	__atomic_op_acquire(cmpxchg, __VA_ARGS__)
#endif

#ifndef cmpxchg_release
#define  cmpxchg_release(...)						\
	__atomic_op_release(cmpxchg, __VA_ARGS__)
#endif

#ifndef cmpxchg
#define  cmpxchg(...)							\
	__atomic_op_fence(cmpxchg, __VA_ARGS__)
#endif
#endif /* cmpxchg_relaxed */

/* cmpxchg64_relaxed */
#ifndef cmpxchg64_relaxed
#define  cmpxchg64_relaxed		cmpxchg64
#define  cmpxchg64_acquire		cmpxchg64
#define  cmpxchg64_release		cmpxchg64

#else /* cmpxchg64_relaxed */

#ifndef cmpxchg64_acquire
#define  cmpxchg64_acquire(...)						\
	__atomic_op_acquire(cmpxchg64, __VA_ARGS__)
#endif

#ifndef cmpxchg64_release
#define  cmpxchg64_release(...)						\
	__atomic_op_release(cmpxchg64, __VA_ARGS__)
#endif

#ifndef cmpxchg64
#define  cmpxchg64(...)							\
	__atomic_op_fence(cmpxchg64, __VA_ARGS__)
#endif
#endif /* cmpxchg64_relaxed */

/* xchg_relaxed */
#ifndef xchg_relaxed
#define  xchg_relaxed			xchg
#define  xchg_acquire			xchg
#define  xchg_release			xchg

#else /* xchg_relaxed */

#ifndef xchg_acquire
#define  xchg_acquire(...)		__atomic_op_acquire(xchg, __VA_ARGS__)
#endif

#ifndef xchg_release
#define  xchg_release(...)		__atomic_op_release(xchg, __VA_ARGS__)
#endif

#ifndef xchg
#define  xchg(...)			__atomic_op_fence(xchg, __VA_ARGS__)
#endif
#endif /* xchg_relaxed */
517 * atomic_add_unless - add unless the number is already a given value
518 * @v: pointer of type atomic_t
519 * @a: the amount to add to v...
520 * @u: ...unless v is equal to u.
522 * Atomically adds @a to @v, so long as @v was not already @u.
523 * Returns non-zero if @v was not @u, and zero otherwise.
525 static inline int atomic_add_unless(atomic_t *v, int a, int u)
527 return __atomic_add_unless(v, a, u) != u;
531 * atomic_inc_not_zero - increment unless the number is zero
532 * @v: pointer of type atomic_t
534 * Atomically increments @v by 1, so long as @v is non-zero.
535 * Returns non-zero if @v was non-zero, and zero otherwise.
537 #ifndef atomic_inc_not_zero
538 #define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
541 #ifndef atomic_andnot
542 static inline void atomic_andnot(int i, atomic_t *v)
547 static inline int atomic_fetch_andnot(int i, atomic_t *v)
549 return atomic_fetch_and(~i, v);
552 static inline int atomic_fetch_andnot_relaxed(int i, atomic_t *v)
554 return atomic_fetch_and_relaxed(~i, v);
557 static inline int atomic_fetch_andnot_acquire(int i, atomic_t *v)
559 return atomic_fetch_and_acquire(~i, v);
562 static inline int atomic_fetch_andnot_release(int i, atomic_t *v)
564 return atomic_fetch_and_release(~i, v);
569 * atomic_inc_not_zero_hint - increment if not null
570 * @v: pointer of type atomic_t
571 * @hint: probable value of the atomic before the increment
573 * This version of atomic_inc_not_zero() gives a hint of probable
574 * value of the atomic. This helps processor to not read the memory
575 * before doing the atomic read/modify/write cycle, lowering
576 * number of bus transactions on some arches.
578 * Returns: 0 if increment was not done, 1 otherwise.
580 #ifndef atomic_inc_not_zero_hint
581 static inline int atomic_inc_not_zero_hint(atomic_t *v, int hint)
585 /* sanity test, should be removed by compiler if hint is a constant */
587 return atomic_inc_not_zero(v);
590 val = atomic_cmpxchg(v, c, c + 1);
600 #ifndef atomic_inc_unless_negative
601 static inline int atomic_inc_unless_negative(atomic_t *p)
604 for (v = 0; v >= 0; v = v1) {
605 v1 = atomic_cmpxchg(p, v, v + 1);
613 #ifndef atomic_dec_unless_positive
614 static inline int atomic_dec_unless_positive(atomic_t *p)
617 for (v = 0; v <= 0; v = v1) {
618 v1 = atomic_cmpxchg(p, v, v - 1);
627 * atomic_dec_if_positive - decrement by 1 if old value positive
628 * @v: pointer of type atomic_t
630 * The function returns the old value of *v minus 1, even if
631 * the atomic variable, v, was not decremented.
633 #ifndef atomic_dec_if_positive
634 static inline int atomic_dec_if_positive(atomic_t *v)
640 if (unlikely(dec < 0))
642 old = atomic_cmpxchg((v), c, dec);
643 if (likely(old == c))
#ifdef CONFIG_GENERIC_ATOMIC64
#include <asm-generic/atomic64.h>
#endif

#ifndef atomic64_read_acquire
#define  atomic64_read_acquire(v)	smp_load_acquire(&(v)->counter)
#endif

#ifndef atomic64_set_release
#define  atomic64_set_release(v, i)	smp_store_release(&(v)->counter, (i))
#endif
/* atomic64_add_return_relaxed */
#ifndef atomic64_add_return_relaxed
#define  atomic64_add_return_relaxed	atomic64_add_return
#define  atomic64_add_return_acquire	atomic64_add_return
#define  atomic64_add_return_release	atomic64_add_return

#else /* atomic64_add_return_relaxed */

#ifndef atomic64_add_return_acquire
#define  atomic64_add_return_acquire(...)				\
	__atomic_op_acquire(atomic64_add_return, __VA_ARGS__)
#endif

#ifndef atomic64_add_return_release
#define  atomic64_add_return_release(...)				\
	__atomic_op_release(atomic64_add_return, __VA_ARGS__)
#endif

#ifndef atomic64_add_return
#define  atomic64_add_return(...)					\
	__atomic_op_fence(atomic64_add_return, __VA_ARGS__)
#endif
#endif /* atomic64_add_return_relaxed */

/* atomic64_inc_return_relaxed */
#ifndef atomic64_inc_return_relaxed
#define  atomic64_inc_return_relaxed	atomic64_inc_return
#define  atomic64_inc_return_acquire	atomic64_inc_return
#define  atomic64_inc_return_release	atomic64_inc_return

#else /* atomic64_inc_return_relaxed */

#ifndef atomic64_inc_return_acquire
#define  atomic64_inc_return_acquire(...)				\
	__atomic_op_acquire(atomic64_inc_return, __VA_ARGS__)
#endif

#ifndef atomic64_inc_return_release
#define  atomic64_inc_return_release(...)				\
	__atomic_op_release(atomic64_inc_return, __VA_ARGS__)
#endif

#ifndef atomic64_inc_return
#define  atomic64_inc_return(...)					\
	__atomic_op_fence(atomic64_inc_return, __VA_ARGS__)
#endif
#endif /* atomic64_inc_return_relaxed */

/* atomic64_sub_return_relaxed */
#ifndef atomic64_sub_return_relaxed
#define  atomic64_sub_return_relaxed	atomic64_sub_return
#define  atomic64_sub_return_acquire	atomic64_sub_return
#define  atomic64_sub_return_release	atomic64_sub_return

#else /* atomic64_sub_return_relaxed */

#ifndef atomic64_sub_return_acquire
#define  atomic64_sub_return_acquire(...)				\
	__atomic_op_acquire(atomic64_sub_return, __VA_ARGS__)
#endif

#ifndef atomic64_sub_return_release
#define  atomic64_sub_return_release(...)				\
	__atomic_op_release(atomic64_sub_return, __VA_ARGS__)
#endif

#ifndef atomic64_sub_return
#define  atomic64_sub_return(...)					\
	__atomic_op_fence(atomic64_sub_return, __VA_ARGS__)
#endif
#endif /* atomic64_sub_return_relaxed */

/* atomic64_dec_return_relaxed */
#ifndef atomic64_dec_return_relaxed
#define  atomic64_dec_return_relaxed	atomic64_dec_return
#define  atomic64_dec_return_acquire	atomic64_dec_return
#define  atomic64_dec_return_release	atomic64_dec_return

#else /* atomic64_dec_return_relaxed */

#ifndef atomic64_dec_return_acquire
#define  atomic64_dec_return_acquire(...)				\
	__atomic_op_acquire(atomic64_dec_return, __VA_ARGS__)
#endif

#ifndef atomic64_dec_return_release
#define  atomic64_dec_return_release(...)				\
	__atomic_op_release(atomic64_dec_return, __VA_ARGS__)
#endif

#ifndef atomic64_dec_return
#define  atomic64_dec_return(...)					\
	__atomic_op_fence(atomic64_dec_return, __VA_ARGS__)
#endif
#endif /* atomic64_dec_return_relaxed */
/* atomic64_fetch_add_relaxed */
#ifndef atomic64_fetch_add_relaxed
#define atomic64_fetch_add_relaxed	atomic64_fetch_add
#define atomic64_fetch_add_acquire	atomic64_fetch_add
#define atomic64_fetch_add_release	atomic64_fetch_add

#else /* atomic64_fetch_add_relaxed */

#ifndef atomic64_fetch_add_acquire
#define atomic64_fetch_add_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_add_release
#define atomic64_fetch_add_release(...)					\
	__atomic_op_release(atomic64_fetch_add, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_add
#define atomic64_fetch_add(...)						\
	__atomic_op_fence(atomic64_fetch_add, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_add_relaxed */

/* atomic64_fetch_inc_relaxed */
#ifndef atomic64_fetch_inc_relaxed

#ifndef atomic64_fetch_inc
#define atomic64_fetch_inc(v)		atomic64_fetch_add(1, (v))
#define atomic64_fetch_inc_relaxed(v)	atomic64_fetch_add_relaxed(1, (v))
#define atomic64_fetch_inc_acquire(v)	atomic64_fetch_add_acquire(1, (v))
#define atomic64_fetch_inc_release(v)	atomic64_fetch_add_release(1, (v))
#else /* atomic64_fetch_inc */
#define atomic64_fetch_inc_relaxed	atomic64_fetch_inc
#define atomic64_fetch_inc_acquire	atomic64_fetch_inc
#define atomic64_fetch_inc_release	atomic64_fetch_inc
#endif /* atomic64_fetch_inc */

#else /* atomic64_fetch_inc_relaxed */

#ifndef atomic64_fetch_inc_acquire
#define atomic64_fetch_inc_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_inc_release
#define atomic64_fetch_inc_release(...)					\
	__atomic_op_release(atomic64_fetch_inc, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_inc
#define atomic64_fetch_inc(...)						\
	__atomic_op_fence(atomic64_fetch_inc, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_inc_relaxed */

/* atomic64_fetch_sub_relaxed */
#ifndef atomic64_fetch_sub_relaxed
#define atomic64_fetch_sub_relaxed	atomic64_fetch_sub
#define atomic64_fetch_sub_acquire	atomic64_fetch_sub
#define atomic64_fetch_sub_release	atomic64_fetch_sub

#else /* atomic64_fetch_sub_relaxed */

#ifndef atomic64_fetch_sub_acquire
#define atomic64_fetch_sub_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_sub_release
#define atomic64_fetch_sub_release(...)					\
	__atomic_op_release(atomic64_fetch_sub, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_sub
#define atomic64_fetch_sub(...)						\
	__atomic_op_fence(atomic64_fetch_sub, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_sub_relaxed */

/* atomic64_fetch_dec_relaxed */
#ifndef atomic64_fetch_dec_relaxed

#ifndef atomic64_fetch_dec
#define atomic64_fetch_dec(v)		atomic64_fetch_sub(1, (v))
#define atomic64_fetch_dec_relaxed(v)	atomic64_fetch_sub_relaxed(1, (v))
#define atomic64_fetch_dec_acquire(v)	atomic64_fetch_sub_acquire(1, (v))
#define atomic64_fetch_dec_release(v)	atomic64_fetch_sub_release(1, (v))
#else /* atomic64_fetch_dec */
#define atomic64_fetch_dec_relaxed	atomic64_fetch_dec
#define atomic64_fetch_dec_acquire	atomic64_fetch_dec
#define atomic64_fetch_dec_release	atomic64_fetch_dec
#endif /* atomic64_fetch_dec */

#else /* atomic64_fetch_dec_relaxed */

#ifndef atomic64_fetch_dec_acquire
#define atomic64_fetch_dec_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_dec_release
#define atomic64_fetch_dec_release(...)					\
	__atomic_op_release(atomic64_fetch_dec, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_dec
#define atomic64_fetch_dec(...)						\
	__atomic_op_fence(atomic64_fetch_dec, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_dec_relaxed */
/* atomic64_fetch_or_relaxed */
#ifndef atomic64_fetch_or_relaxed
#define atomic64_fetch_or_relaxed	atomic64_fetch_or
#define atomic64_fetch_or_acquire	atomic64_fetch_or
#define atomic64_fetch_or_release	atomic64_fetch_or

#else /* atomic64_fetch_or_relaxed */

#ifndef atomic64_fetch_or_acquire
#define atomic64_fetch_or_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_or_release
#define atomic64_fetch_or_release(...)					\
	__atomic_op_release(atomic64_fetch_or, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_or
#define atomic64_fetch_or(...)						\
	__atomic_op_fence(atomic64_fetch_or, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_or_relaxed */

/* atomic64_fetch_and_relaxed */
#ifndef atomic64_fetch_and_relaxed
#define atomic64_fetch_and_relaxed	atomic64_fetch_and
#define atomic64_fetch_and_acquire	atomic64_fetch_and
#define atomic64_fetch_and_release	atomic64_fetch_and

#else /* atomic64_fetch_and_relaxed */

#ifndef atomic64_fetch_and_acquire
#define atomic64_fetch_and_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_and_release
#define atomic64_fetch_and_release(...)					\
	__atomic_op_release(atomic64_fetch_and, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_and
#define atomic64_fetch_and(...)						\
	__atomic_op_fence(atomic64_fetch_and, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_and_relaxed */

#ifdef atomic64_andnot
/* atomic64_fetch_andnot_relaxed */
#ifndef atomic64_fetch_andnot_relaxed
#define atomic64_fetch_andnot_relaxed	atomic64_fetch_andnot
#define atomic64_fetch_andnot_acquire	atomic64_fetch_andnot
#define atomic64_fetch_andnot_release	atomic64_fetch_andnot

#else /* atomic64_fetch_andnot_relaxed */

#ifndef atomic64_fetch_andnot_acquire
#define atomic64_fetch_andnot_acquire(...)				\
	__atomic_op_acquire(atomic64_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_andnot_release
#define atomic64_fetch_andnot_release(...)				\
	__atomic_op_release(atomic64_fetch_andnot, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_andnot
#define atomic64_fetch_andnot(...)					\
	__atomic_op_fence(atomic64_fetch_andnot, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_andnot_relaxed */
#endif /* atomic64_andnot */

/* atomic64_fetch_xor_relaxed */
#ifndef atomic64_fetch_xor_relaxed
#define atomic64_fetch_xor_relaxed	atomic64_fetch_xor
#define atomic64_fetch_xor_acquire	atomic64_fetch_xor
#define atomic64_fetch_xor_release	atomic64_fetch_xor

#else /* atomic64_fetch_xor_relaxed */

#ifndef atomic64_fetch_xor_acquire
#define atomic64_fetch_xor_acquire(...)					\
	__atomic_op_acquire(atomic64_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_xor_release
#define atomic64_fetch_xor_release(...)					\
	__atomic_op_release(atomic64_fetch_xor, __VA_ARGS__)
#endif

#ifndef atomic64_fetch_xor
#define atomic64_fetch_xor(...)						\
	__atomic_op_fence(atomic64_fetch_xor, __VA_ARGS__)
#endif
#endif /* atomic64_fetch_xor_relaxed */
/* atomic64_xchg_relaxed */
#ifndef atomic64_xchg_relaxed
#define  atomic64_xchg_relaxed		atomic64_xchg
#define  atomic64_xchg_acquire		atomic64_xchg
#define  atomic64_xchg_release		atomic64_xchg

#else /* atomic64_xchg_relaxed */

#ifndef atomic64_xchg_acquire
#define  atomic64_xchg_acquire(...)					\
	__atomic_op_acquire(atomic64_xchg, __VA_ARGS__)
#endif

#ifndef atomic64_xchg_release
#define  atomic64_xchg_release(...)					\
	__atomic_op_release(atomic64_xchg, __VA_ARGS__)
#endif

#ifndef atomic64_xchg
#define  atomic64_xchg(...)						\
	__atomic_op_fence(atomic64_xchg, __VA_ARGS__)
#endif
#endif /* atomic64_xchg_relaxed */

/* atomic64_cmpxchg_relaxed */
#ifndef atomic64_cmpxchg_relaxed
#define  atomic64_cmpxchg_relaxed	atomic64_cmpxchg
#define  atomic64_cmpxchg_acquire	atomic64_cmpxchg
#define  atomic64_cmpxchg_release	atomic64_cmpxchg

#else /* atomic64_cmpxchg_relaxed */

#ifndef atomic64_cmpxchg_acquire
#define  atomic64_cmpxchg_acquire(...)					\
	__atomic_op_acquire(atomic64_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic64_cmpxchg_release
#define  atomic64_cmpxchg_release(...)					\
	__atomic_op_release(atomic64_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic64_cmpxchg
#define  atomic64_cmpxchg(...)						\
	__atomic_op_fence(atomic64_cmpxchg, __VA_ARGS__)
#endif
#endif /* atomic64_cmpxchg_relaxed */

#ifndef atomic64_try_cmpxchg

/*
 * 64-bit cmpxchg with a boolean success result; on failure the "old"
 * value at *_po is updated to the value actually observed.
 */
#define __atomic64_try_cmpxchg(type, _p, _po, _n)			\
({									\
	typeof(_po) __po = (_po);					\
	typeof(*(_po)) __o = *__po;					\
	*__po = atomic64_cmpxchg##type((_p), __o, (_n));		\
	(*__po == __o);							\
})

#define atomic64_try_cmpxchg(_p, _po, _n)		__atomic64_try_cmpxchg(, _p, _po, _n)
#define atomic64_try_cmpxchg_relaxed(_p, _po, _n)	__atomic64_try_cmpxchg(_relaxed, _p, _po, _n)
#define atomic64_try_cmpxchg_acquire(_p, _po, _n)	__atomic64_try_cmpxchg(_acquire, _p, _po, _n)
#define atomic64_try_cmpxchg_release(_p, _po, _n)	__atomic64_try_cmpxchg(_release, _p, _po, _n)

#else /* atomic64_try_cmpxchg */
#define atomic64_try_cmpxchg_relaxed	atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_acquire	atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_release	atomic64_try_cmpxchg
#endif /* atomic64_try_cmpxchg */
1041 #ifndef atomic64_andnot
1042 static inline void atomic64_andnot(long long i, atomic64_t *v)
1044 atomic64_and(~i, v);
1047 static inline long long atomic64_fetch_andnot(long long i, atomic64_t *v)
1049 return atomic64_fetch_and(~i, v);
1052 static inline long long atomic64_fetch_andnot_relaxed(long long i, atomic64_t *v)
1054 return atomic64_fetch_and_relaxed(~i, v);
1057 static inline long long atomic64_fetch_andnot_acquire(long long i, atomic64_t *v)
1059 return atomic64_fetch_and_acquire(~i, v);
1062 static inline long long atomic64_fetch_andnot_release(long long i, atomic64_t *v)
1064 return atomic64_fetch_and_release(~i, v);
1068 #include <asm-generic/atomic-long.h>
1070 #endif /* _LINUX_ATOMIC_H */