// SPDX-License-Identifier: GPL-2.0

// Generated by scripts/atomic/gen-atomic-fallback.sh
// DO NOT MODIFY THIS FILE DIRECTLY

#ifndef _LINUX_ATOMIC_FALLBACK_H
#define _LINUX_ATOMIC_FALLBACK_H

#include <linux/compiler.h>
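/*
 * Each operation below comes in four ordering variants: _relaxed,
 * _acquire, _release and a fully ordered default. Where an architecture
 * provides only the fully ordered operation, the weaker variants are
 * defined to alias it; where it provides the _relaxed variant, the
 * stronger ones are composed from it with explicit fences.
 */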
#ifndef arch_xchg_relaxed
#define arch_xchg_acquire arch_xchg
#define arch_xchg_release arch_xchg
#define arch_xchg_relaxed arch_xchg
#else /* arch_xchg_relaxed */

#ifndef arch_xchg_acquire
#define arch_xchg_acquire(...) \
	__atomic_op_acquire(arch_xchg, __VA_ARGS__)
#endif

#ifndef arch_xchg_release
#define arch_xchg_release(...) \
	__atomic_op_release(arch_xchg, __VA_ARGS__)
#endif

#ifndef arch_xchg
#define arch_xchg(...) \
	__atomic_op_fence(arch_xchg, __VA_ARGS__)
#endif

#endif /* arch_xchg_relaxed */
#ifndef arch_cmpxchg_relaxed
#define arch_cmpxchg_acquire arch_cmpxchg
#define arch_cmpxchg_release arch_cmpxchg
#define arch_cmpxchg_relaxed arch_cmpxchg
#else /* arch_cmpxchg_relaxed */

#ifndef arch_cmpxchg_acquire
#define arch_cmpxchg_acquire(...) \
	__atomic_op_acquire(arch_cmpxchg, __VA_ARGS__)
#endif

#ifndef arch_cmpxchg_release
#define arch_cmpxchg_release(...) \
	__atomic_op_release(arch_cmpxchg, __VA_ARGS__)
#endif

#ifndef arch_cmpxchg
#define arch_cmpxchg(...) \
	__atomic_op_fence(arch_cmpxchg, __VA_ARGS__)
#endif

#endif /* arch_cmpxchg_relaxed */
#ifndef arch_cmpxchg64_relaxed
#define arch_cmpxchg64_acquire arch_cmpxchg64
#define arch_cmpxchg64_release arch_cmpxchg64
#define arch_cmpxchg64_relaxed arch_cmpxchg64
#else /* arch_cmpxchg64_relaxed */

#ifndef arch_cmpxchg64_acquire
#define arch_cmpxchg64_acquire(...) \
	__atomic_op_acquire(arch_cmpxchg64, __VA_ARGS__)
#endif

#ifndef arch_cmpxchg64_release
#define arch_cmpxchg64_release(...) \
	__atomic_op_release(arch_cmpxchg64, __VA_ARGS__)
#endif

#ifndef arch_cmpxchg64
#define arch_cmpxchg64(...) \
	__atomic_op_fence(arch_cmpxchg64, __VA_ARGS__)
#endif

#endif /* arch_cmpxchg64_relaxed */
#ifndef arch_try_cmpxchg_relaxed
#ifdef arch_try_cmpxchg
#define arch_try_cmpxchg_acquire arch_try_cmpxchg
#define arch_try_cmpxchg_release arch_try_cmpxchg
#define arch_try_cmpxchg_relaxed arch_try_cmpxchg
#endif /* arch_try_cmpxchg */

#ifndef arch_try_cmpxchg
#define arch_try_cmpxchg(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = arch_cmpxchg((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif /* arch_try_cmpxchg */

#ifndef arch_try_cmpxchg_acquire
#define arch_try_cmpxchg_acquire(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = arch_cmpxchg_acquire((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif /* arch_try_cmpxchg_acquire */

#ifndef arch_try_cmpxchg_release
#define arch_try_cmpxchg_release(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = arch_cmpxchg_release((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif /* arch_try_cmpxchg_release */

#ifndef arch_try_cmpxchg_relaxed
#define arch_try_cmpxchg_relaxed(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = arch_cmpxchg_relaxed((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif /* arch_try_cmpxchg_relaxed */

#else /* arch_try_cmpxchg_relaxed */

#ifndef arch_try_cmpxchg_acquire
#define arch_try_cmpxchg_acquire(...) \
	__atomic_op_acquire(arch_try_cmpxchg, __VA_ARGS__)
#endif

#ifndef arch_try_cmpxchg_release
#define arch_try_cmpxchg_release(...) \
	__atomic_op_release(arch_try_cmpxchg, __VA_ARGS__)
#endif

#ifndef arch_try_cmpxchg
#define arch_try_cmpxchg(...) \
	__atomic_op_fence(arch_try_cmpxchg, __VA_ARGS__)
#endif

#endif /* arch_try_cmpxchg_relaxed */
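/*
 * The arch_try_cmpxchg*() fallbacks above wrap the corresponding
 * arch_cmpxchg*() form: they return true when *_ptr matched *_oldp and
 * was replaced by _new, and on failure write the value actually observed
 * back through _oldp, so a caller can retry with a refreshed expectation.
 */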
#ifndef arch_try_cmpxchg64_relaxed
#ifdef arch_try_cmpxchg64
#define arch_try_cmpxchg64_acquire arch_try_cmpxchg64
#define arch_try_cmpxchg64_release arch_try_cmpxchg64
#define arch_try_cmpxchg64_relaxed arch_try_cmpxchg64
#endif /* arch_try_cmpxchg64 */

#ifndef arch_try_cmpxchg64
#define arch_try_cmpxchg64(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = arch_cmpxchg64((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif /* arch_try_cmpxchg64 */

#ifndef arch_try_cmpxchg64_acquire
#define arch_try_cmpxchg64_acquire(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = arch_cmpxchg64_acquire((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif /* arch_try_cmpxchg64_acquire */

#ifndef arch_try_cmpxchg64_release
#define arch_try_cmpxchg64_release(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = arch_cmpxchg64_release((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif /* arch_try_cmpxchg64_release */

#ifndef arch_try_cmpxchg64_relaxed
#define arch_try_cmpxchg64_relaxed(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = arch_cmpxchg64_relaxed((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif /* arch_try_cmpxchg64_relaxed */

#else /* arch_try_cmpxchg64_relaxed */

#ifndef arch_try_cmpxchg64_acquire
#define arch_try_cmpxchg64_acquire(...) \
	__atomic_op_acquire(arch_try_cmpxchg64, __VA_ARGS__)
#endif

#ifndef arch_try_cmpxchg64_release
#define arch_try_cmpxchg64_release(...) \
	__atomic_op_release(arch_try_cmpxchg64, __VA_ARGS__)
#endif

#ifndef arch_try_cmpxchg64
#define arch_try_cmpxchg64(...) \
	__atomic_op_fence(arch_try_cmpxchg64, __VA_ARGS__)
#endif

#endif /* arch_try_cmpxchg64_relaxed */
#ifndef arch_atomic_read_acquire
static __always_inline int
arch_atomic_read_acquire(const atomic_t *v)
{
	int ret;

	if (__native_word(atomic_t)) {
		ret = smp_load_acquire(&(v)->counter);
	} else {
		ret = arch_atomic_read(v);
		__atomic_acquire_fence();
	}

	return ret;
}
#define arch_atomic_read_acquire arch_atomic_read_acquire
#endif

#ifndef arch_atomic_set_release
static __always_inline void
arch_atomic_set_release(atomic_t *v, int i)
{
	if (__native_word(atomic_t)) {
		smp_store_release(&(v)->counter, i);
	} else {
		__atomic_release_fence();
		arch_atomic_set(v, i);
	}
}
#define arch_atomic_set_release arch_atomic_set_release
#endif
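/*
 * When atomic_t is a native machine word, the acquire read and release
 * set above are implemented directly with smp_load_acquire() and
 * smp_store_release() on v->counter; otherwise they fall back to the
 * plain access paired with an explicit acquire/release fence.
 */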
#ifndef arch_atomic_add_return_relaxed
#define arch_atomic_add_return_acquire arch_atomic_add_return
#define arch_atomic_add_return_release arch_atomic_add_return
#define arch_atomic_add_return_relaxed arch_atomic_add_return
#else /* arch_atomic_add_return_relaxed */

#ifndef arch_atomic_add_return_acquire
static __always_inline int
arch_atomic_add_return_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_add_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_add_return_acquire arch_atomic_add_return_acquire
#endif

#ifndef arch_atomic_add_return_release
static __always_inline int
arch_atomic_add_return_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_add_return_relaxed(i, v);
}
#define arch_atomic_add_return_release arch_atomic_add_return_release
#endif

#ifndef arch_atomic_add_return
static __always_inline int
arch_atomic_add_return(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_add_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_add_return arch_atomic_add_return
#endif

#endif /* arch_atomic_add_return_relaxed */
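/*
 * This construction repeats for every RMW operation with a _relaxed
 * form: _acquire runs the relaxed op and then __atomic_acquire_fence(),
 * _release runs __atomic_release_fence() before the relaxed op, and the
 * fully ordered version brackets the relaxed op between
 * __atomic_pre_full_fence() and __atomic_post_full_fence().
 */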
#ifndef arch_atomic_fetch_add_relaxed
#define arch_atomic_fetch_add_acquire arch_atomic_fetch_add
#define arch_atomic_fetch_add_release arch_atomic_fetch_add
#define arch_atomic_fetch_add_relaxed arch_atomic_fetch_add
#else /* arch_atomic_fetch_add_relaxed */

#ifndef arch_atomic_fetch_add_acquire
static __always_inline int
arch_atomic_fetch_add_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_fetch_add_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_fetch_add_acquire arch_atomic_fetch_add_acquire
#endif

#ifndef arch_atomic_fetch_add_release
static __always_inline int
arch_atomic_fetch_add_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_fetch_add_relaxed(i, v);
}
#define arch_atomic_fetch_add_release arch_atomic_fetch_add_release
#endif

#ifndef arch_atomic_fetch_add
static __always_inline int
arch_atomic_fetch_add(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_add_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_fetch_add arch_atomic_fetch_add
#endif

#endif /* arch_atomic_fetch_add_relaxed */
#ifndef arch_atomic_sub_return_relaxed
#define arch_atomic_sub_return_acquire arch_atomic_sub_return
#define arch_atomic_sub_return_release arch_atomic_sub_return
#define arch_atomic_sub_return_relaxed arch_atomic_sub_return
#else /* arch_atomic_sub_return_relaxed */

#ifndef arch_atomic_sub_return_acquire
static __always_inline int
arch_atomic_sub_return_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_sub_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_sub_return_acquire arch_atomic_sub_return_acquire
#endif

#ifndef arch_atomic_sub_return_release
static __always_inline int
arch_atomic_sub_return_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_sub_return_relaxed(i, v);
}
#define arch_atomic_sub_return_release arch_atomic_sub_return_release
#endif

#ifndef arch_atomic_sub_return
static __always_inline int
arch_atomic_sub_return(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_sub_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_sub_return arch_atomic_sub_return
#endif

#endif /* arch_atomic_sub_return_relaxed */
#ifndef arch_atomic_fetch_sub_relaxed
#define arch_atomic_fetch_sub_acquire arch_atomic_fetch_sub
#define arch_atomic_fetch_sub_release arch_atomic_fetch_sub
#define arch_atomic_fetch_sub_relaxed arch_atomic_fetch_sub
#else /* arch_atomic_fetch_sub_relaxed */

#ifndef arch_atomic_fetch_sub_acquire
static __always_inline int
arch_atomic_fetch_sub_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_fetch_sub_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_fetch_sub_acquire arch_atomic_fetch_sub_acquire
#endif

#ifndef arch_atomic_fetch_sub_release
static __always_inline int
arch_atomic_fetch_sub_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_fetch_sub_relaxed(i, v);
}
#define arch_atomic_fetch_sub_release arch_atomic_fetch_sub_release
#endif

#ifndef arch_atomic_fetch_sub
static __always_inline int
arch_atomic_fetch_sub(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_sub_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_fetch_sub arch_atomic_fetch_sub
#endif

#endif /* arch_atomic_fetch_sub_relaxed */
#ifndef arch_atomic_inc
static __always_inline void
arch_atomic_inc(atomic_t *v)
{
	arch_atomic_add(1, v);
}
#define arch_atomic_inc arch_atomic_inc
#endif

#ifndef arch_atomic_inc_return_relaxed
#ifdef arch_atomic_inc_return
#define arch_atomic_inc_return_acquire arch_atomic_inc_return
#define arch_atomic_inc_return_release arch_atomic_inc_return
#define arch_atomic_inc_return_relaxed arch_atomic_inc_return
#endif /* arch_atomic_inc_return */

#ifndef arch_atomic_inc_return
static __always_inline int
arch_atomic_inc_return(atomic_t *v)
{
	return arch_atomic_add_return(1, v);
}
#define arch_atomic_inc_return arch_atomic_inc_return
#endif

#ifndef arch_atomic_inc_return_acquire
static __always_inline int
arch_atomic_inc_return_acquire(atomic_t *v)
{
	return arch_atomic_add_return_acquire(1, v);
}
#define arch_atomic_inc_return_acquire arch_atomic_inc_return_acquire
#endif

#ifndef arch_atomic_inc_return_release
static __always_inline int
arch_atomic_inc_return_release(atomic_t *v)
{
	return arch_atomic_add_return_release(1, v);
}
#define arch_atomic_inc_return_release arch_atomic_inc_return_release
#endif

#ifndef arch_atomic_inc_return_relaxed
static __always_inline int
arch_atomic_inc_return_relaxed(atomic_t *v)
{
	return arch_atomic_add_return_relaxed(1, v);
}
#define arch_atomic_inc_return_relaxed arch_atomic_inc_return_relaxed
#endif

#else /* arch_atomic_inc_return_relaxed */

#ifndef arch_atomic_inc_return_acquire
static __always_inline int
arch_atomic_inc_return_acquire(atomic_t *v)
{
	int ret = arch_atomic_inc_return_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_inc_return_acquire arch_atomic_inc_return_acquire
#endif

#ifndef arch_atomic_inc_return_release
static __always_inline int
arch_atomic_inc_return_release(atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_inc_return_relaxed(v);
}
#define arch_atomic_inc_return_release arch_atomic_inc_return_release
#endif

#ifndef arch_atomic_inc_return
static __always_inline int
arch_atomic_inc_return(atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_inc_return_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_inc_return arch_atomic_inc_return
#endif

#endif /* arch_atomic_inc_return_relaxed */
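/*
 * As arch_atomic_inc() shows above, every inc/dec variant an architecture
 * does not supply is generated from the corresponding add/sub operation
 * with a constant 1, e.g. arch_atomic_inc_return(v) falls back to
 * arch_atomic_add_return(1, v).
 */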
#ifndef arch_atomic_fetch_inc_relaxed
#ifdef arch_atomic_fetch_inc
#define arch_atomic_fetch_inc_acquire arch_atomic_fetch_inc
#define arch_atomic_fetch_inc_release arch_atomic_fetch_inc
#define arch_atomic_fetch_inc_relaxed arch_atomic_fetch_inc
#endif /* arch_atomic_fetch_inc */

#ifndef arch_atomic_fetch_inc
static __always_inline int
arch_atomic_fetch_inc(atomic_t *v)
{
	return arch_atomic_fetch_add(1, v);
}
#define arch_atomic_fetch_inc arch_atomic_fetch_inc
#endif

#ifndef arch_atomic_fetch_inc_acquire
static __always_inline int
arch_atomic_fetch_inc_acquire(atomic_t *v)
{
	return arch_atomic_fetch_add_acquire(1, v);
}
#define arch_atomic_fetch_inc_acquire arch_atomic_fetch_inc_acquire
#endif

#ifndef arch_atomic_fetch_inc_release
static __always_inline int
arch_atomic_fetch_inc_release(atomic_t *v)
{
	return arch_atomic_fetch_add_release(1, v);
}
#define arch_atomic_fetch_inc_release arch_atomic_fetch_inc_release
#endif

#ifndef arch_atomic_fetch_inc_relaxed
static __always_inline int
arch_atomic_fetch_inc_relaxed(atomic_t *v)
{
	return arch_atomic_fetch_add_relaxed(1, v);
}
#define arch_atomic_fetch_inc_relaxed arch_atomic_fetch_inc_relaxed
#endif

#else /* arch_atomic_fetch_inc_relaxed */

#ifndef arch_atomic_fetch_inc_acquire
static __always_inline int
arch_atomic_fetch_inc_acquire(atomic_t *v)
{
	int ret = arch_atomic_fetch_inc_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_fetch_inc_acquire arch_atomic_fetch_inc_acquire
#endif

#ifndef arch_atomic_fetch_inc_release
static __always_inline int
arch_atomic_fetch_inc_release(atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_fetch_inc_relaxed(v);
}
#define arch_atomic_fetch_inc_release arch_atomic_fetch_inc_release
#endif

#ifndef arch_atomic_fetch_inc
static __always_inline int
arch_atomic_fetch_inc(atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_inc_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_fetch_inc arch_atomic_fetch_inc
#endif

#endif /* arch_atomic_fetch_inc_relaxed */
#ifndef arch_atomic_dec
static __always_inline void
arch_atomic_dec(atomic_t *v)
{
	arch_atomic_sub(1, v);
}
#define arch_atomic_dec arch_atomic_dec
#endif

#ifndef arch_atomic_dec_return_relaxed
#ifdef arch_atomic_dec_return
#define arch_atomic_dec_return_acquire arch_atomic_dec_return
#define arch_atomic_dec_return_release arch_atomic_dec_return
#define arch_atomic_dec_return_relaxed arch_atomic_dec_return
#endif /* arch_atomic_dec_return */

#ifndef arch_atomic_dec_return
static __always_inline int
arch_atomic_dec_return(atomic_t *v)
{
	return arch_atomic_sub_return(1, v);
}
#define arch_atomic_dec_return arch_atomic_dec_return
#endif

#ifndef arch_atomic_dec_return_acquire
static __always_inline int
arch_atomic_dec_return_acquire(atomic_t *v)
{
	return arch_atomic_sub_return_acquire(1, v);
}
#define arch_atomic_dec_return_acquire arch_atomic_dec_return_acquire
#endif

#ifndef arch_atomic_dec_return_release
static __always_inline int
arch_atomic_dec_return_release(atomic_t *v)
{
	return arch_atomic_sub_return_release(1, v);
}
#define arch_atomic_dec_return_release arch_atomic_dec_return_release
#endif

#ifndef arch_atomic_dec_return_relaxed
static __always_inline int
arch_atomic_dec_return_relaxed(atomic_t *v)
{
	return arch_atomic_sub_return_relaxed(1, v);
}
#define arch_atomic_dec_return_relaxed arch_atomic_dec_return_relaxed
#endif

#else /* arch_atomic_dec_return_relaxed */

#ifndef arch_atomic_dec_return_acquire
static __always_inline int
arch_atomic_dec_return_acquire(atomic_t *v)
{
	int ret = arch_atomic_dec_return_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_dec_return_acquire arch_atomic_dec_return_acquire
#endif

#ifndef arch_atomic_dec_return_release
static __always_inline int
arch_atomic_dec_return_release(atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_dec_return_relaxed(v);
}
#define arch_atomic_dec_return_release arch_atomic_dec_return_release
#endif

#ifndef arch_atomic_dec_return
static __always_inline int
arch_atomic_dec_return(atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_dec_return_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_dec_return arch_atomic_dec_return
#endif

#endif /* arch_atomic_dec_return_relaxed */
#ifndef arch_atomic_fetch_dec_relaxed
#ifdef arch_atomic_fetch_dec
#define arch_atomic_fetch_dec_acquire arch_atomic_fetch_dec
#define arch_atomic_fetch_dec_release arch_atomic_fetch_dec
#define arch_atomic_fetch_dec_relaxed arch_atomic_fetch_dec
#endif /* arch_atomic_fetch_dec */

#ifndef arch_atomic_fetch_dec
static __always_inline int
arch_atomic_fetch_dec(atomic_t *v)
{
	return arch_atomic_fetch_sub(1, v);
}
#define arch_atomic_fetch_dec arch_atomic_fetch_dec
#endif

#ifndef arch_atomic_fetch_dec_acquire
static __always_inline int
arch_atomic_fetch_dec_acquire(atomic_t *v)
{
	return arch_atomic_fetch_sub_acquire(1, v);
}
#define arch_atomic_fetch_dec_acquire arch_atomic_fetch_dec_acquire
#endif

#ifndef arch_atomic_fetch_dec_release
static __always_inline int
arch_atomic_fetch_dec_release(atomic_t *v)
{
	return arch_atomic_fetch_sub_release(1, v);
}
#define arch_atomic_fetch_dec_release arch_atomic_fetch_dec_release
#endif

#ifndef arch_atomic_fetch_dec_relaxed
static __always_inline int
arch_atomic_fetch_dec_relaxed(atomic_t *v)
{
	return arch_atomic_fetch_sub_relaxed(1, v);
}
#define arch_atomic_fetch_dec_relaxed arch_atomic_fetch_dec_relaxed
#endif

#else /* arch_atomic_fetch_dec_relaxed */

#ifndef arch_atomic_fetch_dec_acquire
static __always_inline int
arch_atomic_fetch_dec_acquire(atomic_t *v)
{
	int ret = arch_atomic_fetch_dec_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_fetch_dec_acquire arch_atomic_fetch_dec_acquire
#endif

#ifndef arch_atomic_fetch_dec_release
static __always_inline int
arch_atomic_fetch_dec_release(atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_fetch_dec_relaxed(v);
}
#define arch_atomic_fetch_dec_release arch_atomic_fetch_dec_release
#endif

#ifndef arch_atomic_fetch_dec
static __always_inline int
arch_atomic_fetch_dec(atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_dec_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_fetch_dec arch_atomic_fetch_dec
#endif

#endif /* arch_atomic_fetch_dec_relaxed */
#ifndef arch_atomic_fetch_and_relaxed
#define arch_atomic_fetch_and_acquire arch_atomic_fetch_and
#define arch_atomic_fetch_and_release arch_atomic_fetch_and
#define arch_atomic_fetch_and_relaxed arch_atomic_fetch_and
#else /* arch_atomic_fetch_and_relaxed */

#ifndef arch_atomic_fetch_and_acquire
static __always_inline int
arch_atomic_fetch_and_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_fetch_and_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_fetch_and_acquire arch_atomic_fetch_and_acquire
#endif

#ifndef arch_atomic_fetch_and_release
static __always_inline int
arch_atomic_fetch_and_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_fetch_and_relaxed(i, v);
}
#define arch_atomic_fetch_and_release arch_atomic_fetch_and_release
#endif

#ifndef arch_atomic_fetch_and
static __always_inline int
arch_atomic_fetch_and(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_and_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_fetch_and arch_atomic_fetch_and
#endif

#endif /* arch_atomic_fetch_and_relaxed */
#ifndef arch_atomic_andnot
static __always_inline void
arch_atomic_andnot(int i, atomic_t *v)
{
	arch_atomic_and(~i, v);
}
#define arch_atomic_andnot arch_atomic_andnot
#endif

#ifndef arch_atomic_fetch_andnot_relaxed
#ifdef arch_atomic_fetch_andnot
#define arch_atomic_fetch_andnot_acquire arch_atomic_fetch_andnot
#define arch_atomic_fetch_andnot_release arch_atomic_fetch_andnot
#define arch_atomic_fetch_andnot_relaxed arch_atomic_fetch_andnot
#endif /* arch_atomic_fetch_andnot */

#ifndef arch_atomic_fetch_andnot
static __always_inline int
arch_atomic_fetch_andnot(int i, atomic_t *v)
{
	return arch_atomic_fetch_and(~i, v);
}
#define arch_atomic_fetch_andnot arch_atomic_fetch_andnot
#endif

#ifndef arch_atomic_fetch_andnot_acquire
static __always_inline int
arch_atomic_fetch_andnot_acquire(int i, atomic_t *v)
{
	return arch_atomic_fetch_and_acquire(~i, v);
}
#define arch_atomic_fetch_andnot_acquire arch_atomic_fetch_andnot_acquire
#endif

#ifndef arch_atomic_fetch_andnot_release
static __always_inline int
arch_atomic_fetch_andnot_release(int i, atomic_t *v)
{
	return arch_atomic_fetch_and_release(~i, v);
}
#define arch_atomic_fetch_andnot_release arch_atomic_fetch_andnot_release
#endif

#ifndef arch_atomic_fetch_andnot_relaxed
static __always_inline int
arch_atomic_fetch_andnot_relaxed(int i, atomic_t *v)
{
	return arch_atomic_fetch_and_relaxed(~i, v);
}
#define arch_atomic_fetch_andnot_relaxed arch_atomic_fetch_andnot_relaxed
#endif

#else /* arch_atomic_fetch_andnot_relaxed */

#ifndef arch_atomic_fetch_andnot_acquire
static __always_inline int
arch_atomic_fetch_andnot_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_fetch_andnot_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_fetch_andnot_acquire arch_atomic_fetch_andnot_acquire
#endif

#ifndef arch_atomic_fetch_andnot_release
static __always_inline int
arch_atomic_fetch_andnot_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_fetch_andnot_relaxed(i, v);
}
#define arch_atomic_fetch_andnot_release arch_atomic_fetch_andnot_release
#endif

#ifndef arch_atomic_fetch_andnot
static __always_inline int
arch_atomic_fetch_andnot(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_andnot_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_fetch_andnot arch_atomic_fetch_andnot
#endif

#endif /* arch_atomic_fetch_andnot_relaxed */
#ifndef arch_atomic_fetch_or_relaxed
#define arch_atomic_fetch_or_acquire arch_atomic_fetch_or
#define arch_atomic_fetch_or_release arch_atomic_fetch_or
#define arch_atomic_fetch_or_relaxed arch_atomic_fetch_or
#else /* arch_atomic_fetch_or_relaxed */

#ifndef arch_atomic_fetch_or_acquire
static __always_inline int
arch_atomic_fetch_or_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_fetch_or_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_fetch_or_acquire arch_atomic_fetch_or_acquire
#endif

#ifndef arch_atomic_fetch_or_release
static __always_inline int
arch_atomic_fetch_or_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_fetch_or_relaxed(i, v);
}
#define arch_atomic_fetch_or_release arch_atomic_fetch_or_release
#endif

#ifndef arch_atomic_fetch_or
static __always_inline int
arch_atomic_fetch_or(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_or_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_fetch_or arch_atomic_fetch_or
#endif

#endif /* arch_atomic_fetch_or_relaxed */
#ifndef arch_atomic_fetch_xor_relaxed
#define arch_atomic_fetch_xor_acquire arch_atomic_fetch_xor
#define arch_atomic_fetch_xor_release arch_atomic_fetch_xor
#define arch_atomic_fetch_xor_relaxed arch_atomic_fetch_xor
#else /* arch_atomic_fetch_xor_relaxed */

#ifndef arch_atomic_fetch_xor_acquire
static __always_inline int
arch_atomic_fetch_xor_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_fetch_xor_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_fetch_xor_acquire arch_atomic_fetch_xor_acquire
#endif

#ifndef arch_atomic_fetch_xor_release
static __always_inline int
arch_atomic_fetch_xor_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_fetch_xor_relaxed(i, v);
}
#define arch_atomic_fetch_xor_release arch_atomic_fetch_xor_release
#endif

#ifndef arch_atomic_fetch_xor
static __always_inline int
arch_atomic_fetch_xor(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_xor_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_fetch_xor arch_atomic_fetch_xor
#endif

#endif /* arch_atomic_fetch_xor_relaxed */
#ifndef arch_atomic_xchg_relaxed
#define arch_atomic_xchg_acquire arch_atomic_xchg
#define arch_atomic_xchg_release arch_atomic_xchg
#define arch_atomic_xchg_relaxed arch_atomic_xchg
#else /* arch_atomic_xchg_relaxed */

#ifndef arch_atomic_xchg_acquire
static __always_inline int
arch_atomic_xchg_acquire(atomic_t *v, int i)
{
	int ret = arch_atomic_xchg_relaxed(v, i);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_xchg_acquire arch_atomic_xchg_acquire
#endif

#ifndef arch_atomic_xchg_release
static __always_inline int
arch_atomic_xchg_release(atomic_t *v, int i)
{
	__atomic_release_fence();
	return arch_atomic_xchg_relaxed(v, i);
}
#define arch_atomic_xchg_release arch_atomic_xchg_release
#endif

#ifndef arch_atomic_xchg
static __always_inline int
arch_atomic_xchg(atomic_t *v, int i)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_xchg_relaxed(v, i);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_xchg arch_atomic_xchg
#endif

#endif /* arch_atomic_xchg_relaxed */
#ifndef arch_atomic_cmpxchg_relaxed
#define arch_atomic_cmpxchg_acquire arch_atomic_cmpxchg
#define arch_atomic_cmpxchg_release arch_atomic_cmpxchg
#define arch_atomic_cmpxchg_relaxed arch_atomic_cmpxchg
#else /* arch_atomic_cmpxchg_relaxed */

#ifndef arch_atomic_cmpxchg_acquire
static __always_inline int
arch_atomic_cmpxchg_acquire(atomic_t *v, int old, int new)
{
	int ret = arch_atomic_cmpxchg_relaxed(v, old, new);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_cmpxchg_acquire arch_atomic_cmpxchg_acquire
#endif

#ifndef arch_atomic_cmpxchg_release
static __always_inline int
arch_atomic_cmpxchg_release(atomic_t *v, int old, int new)
{
	__atomic_release_fence();
	return arch_atomic_cmpxchg_relaxed(v, old, new);
}
#define arch_atomic_cmpxchg_release arch_atomic_cmpxchg_release
#endif

#ifndef arch_atomic_cmpxchg
static __always_inline int
arch_atomic_cmpxchg(atomic_t *v, int old, int new)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_cmpxchg_relaxed(v, old, new);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_cmpxchg arch_atomic_cmpxchg
#endif

#endif /* arch_atomic_cmpxchg_relaxed */
#ifndef arch_atomic_try_cmpxchg_relaxed
#ifdef arch_atomic_try_cmpxchg
#define arch_atomic_try_cmpxchg_acquire arch_atomic_try_cmpxchg
#define arch_atomic_try_cmpxchg_release arch_atomic_try_cmpxchg
#define arch_atomic_try_cmpxchg_relaxed arch_atomic_try_cmpxchg
#endif /* arch_atomic_try_cmpxchg */

#ifndef arch_atomic_try_cmpxchg
static __always_inline bool
arch_atomic_try_cmpxchg(atomic_t *v, int *old, int new)
{
	int r, o = *old;
	r = arch_atomic_cmpxchg(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define arch_atomic_try_cmpxchg arch_atomic_try_cmpxchg
#endif

#ifndef arch_atomic_try_cmpxchg_acquire
static __always_inline bool
arch_atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
{
	int r, o = *old;
	r = arch_atomic_cmpxchg_acquire(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define arch_atomic_try_cmpxchg_acquire arch_atomic_try_cmpxchg_acquire
#endif

#ifndef arch_atomic_try_cmpxchg_release
static __always_inline bool
arch_atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
{
	int r, o = *old;
	r = arch_atomic_cmpxchg_release(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define arch_atomic_try_cmpxchg_release arch_atomic_try_cmpxchg_release
#endif

#ifndef arch_atomic_try_cmpxchg_relaxed
static __always_inline bool
arch_atomic_try_cmpxchg_relaxed(atomic_t *v, int *old, int new)
{
	int r, o = *old;
	r = arch_atomic_cmpxchg_relaxed(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define arch_atomic_try_cmpxchg_relaxed arch_atomic_try_cmpxchg_relaxed
#endif

#else /* arch_atomic_try_cmpxchg_relaxed */

#ifndef arch_atomic_try_cmpxchg_acquire
static __always_inline bool
arch_atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
{
	bool ret = arch_atomic_try_cmpxchg_relaxed(v, old, new);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_try_cmpxchg_acquire arch_atomic_try_cmpxchg_acquire
#endif

#ifndef arch_atomic_try_cmpxchg_release
static __always_inline bool
arch_atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
{
	__atomic_release_fence();
	return arch_atomic_try_cmpxchg_relaxed(v, old, new);
}
#define arch_atomic_try_cmpxchg_release arch_atomic_try_cmpxchg_release
#endif

#ifndef arch_atomic_try_cmpxchg
static __always_inline bool
arch_atomic_try_cmpxchg(atomic_t *v, int *old, int new)
{
	bool ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_try_cmpxchg_relaxed(v, old, new);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_try_cmpxchg arch_atomic_try_cmpxchg
#endif

#endif /* arch_atomic_try_cmpxchg_relaxed */
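/*
 * As with the generic arch_try_cmpxchg() macros earlier in this file,
 * the arch_atomic_try_cmpxchg*() fallbacks above are built from the
 * corresponding arch_atomic_cmpxchg*() call and update *old on failure.
 * The conditional operations further below rely on exactly this
 * behaviour to retry with a refreshed expected value.
 */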
#ifndef arch_atomic_sub_and_test
/**
 * arch_atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static __always_inline bool
arch_atomic_sub_and_test(int i, atomic_t *v)
{
	return arch_atomic_sub_return(i, v) == 0;
}
#define arch_atomic_sub_and_test arch_atomic_sub_and_test
#endif

#ifndef arch_atomic_dec_and_test
/**
 * arch_atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static __always_inline bool
arch_atomic_dec_and_test(atomic_t *v)
{
	return arch_atomic_dec_return(v) == 0;
}
#define arch_atomic_dec_and_test arch_atomic_dec_and_test
#endif

#ifndef arch_atomic_inc_and_test
/**
 * arch_atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static __always_inline bool
arch_atomic_inc_and_test(atomic_t *v)
{
	return arch_atomic_inc_return(v) == 0;
}
#define arch_atomic_inc_and_test arch_atomic_inc_and_test
#endif

#ifndef arch_atomic_add_negative
/**
 * arch_atomic_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static __always_inline bool
arch_atomic_add_negative(int i, atomic_t *v)
{
	return arch_atomic_add_return(i, v) < 0;
}
#define arch_atomic_add_negative arch_atomic_add_negative
#endif
#ifndef arch_atomic_fetch_add_unless
/**
 * arch_atomic_fetch_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns original value of @v
 */
static __always_inline int
arch_atomic_fetch_add_unless(atomic_t *v, int a, int u)
{
	int c = arch_atomic_read(v);

	do {
		if (unlikely(c == u))
			break;
	} while (!arch_atomic_try_cmpxchg(v, &c, c + a));

	return c;
}
#define arch_atomic_fetch_add_unless arch_atomic_fetch_add_unless
#endif

#ifndef arch_atomic_add_unless
/**
 * arch_atomic_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, if @v was not already @u.
 * Returns true if the addition was done.
 */
static __always_inline bool
arch_atomic_add_unless(atomic_t *v, int a, int u)
{
	return arch_atomic_fetch_add_unless(v, a, u) != u;
}
#define arch_atomic_add_unless arch_atomic_add_unless
#endif

#ifndef arch_atomic_inc_not_zero
/**
 * arch_atomic_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1, if @v is non-zero.
 * Returns true if the increment was done.
 */
static __always_inline bool
arch_atomic_inc_not_zero(atomic_t *v)
{
	return arch_atomic_add_unless(v, 1, 0);
}
#define arch_atomic_inc_not_zero arch_atomic_inc_not_zero
#endif
#ifndef arch_atomic_inc_unless_negative
static __always_inline bool
arch_atomic_inc_unless_negative(atomic_t *v)
{
	int c = arch_atomic_read(v);

	do {
		if (unlikely(c < 0))
			return false;
	} while (!arch_atomic_try_cmpxchg(v, &c, c + 1));

	return true;
}
#define arch_atomic_inc_unless_negative arch_atomic_inc_unless_negative
#endif

#ifndef arch_atomic_dec_unless_positive
static __always_inline bool
arch_atomic_dec_unless_positive(atomic_t *v)
{
	int c = arch_atomic_read(v);

	do {
		if (unlikely(c > 0))
			return false;
	} while (!arch_atomic_try_cmpxchg(v, &c, c - 1));

	return true;
}
#define arch_atomic_dec_unless_positive arch_atomic_dec_unless_positive
#endif

#ifndef arch_atomic_dec_if_positive
static __always_inline int
arch_atomic_dec_if_positive(atomic_t *v)
{
	int dec, c = arch_atomic_read(v);

	do {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
	} while (!arch_atomic_try_cmpxchg(v, &c, dec));

	return dec;
}
#define arch_atomic_dec_if_positive arch_atomic_dec_if_positive
#endif
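/*
 * The conditional operations above all follow the same lock-free idiom:
 * read the current value once, then loop on try_cmpxchg() so that each
 * failed attempt refreshes the expected value. For example (illustrative),
 * arch_atomic_dec_if_positive() on a counter holding 3 stores 2 and
 * returns 2; on a counter holding 0 it returns -1 and leaves the counter
 * untouched.
 */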
#ifdef CONFIG_GENERIC_ATOMIC64
#include <asm-generic/atomic64.h>
#endif
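/*
 * Architectures without native 64-bit atomics pull in the generic
 * lock-based implementation from asm-generic/atomic64.h; everything
 * below mirrors the atomic_t operations above using atomic64_t and s64.
 */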
#ifndef arch_atomic64_read_acquire
static __always_inline s64
arch_atomic64_read_acquire(const atomic64_t *v)
{
	s64 ret;

	if (__native_word(atomic64_t)) {
		ret = smp_load_acquire(&(v)->counter);
	} else {
		ret = arch_atomic64_read(v);
		__atomic_acquire_fence();
	}

	return ret;
}
#define arch_atomic64_read_acquire arch_atomic64_read_acquire
#endif

#ifndef arch_atomic64_set_release
static __always_inline void
arch_atomic64_set_release(atomic64_t *v, s64 i)
{
	if (__native_word(atomic64_t)) {
		smp_store_release(&(v)->counter, i);
	} else {
		__atomic_release_fence();
		arch_atomic64_set(v, i);
	}
}
#define arch_atomic64_set_release arch_atomic64_set_release
#endif
#ifndef arch_atomic64_add_return_relaxed
#define arch_atomic64_add_return_acquire arch_atomic64_add_return
#define arch_atomic64_add_return_release arch_atomic64_add_return
#define arch_atomic64_add_return_relaxed arch_atomic64_add_return
#else /* arch_atomic64_add_return_relaxed */

#ifndef arch_atomic64_add_return_acquire
static __always_inline s64
arch_atomic64_add_return_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_add_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_add_return_acquire arch_atomic64_add_return_acquire
#endif

#ifndef arch_atomic64_add_return_release
static __always_inline s64
arch_atomic64_add_return_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_add_return_relaxed(i, v);
}
#define arch_atomic64_add_return_release arch_atomic64_add_return_release
#endif

#ifndef arch_atomic64_add_return
static __always_inline s64
arch_atomic64_add_return(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_add_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_add_return arch_atomic64_add_return
#endif

#endif /* arch_atomic64_add_return_relaxed */
#ifndef arch_atomic64_fetch_add_relaxed
#define arch_atomic64_fetch_add_acquire arch_atomic64_fetch_add
#define arch_atomic64_fetch_add_release arch_atomic64_fetch_add
#define arch_atomic64_fetch_add_relaxed arch_atomic64_fetch_add
#else /* arch_atomic64_fetch_add_relaxed */

#ifndef arch_atomic64_fetch_add_acquire
static __always_inline s64
arch_atomic64_fetch_add_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_fetch_add_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_fetch_add_acquire arch_atomic64_fetch_add_acquire
#endif

#ifndef arch_atomic64_fetch_add_release
static __always_inline s64
arch_atomic64_fetch_add_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_fetch_add_relaxed(i, v);
}
#define arch_atomic64_fetch_add_release arch_atomic64_fetch_add_release
#endif

#ifndef arch_atomic64_fetch_add
static __always_inline s64
arch_atomic64_fetch_add(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_fetch_add_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_fetch_add arch_atomic64_fetch_add
#endif

#endif /* arch_atomic64_fetch_add_relaxed */
#ifndef arch_atomic64_sub_return_relaxed
#define arch_atomic64_sub_return_acquire arch_atomic64_sub_return
#define arch_atomic64_sub_return_release arch_atomic64_sub_return
#define arch_atomic64_sub_return_relaxed arch_atomic64_sub_return
#else /* arch_atomic64_sub_return_relaxed */

#ifndef arch_atomic64_sub_return_acquire
static __always_inline s64
arch_atomic64_sub_return_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_sub_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_sub_return_acquire arch_atomic64_sub_return_acquire
#endif

#ifndef arch_atomic64_sub_return_release
static __always_inline s64
arch_atomic64_sub_return_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_sub_return_relaxed(i, v);
}
#define arch_atomic64_sub_return_release arch_atomic64_sub_return_release
#endif

#ifndef arch_atomic64_sub_return
static __always_inline s64
arch_atomic64_sub_return(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_sub_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_sub_return arch_atomic64_sub_return
#endif

#endif /* arch_atomic64_sub_return_relaxed */
#ifndef arch_atomic64_fetch_sub_relaxed
#define arch_atomic64_fetch_sub_acquire arch_atomic64_fetch_sub
#define arch_atomic64_fetch_sub_release arch_atomic64_fetch_sub
#define arch_atomic64_fetch_sub_relaxed arch_atomic64_fetch_sub
#else /* arch_atomic64_fetch_sub_relaxed */

#ifndef arch_atomic64_fetch_sub_acquire
static __always_inline s64
arch_atomic64_fetch_sub_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_fetch_sub_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_fetch_sub_acquire arch_atomic64_fetch_sub_acquire
#endif

#ifndef arch_atomic64_fetch_sub_release
static __always_inline s64
arch_atomic64_fetch_sub_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_fetch_sub_relaxed(i, v);
}
#define arch_atomic64_fetch_sub_release arch_atomic64_fetch_sub_release
#endif

#ifndef arch_atomic64_fetch_sub
static __always_inline s64
arch_atomic64_fetch_sub(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_fetch_sub_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_fetch_sub arch_atomic64_fetch_sub
#endif

#endif /* arch_atomic64_fetch_sub_relaxed */
#ifndef arch_atomic64_inc
static __always_inline void
arch_atomic64_inc(atomic64_t *v)
{
	arch_atomic64_add(1, v);
}
#define arch_atomic64_inc arch_atomic64_inc
#endif

#ifndef arch_atomic64_inc_return_relaxed
#ifdef arch_atomic64_inc_return
#define arch_atomic64_inc_return_acquire arch_atomic64_inc_return
#define arch_atomic64_inc_return_release arch_atomic64_inc_return
#define arch_atomic64_inc_return_relaxed arch_atomic64_inc_return
#endif /* arch_atomic64_inc_return */

#ifndef arch_atomic64_inc_return
static __always_inline s64
arch_atomic64_inc_return(atomic64_t *v)
{
	return arch_atomic64_add_return(1, v);
}
#define arch_atomic64_inc_return arch_atomic64_inc_return
#endif

#ifndef arch_atomic64_inc_return_acquire
static __always_inline s64
arch_atomic64_inc_return_acquire(atomic64_t *v)
{
	return arch_atomic64_add_return_acquire(1, v);
}
#define arch_atomic64_inc_return_acquire arch_atomic64_inc_return_acquire
#endif

#ifndef arch_atomic64_inc_return_release
static __always_inline s64
arch_atomic64_inc_return_release(atomic64_t *v)
{
	return arch_atomic64_add_return_release(1, v);
}
#define arch_atomic64_inc_return_release arch_atomic64_inc_return_release
#endif

#ifndef arch_atomic64_inc_return_relaxed
static __always_inline s64
arch_atomic64_inc_return_relaxed(atomic64_t *v)
{
	return arch_atomic64_add_return_relaxed(1, v);
}
#define arch_atomic64_inc_return_relaxed arch_atomic64_inc_return_relaxed
#endif

#else /* arch_atomic64_inc_return_relaxed */

#ifndef arch_atomic64_inc_return_acquire
static __always_inline s64
arch_atomic64_inc_return_acquire(atomic64_t *v)
{
	s64 ret = arch_atomic64_inc_return_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_inc_return_acquire arch_atomic64_inc_return_acquire
#endif

#ifndef arch_atomic64_inc_return_release
static __always_inline s64
arch_atomic64_inc_return_release(atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_inc_return_relaxed(v);
}
#define arch_atomic64_inc_return_release arch_atomic64_inc_return_release
#endif

#ifndef arch_atomic64_inc_return
static __always_inline s64
arch_atomic64_inc_return(atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_inc_return_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_inc_return arch_atomic64_inc_return
#endif

#endif /* arch_atomic64_inc_return_relaxed */
#ifndef arch_atomic64_fetch_inc_relaxed
#ifdef arch_atomic64_fetch_inc
#define arch_atomic64_fetch_inc_acquire arch_atomic64_fetch_inc
#define arch_atomic64_fetch_inc_release arch_atomic64_fetch_inc
#define arch_atomic64_fetch_inc_relaxed arch_atomic64_fetch_inc
#endif /* arch_atomic64_fetch_inc */

#ifndef arch_atomic64_fetch_inc
static __always_inline s64
arch_atomic64_fetch_inc(atomic64_t *v)
{
	return arch_atomic64_fetch_add(1, v);
}
#define arch_atomic64_fetch_inc arch_atomic64_fetch_inc
#endif

#ifndef arch_atomic64_fetch_inc_acquire
static __always_inline s64
arch_atomic64_fetch_inc_acquire(atomic64_t *v)
{
	return arch_atomic64_fetch_add_acquire(1, v);
}
#define arch_atomic64_fetch_inc_acquire arch_atomic64_fetch_inc_acquire
#endif

#ifndef arch_atomic64_fetch_inc_release
static __always_inline s64
arch_atomic64_fetch_inc_release(atomic64_t *v)
{
	return arch_atomic64_fetch_add_release(1, v);
}
#define arch_atomic64_fetch_inc_release arch_atomic64_fetch_inc_release
#endif

#ifndef arch_atomic64_fetch_inc_relaxed
static __always_inline s64
arch_atomic64_fetch_inc_relaxed(atomic64_t *v)
{
	return arch_atomic64_fetch_add_relaxed(1, v);
}
#define arch_atomic64_fetch_inc_relaxed arch_atomic64_fetch_inc_relaxed
#endif

#else /* arch_atomic64_fetch_inc_relaxed */

#ifndef arch_atomic64_fetch_inc_acquire
static __always_inline s64
arch_atomic64_fetch_inc_acquire(atomic64_t *v)
{
	s64 ret = arch_atomic64_fetch_inc_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_fetch_inc_acquire arch_atomic64_fetch_inc_acquire
#endif

#ifndef arch_atomic64_fetch_inc_release
static __always_inline s64
arch_atomic64_fetch_inc_release(atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_fetch_inc_relaxed(v);
}
#define arch_atomic64_fetch_inc_release arch_atomic64_fetch_inc_release
#endif

#ifndef arch_atomic64_fetch_inc
static __always_inline s64
arch_atomic64_fetch_inc(atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_fetch_inc_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_fetch_inc arch_atomic64_fetch_inc
#endif

#endif /* arch_atomic64_fetch_inc_relaxed */
#ifndef arch_atomic64_dec
static __always_inline void
arch_atomic64_dec(atomic64_t *v)
{
	arch_atomic64_sub(1, v);
}
#define arch_atomic64_dec arch_atomic64_dec
#endif

#ifndef arch_atomic64_dec_return_relaxed
#ifdef arch_atomic64_dec_return
#define arch_atomic64_dec_return_acquire arch_atomic64_dec_return
#define arch_atomic64_dec_return_release arch_atomic64_dec_return
#define arch_atomic64_dec_return_relaxed arch_atomic64_dec_return
#endif /* arch_atomic64_dec_return */

#ifndef arch_atomic64_dec_return
static __always_inline s64
arch_atomic64_dec_return(atomic64_t *v)
{
	return arch_atomic64_sub_return(1, v);
}
#define arch_atomic64_dec_return arch_atomic64_dec_return
#endif

#ifndef arch_atomic64_dec_return_acquire
static __always_inline s64
arch_atomic64_dec_return_acquire(atomic64_t *v)
{
	return arch_atomic64_sub_return_acquire(1, v);
}
#define arch_atomic64_dec_return_acquire arch_atomic64_dec_return_acquire
#endif

#ifndef arch_atomic64_dec_return_release
static __always_inline s64
arch_atomic64_dec_return_release(atomic64_t *v)
{
	return arch_atomic64_sub_return_release(1, v);
}
#define arch_atomic64_dec_return_release arch_atomic64_dec_return_release
#endif

#ifndef arch_atomic64_dec_return_relaxed
static __always_inline s64
arch_atomic64_dec_return_relaxed(atomic64_t *v)
{
	return arch_atomic64_sub_return_relaxed(1, v);
}
#define arch_atomic64_dec_return_relaxed arch_atomic64_dec_return_relaxed
#endif

#else /* arch_atomic64_dec_return_relaxed */

#ifndef arch_atomic64_dec_return_acquire
static __always_inline s64
arch_atomic64_dec_return_acquire(atomic64_t *v)
{
	s64 ret = arch_atomic64_dec_return_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_dec_return_acquire arch_atomic64_dec_return_acquire
#endif

#ifndef arch_atomic64_dec_return_release
static __always_inline s64
arch_atomic64_dec_return_release(atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_dec_return_relaxed(v);
}
#define arch_atomic64_dec_return_release arch_atomic64_dec_return_release
#endif

#ifndef arch_atomic64_dec_return
static __always_inline s64
arch_atomic64_dec_return(atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_dec_return_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_dec_return arch_atomic64_dec_return
#endif

#endif /* arch_atomic64_dec_return_relaxed */
#ifndef arch_atomic64_fetch_dec_relaxed
#ifdef arch_atomic64_fetch_dec
#define arch_atomic64_fetch_dec_acquire arch_atomic64_fetch_dec
#define arch_atomic64_fetch_dec_release arch_atomic64_fetch_dec
#define arch_atomic64_fetch_dec_relaxed arch_atomic64_fetch_dec
#endif /* arch_atomic64_fetch_dec */

#ifndef arch_atomic64_fetch_dec
static __always_inline s64
arch_atomic64_fetch_dec(atomic64_t *v)
{
	return arch_atomic64_fetch_sub(1, v);
}
#define arch_atomic64_fetch_dec arch_atomic64_fetch_dec
#endif

#ifndef arch_atomic64_fetch_dec_acquire
static __always_inline s64
arch_atomic64_fetch_dec_acquire(atomic64_t *v)
{
	return arch_atomic64_fetch_sub_acquire(1, v);
}
#define arch_atomic64_fetch_dec_acquire arch_atomic64_fetch_dec_acquire
#endif

#ifndef arch_atomic64_fetch_dec_release
static __always_inline s64
arch_atomic64_fetch_dec_release(atomic64_t *v)
{
	return arch_atomic64_fetch_sub_release(1, v);
}
#define arch_atomic64_fetch_dec_release arch_atomic64_fetch_dec_release
#endif

#ifndef arch_atomic64_fetch_dec_relaxed
static __always_inline s64
arch_atomic64_fetch_dec_relaxed(atomic64_t *v)
{
	return arch_atomic64_fetch_sub_relaxed(1, v);
}
#define arch_atomic64_fetch_dec_relaxed arch_atomic64_fetch_dec_relaxed
#endif

#else /* arch_atomic64_fetch_dec_relaxed */

#ifndef arch_atomic64_fetch_dec_acquire
static __always_inline s64
arch_atomic64_fetch_dec_acquire(atomic64_t *v)
{
	s64 ret = arch_atomic64_fetch_dec_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_fetch_dec_acquire arch_atomic64_fetch_dec_acquire
#endif

#ifndef arch_atomic64_fetch_dec_release
static __always_inline s64
arch_atomic64_fetch_dec_release(atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_fetch_dec_relaxed(v);
}
#define arch_atomic64_fetch_dec_release arch_atomic64_fetch_dec_release
#endif

#ifndef arch_atomic64_fetch_dec
static __always_inline s64
arch_atomic64_fetch_dec(atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_fetch_dec_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_fetch_dec arch_atomic64_fetch_dec
#endif

#endif /* arch_atomic64_fetch_dec_relaxed */
#ifndef arch_atomic64_fetch_and_relaxed
#define arch_atomic64_fetch_and_acquire arch_atomic64_fetch_and
#define arch_atomic64_fetch_and_release arch_atomic64_fetch_and
#define arch_atomic64_fetch_and_relaxed arch_atomic64_fetch_and
#else /* arch_atomic64_fetch_and_relaxed */

#ifndef arch_atomic64_fetch_and_acquire
static __always_inline s64
arch_atomic64_fetch_and_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_fetch_and_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_fetch_and_acquire arch_atomic64_fetch_and_acquire
#endif

#ifndef arch_atomic64_fetch_and_release
static __always_inline s64
arch_atomic64_fetch_and_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_fetch_and_relaxed(i, v);
}
#define arch_atomic64_fetch_and_release arch_atomic64_fetch_and_release
#endif

#ifndef arch_atomic64_fetch_and
static __always_inline s64
arch_atomic64_fetch_and(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_fetch_and_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_fetch_and arch_atomic64_fetch_and
#endif

#endif /* arch_atomic64_fetch_and_relaxed */
#ifndef arch_atomic64_andnot
static __always_inline void
arch_atomic64_andnot(s64 i, atomic64_t *v)
{
	arch_atomic64_and(~i, v);
}
#define arch_atomic64_andnot arch_atomic64_andnot
#endif

#ifndef arch_atomic64_fetch_andnot_relaxed
#ifdef arch_atomic64_fetch_andnot
#define arch_atomic64_fetch_andnot_acquire arch_atomic64_fetch_andnot
#define arch_atomic64_fetch_andnot_release arch_atomic64_fetch_andnot
#define arch_atomic64_fetch_andnot_relaxed arch_atomic64_fetch_andnot
#endif /* arch_atomic64_fetch_andnot */

#ifndef arch_atomic64_fetch_andnot
static __always_inline s64
arch_atomic64_fetch_andnot(s64 i, atomic64_t *v)
{
	return arch_atomic64_fetch_and(~i, v);
}
#define arch_atomic64_fetch_andnot arch_atomic64_fetch_andnot
#endif

#ifndef arch_atomic64_fetch_andnot_acquire
static __always_inline s64
arch_atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
{
	return arch_atomic64_fetch_and_acquire(~i, v);
}
#define arch_atomic64_fetch_andnot_acquire arch_atomic64_fetch_andnot_acquire
#endif

#ifndef arch_atomic64_fetch_andnot_release
static __always_inline s64
arch_atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
{
	return arch_atomic64_fetch_and_release(~i, v);
}
#define arch_atomic64_fetch_andnot_release arch_atomic64_fetch_andnot_release
#endif

#ifndef arch_atomic64_fetch_andnot_relaxed
static __always_inline s64
arch_atomic64_fetch_andnot_relaxed(s64 i, atomic64_t *v)
{
	return arch_atomic64_fetch_and_relaxed(~i, v);
}
#define arch_atomic64_fetch_andnot_relaxed arch_atomic64_fetch_andnot_relaxed
#endif

#else /* arch_atomic64_fetch_andnot_relaxed */

#ifndef arch_atomic64_fetch_andnot_acquire
static __always_inline s64
arch_atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_fetch_andnot_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_fetch_andnot_acquire arch_atomic64_fetch_andnot_acquire
#endif

#ifndef arch_atomic64_fetch_andnot_release
static __always_inline s64
arch_atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_fetch_andnot_relaxed(i, v);
}
#define arch_atomic64_fetch_andnot_release arch_atomic64_fetch_andnot_release
#endif

#ifndef arch_atomic64_fetch_andnot
static __always_inline s64
arch_atomic64_fetch_andnot(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_fetch_andnot_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_fetch_andnot arch_atomic64_fetch_andnot
#endif

#endif /* arch_atomic64_fetch_andnot_relaxed */
#ifndef arch_atomic64_fetch_or_relaxed
#define arch_atomic64_fetch_or_acquire arch_atomic64_fetch_or
#define arch_atomic64_fetch_or_release arch_atomic64_fetch_or
#define arch_atomic64_fetch_or_relaxed arch_atomic64_fetch_or
#else /* arch_atomic64_fetch_or_relaxed */

#ifndef arch_atomic64_fetch_or_acquire
static __always_inline s64
arch_atomic64_fetch_or_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_fetch_or_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_fetch_or_acquire arch_atomic64_fetch_or_acquire
#endif

#ifndef arch_atomic64_fetch_or_release
static __always_inline s64
arch_atomic64_fetch_or_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_fetch_or_relaxed(i, v);
}
#define arch_atomic64_fetch_or_release arch_atomic64_fetch_or_release
#endif

#ifndef arch_atomic64_fetch_or
static __always_inline s64
arch_atomic64_fetch_or(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_fetch_or_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_fetch_or arch_atomic64_fetch_or
#endif

#endif /* arch_atomic64_fetch_or_relaxed */
#ifndef arch_atomic64_fetch_xor_relaxed
#define arch_atomic64_fetch_xor_acquire arch_atomic64_fetch_xor
#define arch_atomic64_fetch_xor_release arch_atomic64_fetch_xor
#define arch_atomic64_fetch_xor_relaxed arch_atomic64_fetch_xor
#else /* arch_atomic64_fetch_xor_relaxed */

#ifndef arch_atomic64_fetch_xor_acquire
static __always_inline s64
arch_atomic64_fetch_xor_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_fetch_xor_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_fetch_xor_acquire arch_atomic64_fetch_xor_acquire
#endif

#ifndef arch_atomic64_fetch_xor_release
static __always_inline s64
arch_atomic64_fetch_xor_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_fetch_xor_relaxed(i, v);
}
#define arch_atomic64_fetch_xor_release arch_atomic64_fetch_xor_release
#endif

#ifndef arch_atomic64_fetch_xor
static __always_inline s64
arch_atomic64_fetch_xor(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_fetch_xor_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_fetch_xor arch_atomic64_fetch_xor
#endif

#endif /* arch_atomic64_fetch_xor_relaxed */
#ifndef arch_atomic64_xchg_relaxed
#define arch_atomic64_xchg_acquire arch_atomic64_xchg
#define arch_atomic64_xchg_release arch_atomic64_xchg
#define arch_atomic64_xchg_relaxed arch_atomic64_xchg
#else /* arch_atomic64_xchg_relaxed */
#ifndef arch_atomic64_xchg_acquire
static __always_inline s64
arch_atomic64_xchg_acquire(atomic64_t *v, s64 i)
{
	s64 ret = arch_atomic64_xchg_relaxed(v, i);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_xchg_acquire arch_atomic64_xchg_acquire
#endif
#ifndef arch_atomic64_xchg_release
static __always_inline s64
arch_atomic64_xchg_release(atomic64_t *v, s64 i)
{
	__atomic_release_fence();
	return arch_atomic64_xchg_relaxed(v, i);
}
#define arch_atomic64_xchg_release arch_atomic64_xchg_release
#endif
#ifndef arch_atomic64_xchg
static __always_inline s64
arch_atomic64_xchg(atomic64_t *v, s64 i)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_xchg_relaxed(v, i);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_xchg arch_atomic64_xchg
#endif

#endif /* arch_atomic64_xchg_relaxed */
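
/*
 * Illustrative sketch, not emitted by the generator: xchg atomically
 * installs a new value and returns the old one, which suits
 * "consume whatever is pending" patterns. take_pending() is hypothetical:
 *
 *	static __always_inline s64 take_pending(atomic64_t *pending)
 *	{
 *		return arch_atomic64_xchg(pending, 0);
 *	}
 */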
#ifndef arch_atomic64_cmpxchg_relaxed
#define arch_atomic64_cmpxchg_acquire arch_atomic64_cmpxchg
#define arch_atomic64_cmpxchg_release arch_atomic64_cmpxchg
#define arch_atomic64_cmpxchg_relaxed arch_atomic64_cmpxchg
#else /* arch_atomic64_cmpxchg_relaxed */
#ifndef arch_atomic64_cmpxchg_acquire
static __always_inline s64
arch_atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new)
{
	s64 ret = arch_atomic64_cmpxchg_relaxed(v, old, new);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_cmpxchg_acquire arch_atomic64_cmpxchg_acquire
#endif
#ifndef arch_atomic64_cmpxchg_release
static __always_inline s64
arch_atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new)
{
	__atomic_release_fence();
	return arch_atomic64_cmpxchg_relaxed(v, old, new);
}
#define arch_atomic64_cmpxchg_release arch_atomic64_cmpxchg_release
#endif
#ifndef arch_atomic64_cmpxchg
static __always_inline s64
arch_atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_cmpxchg_relaxed(v, old, new);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_cmpxchg arch_atomic64_cmpxchg
#endif

#endif /* arch_atomic64_cmpxchg_relaxed */
#ifndef arch_atomic64_try_cmpxchg_relaxed
#ifdef arch_atomic64_try_cmpxchg
#define arch_atomic64_try_cmpxchg_acquire arch_atomic64_try_cmpxchg
#define arch_atomic64_try_cmpxchg_release arch_atomic64_try_cmpxchg
#define arch_atomic64_try_cmpxchg_relaxed arch_atomic64_try_cmpxchg
#endif /* arch_atomic64_try_cmpxchg */
#ifndef arch_atomic64_try_cmpxchg
static __always_inline bool
arch_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{
	s64 r, o = *old;
	r = arch_atomic64_cmpxchg(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define arch_atomic64_try_cmpxchg arch_atomic64_try_cmpxchg
#endif
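
/*
 * Editorial note, not emitted by the generator: unlike cmpxchg, which
 * returns the value it observed, try_cmpxchg returns a success flag and,
 * on failure, writes the observed value back through @old -- callers can
 * retry without re-reading the variable themselves.
 */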
#ifndef arch_atomic64_try_cmpxchg_acquire
static __always_inline bool
arch_atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
{
	s64 r, o = *old;
	r = arch_atomic64_cmpxchg_acquire(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define arch_atomic64_try_cmpxchg_acquire arch_atomic64_try_cmpxchg_acquire
#endif
#ifndef arch_atomic64_try_cmpxchg_release
static __always_inline bool
arch_atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
{
	s64 r, o = *old;
	r = arch_atomic64_cmpxchg_release(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define arch_atomic64_try_cmpxchg_release arch_atomic64_try_cmpxchg_release
#endif
#ifndef arch_atomic64_try_cmpxchg_relaxed
static __always_inline bool
arch_atomic64_try_cmpxchg_relaxed(atomic64_t *v, s64 *old, s64 new)
{
	s64 r, o = *old;
	r = arch_atomic64_cmpxchg_relaxed(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define arch_atomic64_try_cmpxchg_relaxed arch_atomic64_try_cmpxchg_relaxed
#endif

#else /* arch_atomic64_try_cmpxchg_relaxed */
#ifndef arch_atomic64_try_cmpxchg_acquire
static __always_inline bool
arch_atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
{
	bool ret = arch_atomic64_try_cmpxchg_relaxed(v, old, new);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_try_cmpxchg_acquire arch_atomic64_try_cmpxchg_acquire
#endif
#ifndef arch_atomic64_try_cmpxchg_release
static __always_inline bool
arch_atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
{
	__atomic_release_fence();
	return arch_atomic64_try_cmpxchg_relaxed(v, old, new);
}
#define arch_atomic64_try_cmpxchg_release arch_atomic64_try_cmpxchg_release
#endif
#ifndef arch_atomic64_try_cmpxchg
static __always_inline bool
arch_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{
	bool ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_try_cmpxchg_relaxed(v, old, new);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_try_cmpxchg arch_atomic64_try_cmpxchg
#endif

#endif /* arch_atomic64_try_cmpxchg_relaxed */
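
/*
 * Illustrative sketch, not emitted by the generator: the canonical
 * try_cmpxchg retry loop. On failure 'old' is refreshed with the current
 * value, so the body only recomputes and retries. saturating_double() is
 * hypothetical:
 *
 *	static __always_inline void saturating_double(atomic64_t *v)
 *	{
 *		s64 old = arch_atomic64_read(v);
 *
 *		do {
 *			if (old >= S64_MAX / 2)
 *				return;
 *		} while (!arch_atomic64_try_cmpxchg(v, &old, old * 2));
 *	}
 */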
#ifndef arch_atomic64_sub_and_test
/**
 * arch_atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static __always_inline bool
arch_atomic64_sub_and_test(s64 i, atomic64_t *v)
{
	return arch_atomic64_sub_return(i, v) == 0;
}
#define arch_atomic64_sub_and_test arch_atomic64_sub_and_test
#endif
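
/*
 * Illustrative sketch, not emitted by the generator: the *_and_test
 * helpers fuse the arithmetic and the zero test into a single atomic step,
 * so "last one out" decisions cannot race. put_ref() and free_object()
 * are hypothetical:
 *
 *	static void put_ref(atomic64_t *refs, s64 n)
 *	{
 *		if (arch_atomic64_sub_and_test(n, refs))
 *			free_object();
 *	}
 *
 * arch_atomic64_dec_and_test() and arch_atomic64_inc_and_test() below are
 * the step-by-one analogues.
 */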
#ifndef arch_atomic64_dec_and_test
/**
 * arch_atomic64_dec_and_test - decrement and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static __always_inline bool
arch_atomic64_dec_and_test(atomic64_t *v)
{
	return arch_atomic64_dec_return(v) == 0;
}
#define arch_atomic64_dec_and_test arch_atomic64_dec_and_test
#endif
#ifndef arch_atomic64_inc_and_test
/**
 * arch_atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static __always_inline bool
arch_atomic64_inc_and_test(atomic64_t *v)
{
	return arch_atomic64_inc_return(v) == 0;
}
#define arch_atomic64_inc_and_test arch_atomic64_inc_and_test
#endif
#ifndef arch_atomic64_add_negative
/**
 * arch_atomic64_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static __always_inline bool
arch_atomic64_add_negative(s64 i, atomic64_t *v)
{
	return arch_atomic64_add_return(i, v) < 0;
}
#define arch_atomic64_add_negative arch_atomic64_add_negative
#endif
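
/*
 * Illustrative sketch, not emitted by the generator: add_negative reports
 * the sign of the result, which suits counters where dropping below zero
 * signals contention. Names here are hypothetical:
 *
 *	if (arch_atomic64_add_negative(delta, &sem_count))
 *		wait_for_slot();
 */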
#ifndef arch_atomic64_fetch_add_unless
/**
 * arch_atomic64_fetch_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns original value of @v
 */
static __always_inline s64
arch_atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
{
	s64 c = arch_atomic64_read(v);

	do {
		if (unlikely(c == u))
			break;
	} while (!arch_atomic64_try_cmpxchg(v, &c, c + a));

	return c;
}
#define arch_atomic64_fetch_add_unless arch_atomic64_fetch_add_unless
#endif
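
/*
 * Illustrative sketch, not emitted by the generator: because
 * fetch_add_unless returns the old value, the caller can tell "added"
 * apart from "hit the excluded value @u". get_unless_saturated() is
 * hypothetical, with S64_MAX standing in for a saturated count:
 *
 *	static __always_inline bool get_unless_saturated(atomic64_t *refs)
 *	{
 *		return arch_atomic64_fetch_add_unless(refs, 1, S64_MAX) != S64_MAX;
 *	}
 *
 * arch_atomic64_add_unless() below packages exactly this comparison.
 */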
#ifndef arch_atomic64_add_unless
/**
 * arch_atomic64_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, if @v was not already @u.
 * Returns true if the addition was done.
 */
static __always_inline bool
arch_atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
{
	return arch_atomic64_fetch_add_unless(v, a, u) != u;
}
#define arch_atomic64_add_unless arch_atomic64_add_unless
#endif
#ifndef arch_atomic64_inc_not_zero
/**
 * arch_atomic64_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1, if @v is non-zero.
 * Returns true if the increment was done.
 */
static __always_inline bool
arch_atomic64_inc_not_zero(atomic64_t *v)
{
	return arch_atomic64_add_unless(v, 1, 0);
}
#define arch_atomic64_inc_not_zero arch_atomic64_inc_not_zero
#endif
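
/*
 * Illustrative sketch, not emitted by the generator: inc_not_zero is the
 * classic "take a reference only if the object is still live" step in
 * lookup paths. lookup_entry() and the 'refs' member are hypothetical:
 *
 *	obj = lookup_entry(key);
 *	if (obj && !arch_atomic64_inc_not_zero(&obj->refs))
 *		obj = NULL;
 *
 * A failed increment means the object was already on its way to being
 * freed.
 */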
#ifndef arch_atomic64_inc_unless_negative
static __always_inline bool
arch_atomic64_inc_unless_negative(atomic64_t *v)
{
	s64 c = arch_atomic64_read(v);

	do {
		if (unlikely(c < 0))
			return false;
	} while (!arch_atomic64_try_cmpxchg(v, &c, c + 1));

	return true;
}
#define arch_atomic64_inc_unless_negative arch_atomic64_inc_unless_negative
#endif
#ifndef arch_atomic64_dec_unless_positive
static __always_inline bool
arch_atomic64_dec_unless_positive(atomic64_t *v)
{
	s64 c = arch_atomic64_read(v);

	do {
		if (unlikely(c > 0))
			return false;
	} while (!arch_atomic64_try_cmpxchg(v, &c, c - 1));

	return true;
}
#define arch_atomic64_dec_unless_positive arch_atomic64_dec_unless_positive
#endif
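
/*
 * Editorial note, not emitted by the generator: inc_unless_negative and
 * dec_unless_positive are mirror images of the same try_cmpxchg loop.
 * Together they can express a biased counter where positive values count
 * readers and negative values mark a writer (hypothetical name):
 *
 *	if (!arch_atomic64_inc_unless_negative(&rw_bias))
 *		wait_for_writer();
 */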
#ifndef arch_atomic64_dec_if_positive
static __always_inline s64
arch_atomic64_dec_if_positive(atomic64_t *v)
{
	s64 dec, c = arch_atomic64_read(v);

	do {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
	} while (!arch_atomic64_try_cmpxchg(v, &c, dec));

	return dec;
}
#define arch_atomic64_dec_if_positive arch_atomic64_dec_if_positive
#endif
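
/*
 * Illustrative sketch, not emitted by the generator: dec_if_positive
 * returns the decremented value, so a negative return means the decrement
 * was refused -- a semaphore-style trylock. try_take_token() is
 * hypothetical:
 *
 *	static __always_inline bool try_take_token(atomic64_t *tokens)
 *	{
 *		return arch_atomic64_dec_if_positive(tokens) >= 0;
 *	}
 */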
#endif /* _LINUX_ATOMIC_FALLBACK_H */
// b5e87bdd5ede61470c29f7a7e4de781af3770f09