1 // SPDX-License-Identifier: GPL-2.0
3 // Generated by scripts/atomic/gen-atomic-fallback.sh
4 // DO NOT MODIFY THIS FILE DIRECTLY
6 #ifndef _LINUX_ATOMIC_FALLBACK_H
7 #define _LINUX_ATOMIC_FALLBACK_H
9 #include <linux/compiler.h>
/*
 * Ordering fallbacks for arch_xchg()/arch_cmpxchg()/arch_cmpxchg64():
 * if the architecture only provides the fully-ordered op, reuse it for
 * the _acquire/_release/_relaxed names; if it provides _relaxed, build
 * the stronger orderings from it via the __atomic_op_*() fence wrappers.
 */
#ifndef arch_xchg_relaxed
#define arch_xchg_acquire arch_xchg
#define arch_xchg_release arch_xchg
#define arch_xchg_relaxed arch_xchg
#else /* arch_xchg_relaxed */

#ifndef arch_xchg_acquire
#define arch_xchg_acquire(...) \
	__atomic_op_acquire(arch_xchg, __VA_ARGS__)
#endif

#ifndef arch_xchg_release
#define arch_xchg_release(...) \
	__atomic_op_release(arch_xchg, __VA_ARGS__)
#endif

#ifndef arch_xchg
#define arch_xchg(...) \
	__atomic_op_fence(arch_xchg, __VA_ARGS__)
#endif

#endif /* arch_xchg_relaxed */

#ifndef arch_cmpxchg_relaxed
#define arch_cmpxchg_acquire arch_cmpxchg
#define arch_cmpxchg_release arch_cmpxchg
#define arch_cmpxchg_relaxed arch_cmpxchg
#else /* arch_cmpxchg_relaxed */

#ifndef arch_cmpxchg_acquire
#define arch_cmpxchg_acquire(...) \
	__atomic_op_acquire(arch_cmpxchg, __VA_ARGS__)
#endif

#ifndef arch_cmpxchg_release
#define arch_cmpxchg_release(...) \
	__atomic_op_release(arch_cmpxchg, __VA_ARGS__)
#endif

#ifndef arch_cmpxchg
#define arch_cmpxchg(...) \
	__atomic_op_fence(arch_cmpxchg, __VA_ARGS__)
#endif

#endif /* arch_cmpxchg_relaxed */

#ifndef arch_cmpxchg64_relaxed
#define arch_cmpxchg64_acquire arch_cmpxchg64
#define arch_cmpxchg64_release arch_cmpxchg64
#define arch_cmpxchg64_relaxed arch_cmpxchg64
#else /* arch_cmpxchg64_relaxed */

#ifndef arch_cmpxchg64_acquire
#define arch_cmpxchg64_acquire(...) \
	__atomic_op_acquire(arch_cmpxchg64, __VA_ARGS__)
#endif

#ifndef arch_cmpxchg64_release
#define arch_cmpxchg64_release(...) \
	__atomic_op_release(arch_cmpxchg64, __VA_ARGS__)
#endif

#ifndef arch_cmpxchg64
#define arch_cmpxchg64(...) \
	__atomic_op_fence(arch_cmpxchg64, __VA_ARGS__)
#endif

#endif /* arch_cmpxchg64_relaxed */
/*
 * Generic arch_try_cmpxchg() family: where the architecture lacks a
 * native try_cmpxchg, emulate it via cmpxchg — on failure the observed
 * value is written back through @_oldp so the caller can retry.  The
 * result is a boolean: true iff the exchange happened.
 */
#ifndef arch_try_cmpxchg_relaxed
#ifdef arch_try_cmpxchg
#define arch_try_cmpxchg_acquire arch_try_cmpxchg
#define arch_try_cmpxchg_release arch_try_cmpxchg
#define arch_try_cmpxchg_relaxed arch_try_cmpxchg
#endif /* arch_try_cmpxchg */

#ifndef arch_try_cmpxchg
#define arch_try_cmpxchg(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = arch_cmpxchg((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif /* arch_try_cmpxchg */

#ifndef arch_try_cmpxchg_acquire
#define arch_try_cmpxchg_acquire(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = arch_cmpxchg_acquire((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif /* arch_try_cmpxchg_acquire */

#ifndef arch_try_cmpxchg_release
#define arch_try_cmpxchg_release(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = arch_cmpxchg_release((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif /* arch_try_cmpxchg_release */

#ifndef arch_try_cmpxchg_relaxed
#define arch_try_cmpxchg_relaxed(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = arch_cmpxchg_relaxed((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif /* arch_try_cmpxchg_relaxed */

#else /* arch_try_cmpxchg_relaxed */

#ifndef arch_try_cmpxchg_acquire
#define arch_try_cmpxchg_acquire(...) \
	__atomic_op_acquire(arch_try_cmpxchg, __VA_ARGS__)
#endif

#ifndef arch_try_cmpxchg_release
#define arch_try_cmpxchg_release(...) \
	__atomic_op_release(arch_try_cmpxchg, __VA_ARGS__)
#endif

#ifndef arch_try_cmpxchg
#define arch_try_cmpxchg(...) \
	__atomic_op_fence(arch_try_cmpxchg, __VA_ARGS__)
#endif

#endif /* arch_try_cmpxchg_relaxed */
150 #ifndef arch_atomic_read_acquire
151 static __always_inline int
152 arch_atomic_read_acquire(const atomic_t *v)
154 return smp_load_acquire(&(v)->counter);
156 #define arch_atomic_read_acquire arch_atomic_read_acquire
159 #ifndef arch_atomic_set_release
160 static __always_inline void
161 arch_atomic_set_release(atomic_t *v, int i)
163 smp_store_release(&(v)->counter, i);
165 #define arch_atomic_set_release arch_atomic_set_release
/*
 * add_return/fetch_add/sub_return/fetch_sub ordering fallbacks: the
 * _acquire variant runs relaxed then an acquire fence; _release fences
 * then runs relaxed; the fully-ordered variant brackets the relaxed op
 * with pre/post full fences.
 */
#ifndef arch_atomic_add_return_relaxed
#define arch_atomic_add_return_acquire arch_atomic_add_return
#define arch_atomic_add_return_release arch_atomic_add_return
#define arch_atomic_add_return_relaxed arch_atomic_add_return
#else /* arch_atomic_add_return_relaxed */

#ifndef arch_atomic_add_return_acquire
static __always_inline int
arch_atomic_add_return_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_add_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_add_return_acquire arch_atomic_add_return_acquire
#endif

#ifndef arch_atomic_add_return_release
static __always_inline int
arch_atomic_add_return_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_add_return_relaxed(i, v);
}
#define arch_atomic_add_return_release arch_atomic_add_return_release
#endif

#ifndef arch_atomic_add_return
static __always_inline int
arch_atomic_add_return(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_add_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_add_return arch_atomic_add_return
#endif

#endif /* arch_atomic_add_return_relaxed */

#ifndef arch_atomic_fetch_add_relaxed
#define arch_atomic_fetch_add_acquire arch_atomic_fetch_add
#define arch_atomic_fetch_add_release arch_atomic_fetch_add
#define arch_atomic_fetch_add_relaxed arch_atomic_fetch_add
#else /* arch_atomic_fetch_add_relaxed */

#ifndef arch_atomic_fetch_add_acquire
static __always_inline int
arch_atomic_fetch_add_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_fetch_add_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_fetch_add_acquire arch_atomic_fetch_add_acquire
#endif

#ifndef arch_atomic_fetch_add_release
static __always_inline int
arch_atomic_fetch_add_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_fetch_add_relaxed(i, v);
}
#define arch_atomic_fetch_add_release arch_atomic_fetch_add_release
#endif

#ifndef arch_atomic_fetch_add
static __always_inline int
arch_atomic_fetch_add(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_add_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_fetch_add arch_atomic_fetch_add
#endif

#endif /* arch_atomic_fetch_add_relaxed */

#ifndef arch_atomic_sub_return_relaxed
#define arch_atomic_sub_return_acquire arch_atomic_sub_return
#define arch_atomic_sub_return_release arch_atomic_sub_return
#define arch_atomic_sub_return_relaxed arch_atomic_sub_return
#else /* arch_atomic_sub_return_relaxed */

#ifndef arch_atomic_sub_return_acquire
static __always_inline int
arch_atomic_sub_return_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_sub_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_sub_return_acquire arch_atomic_sub_return_acquire
#endif

#ifndef arch_atomic_sub_return_release
static __always_inline int
arch_atomic_sub_return_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_sub_return_relaxed(i, v);
}
#define arch_atomic_sub_return_release arch_atomic_sub_return_release
#endif

#ifndef arch_atomic_sub_return
static __always_inline int
arch_atomic_sub_return(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_sub_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_sub_return arch_atomic_sub_return
#endif

#endif /* arch_atomic_sub_return_relaxed */

#ifndef arch_atomic_fetch_sub_relaxed
#define arch_atomic_fetch_sub_acquire arch_atomic_fetch_sub
#define arch_atomic_fetch_sub_release arch_atomic_fetch_sub
#define arch_atomic_fetch_sub_relaxed arch_atomic_fetch_sub
#else /* arch_atomic_fetch_sub_relaxed */

#ifndef arch_atomic_fetch_sub_acquire
static __always_inline int
arch_atomic_fetch_sub_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_fetch_sub_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_fetch_sub_acquire arch_atomic_fetch_sub_acquire
#endif

#ifndef arch_atomic_fetch_sub_release
static __always_inline int
arch_atomic_fetch_sub_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_fetch_sub_relaxed(i, v);
}
#define arch_atomic_fetch_sub_release arch_atomic_fetch_sub_release
#endif

#ifndef arch_atomic_fetch_sub
static __always_inline int
arch_atomic_fetch_sub(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_sub_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_fetch_sub arch_atomic_fetch_sub
#endif

#endif /* arch_atomic_fetch_sub_relaxed */
336 #ifndef arch_atomic_inc
337 static __always_inline void
338 arch_atomic_inc(atomic_t *v)
340 arch_atomic_add(1, v);
342 #define arch_atomic_inc arch_atomic_inc
345 #ifndef arch_atomic_inc_return_relaxed
346 #ifdef arch_atomic_inc_return
347 #define arch_atomic_inc_return_acquire arch_atomic_inc_return
348 #define arch_atomic_inc_return_release arch_atomic_inc_return
349 #define arch_atomic_inc_return_relaxed arch_atomic_inc_return
350 #endif /* arch_atomic_inc_return */
352 #ifndef arch_atomic_inc_return
353 static __always_inline int
354 arch_atomic_inc_return(atomic_t *v)
356 return arch_atomic_add_return(1, v);
358 #define arch_atomic_inc_return arch_atomic_inc_return
361 #ifndef arch_atomic_inc_return_acquire
362 static __always_inline int
363 arch_atomic_inc_return_acquire(atomic_t *v)
365 return arch_atomic_add_return_acquire(1, v);
367 #define arch_atomic_inc_return_acquire arch_atomic_inc_return_acquire
370 #ifndef arch_atomic_inc_return_release
371 static __always_inline int
372 arch_atomic_inc_return_release(atomic_t *v)
374 return arch_atomic_add_return_release(1, v);
376 #define arch_atomic_inc_return_release arch_atomic_inc_return_release
379 #ifndef arch_atomic_inc_return_relaxed
380 static __always_inline int
381 arch_atomic_inc_return_relaxed(atomic_t *v)
383 return arch_atomic_add_return_relaxed(1, v);
385 #define arch_atomic_inc_return_relaxed arch_atomic_inc_return_relaxed
388 #else /* arch_atomic_inc_return_relaxed */
390 #ifndef arch_atomic_inc_return_acquire
391 static __always_inline int
392 arch_atomic_inc_return_acquire(atomic_t *v)
394 int ret = arch_atomic_inc_return_relaxed(v);
395 __atomic_acquire_fence();
398 #define arch_atomic_inc_return_acquire arch_atomic_inc_return_acquire
401 #ifndef arch_atomic_inc_return_release
402 static __always_inline int
403 arch_atomic_inc_return_release(atomic_t *v)
405 __atomic_release_fence();
406 return arch_atomic_inc_return_relaxed(v);
408 #define arch_atomic_inc_return_release arch_atomic_inc_return_release
411 #ifndef arch_atomic_inc_return
412 static __always_inline int
413 arch_atomic_inc_return(atomic_t *v)
416 __atomic_pre_full_fence();
417 ret = arch_atomic_inc_return_relaxed(v);
418 __atomic_post_full_fence();
421 #define arch_atomic_inc_return arch_atomic_inc_return
424 #endif /* arch_atomic_inc_return_relaxed */
426 #ifndef arch_atomic_fetch_inc_relaxed
427 #ifdef arch_atomic_fetch_inc
428 #define arch_atomic_fetch_inc_acquire arch_atomic_fetch_inc
429 #define arch_atomic_fetch_inc_release arch_atomic_fetch_inc
430 #define arch_atomic_fetch_inc_relaxed arch_atomic_fetch_inc
431 #endif /* arch_atomic_fetch_inc */
433 #ifndef arch_atomic_fetch_inc
434 static __always_inline int
435 arch_atomic_fetch_inc(atomic_t *v)
437 return arch_atomic_fetch_add(1, v);
439 #define arch_atomic_fetch_inc arch_atomic_fetch_inc
442 #ifndef arch_atomic_fetch_inc_acquire
443 static __always_inline int
444 arch_atomic_fetch_inc_acquire(atomic_t *v)
446 return arch_atomic_fetch_add_acquire(1, v);
448 #define arch_atomic_fetch_inc_acquire arch_atomic_fetch_inc_acquire
451 #ifndef arch_atomic_fetch_inc_release
452 static __always_inline int
453 arch_atomic_fetch_inc_release(atomic_t *v)
455 return arch_atomic_fetch_add_release(1, v);
457 #define arch_atomic_fetch_inc_release arch_atomic_fetch_inc_release
460 #ifndef arch_atomic_fetch_inc_relaxed
461 static __always_inline int
462 arch_atomic_fetch_inc_relaxed(atomic_t *v)
464 return arch_atomic_fetch_add_relaxed(1, v);
466 #define arch_atomic_fetch_inc_relaxed arch_atomic_fetch_inc_relaxed
469 #else /* arch_atomic_fetch_inc_relaxed */
471 #ifndef arch_atomic_fetch_inc_acquire
472 static __always_inline int
473 arch_atomic_fetch_inc_acquire(atomic_t *v)
475 int ret = arch_atomic_fetch_inc_relaxed(v);
476 __atomic_acquire_fence();
479 #define arch_atomic_fetch_inc_acquire arch_atomic_fetch_inc_acquire
482 #ifndef arch_atomic_fetch_inc_release
483 static __always_inline int
484 arch_atomic_fetch_inc_release(atomic_t *v)
486 __atomic_release_fence();
487 return arch_atomic_fetch_inc_relaxed(v);
489 #define arch_atomic_fetch_inc_release arch_atomic_fetch_inc_release
492 #ifndef arch_atomic_fetch_inc
493 static __always_inline int
494 arch_atomic_fetch_inc(atomic_t *v)
497 __atomic_pre_full_fence();
498 ret = arch_atomic_fetch_inc_relaxed(v);
499 __atomic_post_full_fence();
502 #define arch_atomic_fetch_inc arch_atomic_fetch_inc
505 #endif /* arch_atomic_fetch_inc_relaxed */
507 #ifndef arch_atomic_dec
508 static __always_inline void
509 arch_atomic_dec(atomic_t *v)
511 arch_atomic_sub(1, v);
513 #define arch_atomic_dec arch_atomic_dec
516 #ifndef arch_atomic_dec_return_relaxed
517 #ifdef arch_atomic_dec_return
518 #define arch_atomic_dec_return_acquire arch_atomic_dec_return
519 #define arch_atomic_dec_return_release arch_atomic_dec_return
520 #define arch_atomic_dec_return_relaxed arch_atomic_dec_return
521 #endif /* arch_atomic_dec_return */
523 #ifndef arch_atomic_dec_return
524 static __always_inline int
525 arch_atomic_dec_return(atomic_t *v)
527 return arch_atomic_sub_return(1, v);
529 #define arch_atomic_dec_return arch_atomic_dec_return
532 #ifndef arch_atomic_dec_return_acquire
533 static __always_inline int
534 arch_atomic_dec_return_acquire(atomic_t *v)
536 return arch_atomic_sub_return_acquire(1, v);
538 #define arch_atomic_dec_return_acquire arch_atomic_dec_return_acquire
541 #ifndef arch_atomic_dec_return_release
542 static __always_inline int
543 arch_atomic_dec_return_release(atomic_t *v)
545 return arch_atomic_sub_return_release(1, v);
547 #define arch_atomic_dec_return_release arch_atomic_dec_return_release
550 #ifndef arch_atomic_dec_return_relaxed
551 static __always_inline int
552 arch_atomic_dec_return_relaxed(atomic_t *v)
554 return arch_atomic_sub_return_relaxed(1, v);
556 #define arch_atomic_dec_return_relaxed arch_atomic_dec_return_relaxed
559 #else /* arch_atomic_dec_return_relaxed */
561 #ifndef arch_atomic_dec_return_acquire
562 static __always_inline int
563 arch_atomic_dec_return_acquire(atomic_t *v)
565 int ret = arch_atomic_dec_return_relaxed(v);
566 __atomic_acquire_fence();
569 #define arch_atomic_dec_return_acquire arch_atomic_dec_return_acquire
572 #ifndef arch_atomic_dec_return_release
573 static __always_inline int
574 arch_atomic_dec_return_release(atomic_t *v)
576 __atomic_release_fence();
577 return arch_atomic_dec_return_relaxed(v);
579 #define arch_atomic_dec_return_release arch_atomic_dec_return_release
582 #ifndef arch_atomic_dec_return
583 static __always_inline int
584 arch_atomic_dec_return(atomic_t *v)
587 __atomic_pre_full_fence();
588 ret = arch_atomic_dec_return_relaxed(v);
589 __atomic_post_full_fence();
592 #define arch_atomic_dec_return arch_atomic_dec_return
595 #endif /* arch_atomic_dec_return_relaxed */
597 #ifndef arch_atomic_fetch_dec_relaxed
598 #ifdef arch_atomic_fetch_dec
599 #define arch_atomic_fetch_dec_acquire arch_atomic_fetch_dec
600 #define arch_atomic_fetch_dec_release arch_atomic_fetch_dec
601 #define arch_atomic_fetch_dec_relaxed arch_atomic_fetch_dec
602 #endif /* arch_atomic_fetch_dec */
604 #ifndef arch_atomic_fetch_dec
605 static __always_inline int
606 arch_atomic_fetch_dec(atomic_t *v)
608 return arch_atomic_fetch_sub(1, v);
610 #define arch_atomic_fetch_dec arch_atomic_fetch_dec
613 #ifndef arch_atomic_fetch_dec_acquire
614 static __always_inline int
615 arch_atomic_fetch_dec_acquire(atomic_t *v)
617 return arch_atomic_fetch_sub_acquire(1, v);
619 #define arch_atomic_fetch_dec_acquire arch_atomic_fetch_dec_acquire
622 #ifndef arch_atomic_fetch_dec_release
623 static __always_inline int
624 arch_atomic_fetch_dec_release(atomic_t *v)
626 return arch_atomic_fetch_sub_release(1, v);
628 #define arch_atomic_fetch_dec_release arch_atomic_fetch_dec_release
631 #ifndef arch_atomic_fetch_dec_relaxed
632 static __always_inline int
633 arch_atomic_fetch_dec_relaxed(atomic_t *v)
635 return arch_atomic_fetch_sub_relaxed(1, v);
637 #define arch_atomic_fetch_dec_relaxed arch_atomic_fetch_dec_relaxed
640 #else /* arch_atomic_fetch_dec_relaxed */
642 #ifndef arch_atomic_fetch_dec_acquire
643 static __always_inline int
644 arch_atomic_fetch_dec_acquire(atomic_t *v)
646 int ret = arch_atomic_fetch_dec_relaxed(v);
647 __atomic_acquire_fence();
650 #define arch_atomic_fetch_dec_acquire arch_atomic_fetch_dec_acquire
653 #ifndef arch_atomic_fetch_dec_release
654 static __always_inline int
655 arch_atomic_fetch_dec_release(atomic_t *v)
657 __atomic_release_fence();
658 return arch_atomic_fetch_dec_relaxed(v);
660 #define arch_atomic_fetch_dec_release arch_atomic_fetch_dec_release
663 #ifndef arch_atomic_fetch_dec
664 static __always_inline int
665 arch_atomic_fetch_dec(atomic_t *v)
668 __atomic_pre_full_fence();
669 ret = arch_atomic_fetch_dec_relaxed(v);
670 __atomic_post_full_fence();
673 #define arch_atomic_fetch_dec arch_atomic_fetch_dec
676 #endif /* arch_atomic_fetch_dec_relaxed */
678 #ifndef arch_atomic_fetch_and_relaxed
679 #define arch_atomic_fetch_and_acquire arch_atomic_fetch_and
680 #define arch_atomic_fetch_and_release arch_atomic_fetch_and
681 #define arch_atomic_fetch_and_relaxed arch_atomic_fetch_and
682 #else /* arch_atomic_fetch_and_relaxed */
684 #ifndef arch_atomic_fetch_and_acquire
685 static __always_inline int
686 arch_atomic_fetch_and_acquire(int i, atomic_t *v)
688 int ret = arch_atomic_fetch_and_relaxed(i, v);
689 __atomic_acquire_fence();
692 #define arch_atomic_fetch_and_acquire arch_atomic_fetch_and_acquire
695 #ifndef arch_atomic_fetch_and_release
696 static __always_inline int
697 arch_atomic_fetch_and_release(int i, atomic_t *v)
699 __atomic_release_fence();
700 return arch_atomic_fetch_and_relaxed(i, v);
702 #define arch_atomic_fetch_and_release arch_atomic_fetch_and_release
705 #ifndef arch_atomic_fetch_and
706 static __always_inline int
707 arch_atomic_fetch_and(int i, atomic_t *v)
710 __atomic_pre_full_fence();
711 ret = arch_atomic_fetch_and_relaxed(i, v);
712 __atomic_post_full_fence();
715 #define arch_atomic_fetch_and arch_atomic_fetch_and
718 #endif /* arch_atomic_fetch_and_relaxed */
720 #ifndef arch_atomic_andnot
721 static __always_inline void
722 arch_atomic_andnot(int i, atomic_t *v)
724 arch_atomic_and(~i, v);
726 #define arch_atomic_andnot arch_atomic_andnot
729 #ifndef arch_atomic_fetch_andnot_relaxed
730 #ifdef arch_atomic_fetch_andnot
731 #define arch_atomic_fetch_andnot_acquire arch_atomic_fetch_andnot
732 #define arch_atomic_fetch_andnot_release arch_atomic_fetch_andnot
733 #define arch_atomic_fetch_andnot_relaxed arch_atomic_fetch_andnot
734 #endif /* arch_atomic_fetch_andnot */
736 #ifndef arch_atomic_fetch_andnot
737 static __always_inline int
738 arch_atomic_fetch_andnot(int i, atomic_t *v)
740 return arch_atomic_fetch_and(~i, v);
742 #define arch_atomic_fetch_andnot arch_atomic_fetch_andnot
745 #ifndef arch_atomic_fetch_andnot_acquire
746 static __always_inline int
747 arch_atomic_fetch_andnot_acquire(int i, atomic_t *v)
749 return arch_atomic_fetch_and_acquire(~i, v);
751 #define arch_atomic_fetch_andnot_acquire arch_atomic_fetch_andnot_acquire
754 #ifndef arch_atomic_fetch_andnot_release
755 static __always_inline int
756 arch_atomic_fetch_andnot_release(int i, atomic_t *v)
758 return arch_atomic_fetch_and_release(~i, v);
760 #define arch_atomic_fetch_andnot_release arch_atomic_fetch_andnot_release
763 #ifndef arch_atomic_fetch_andnot_relaxed
764 static __always_inline int
765 arch_atomic_fetch_andnot_relaxed(int i, atomic_t *v)
767 return arch_atomic_fetch_and_relaxed(~i, v);
769 #define arch_atomic_fetch_andnot_relaxed arch_atomic_fetch_andnot_relaxed
772 #else /* arch_atomic_fetch_andnot_relaxed */
774 #ifndef arch_atomic_fetch_andnot_acquire
775 static __always_inline int
776 arch_atomic_fetch_andnot_acquire(int i, atomic_t *v)
778 int ret = arch_atomic_fetch_andnot_relaxed(i, v);
779 __atomic_acquire_fence();
782 #define arch_atomic_fetch_andnot_acquire arch_atomic_fetch_andnot_acquire
785 #ifndef arch_atomic_fetch_andnot_release
786 static __always_inline int
787 arch_atomic_fetch_andnot_release(int i, atomic_t *v)
789 __atomic_release_fence();
790 return arch_atomic_fetch_andnot_relaxed(i, v);
792 #define arch_atomic_fetch_andnot_release arch_atomic_fetch_andnot_release
795 #ifndef arch_atomic_fetch_andnot
796 static __always_inline int
797 arch_atomic_fetch_andnot(int i, atomic_t *v)
800 __atomic_pre_full_fence();
801 ret = arch_atomic_fetch_andnot_relaxed(i, v);
802 __atomic_post_full_fence();
805 #define arch_atomic_fetch_andnot arch_atomic_fetch_andnot
808 #endif /* arch_atomic_fetch_andnot_relaxed */
810 #ifndef arch_atomic_fetch_or_relaxed
811 #define arch_atomic_fetch_or_acquire arch_atomic_fetch_or
812 #define arch_atomic_fetch_or_release arch_atomic_fetch_or
813 #define arch_atomic_fetch_or_relaxed arch_atomic_fetch_or
814 #else /* arch_atomic_fetch_or_relaxed */
816 #ifndef arch_atomic_fetch_or_acquire
817 static __always_inline int
818 arch_atomic_fetch_or_acquire(int i, atomic_t *v)
820 int ret = arch_atomic_fetch_or_relaxed(i, v);
821 __atomic_acquire_fence();
824 #define arch_atomic_fetch_or_acquire arch_atomic_fetch_or_acquire
827 #ifndef arch_atomic_fetch_or_release
828 static __always_inline int
829 arch_atomic_fetch_or_release(int i, atomic_t *v)
831 __atomic_release_fence();
832 return arch_atomic_fetch_or_relaxed(i, v);
834 #define arch_atomic_fetch_or_release arch_atomic_fetch_or_release
837 #ifndef arch_atomic_fetch_or
838 static __always_inline int
839 arch_atomic_fetch_or(int i, atomic_t *v)
842 __atomic_pre_full_fence();
843 ret = arch_atomic_fetch_or_relaxed(i, v);
844 __atomic_post_full_fence();
847 #define arch_atomic_fetch_or arch_atomic_fetch_or
850 #endif /* arch_atomic_fetch_or_relaxed */
852 #ifndef arch_atomic_fetch_xor_relaxed
853 #define arch_atomic_fetch_xor_acquire arch_atomic_fetch_xor
854 #define arch_atomic_fetch_xor_release arch_atomic_fetch_xor
855 #define arch_atomic_fetch_xor_relaxed arch_atomic_fetch_xor
856 #else /* arch_atomic_fetch_xor_relaxed */
858 #ifndef arch_atomic_fetch_xor_acquire
859 static __always_inline int
860 arch_atomic_fetch_xor_acquire(int i, atomic_t *v)
862 int ret = arch_atomic_fetch_xor_relaxed(i, v);
863 __atomic_acquire_fence();
866 #define arch_atomic_fetch_xor_acquire arch_atomic_fetch_xor_acquire
869 #ifndef arch_atomic_fetch_xor_release
870 static __always_inline int
871 arch_atomic_fetch_xor_release(int i, atomic_t *v)
873 __atomic_release_fence();
874 return arch_atomic_fetch_xor_relaxed(i, v);
876 #define arch_atomic_fetch_xor_release arch_atomic_fetch_xor_release
879 #ifndef arch_atomic_fetch_xor
880 static __always_inline int
881 arch_atomic_fetch_xor(int i, atomic_t *v)
884 __atomic_pre_full_fence();
885 ret = arch_atomic_fetch_xor_relaxed(i, v);
886 __atomic_post_full_fence();
889 #define arch_atomic_fetch_xor arch_atomic_fetch_xor
892 #endif /* arch_atomic_fetch_xor_relaxed */
894 #ifndef arch_atomic_xchg_relaxed
895 #define arch_atomic_xchg_acquire arch_atomic_xchg
896 #define arch_atomic_xchg_release arch_atomic_xchg
897 #define arch_atomic_xchg_relaxed arch_atomic_xchg
898 #else /* arch_atomic_xchg_relaxed */
900 #ifndef arch_atomic_xchg_acquire
901 static __always_inline int
902 arch_atomic_xchg_acquire(atomic_t *v, int i)
904 int ret = arch_atomic_xchg_relaxed(v, i);
905 __atomic_acquire_fence();
908 #define arch_atomic_xchg_acquire arch_atomic_xchg_acquire
911 #ifndef arch_atomic_xchg_release
912 static __always_inline int
913 arch_atomic_xchg_release(atomic_t *v, int i)
915 __atomic_release_fence();
916 return arch_atomic_xchg_relaxed(v, i);
918 #define arch_atomic_xchg_release arch_atomic_xchg_release
921 #ifndef arch_atomic_xchg
922 static __always_inline int
923 arch_atomic_xchg(atomic_t *v, int i)
926 __atomic_pre_full_fence();
927 ret = arch_atomic_xchg_relaxed(v, i);
928 __atomic_post_full_fence();
931 #define arch_atomic_xchg arch_atomic_xchg
934 #endif /* arch_atomic_xchg_relaxed */
936 #ifndef arch_atomic_cmpxchg_relaxed
937 #define arch_atomic_cmpxchg_acquire arch_atomic_cmpxchg
938 #define arch_atomic_cmpxchg_release arch_atomic_cmpxchg
939 #define arch_atomic_cmpxchg_relaxed arch_atomic_cmpxchg
940 #else /* arch_atomic_cmpxchg_relaxed */
942 #ifndef arch_atomic_cmpxchg_acquire
943 static __always_inline int
944 arch_atomic_cmpxchg_acquire(atomic_t *v, int old, int new)
946 int ret = arch_atomic_cmpxchg_relaxed(v, old, new);
947 __atomic_acquire_fence();
950 #define arch_atomic_cmpxchg_acquire arch_atomic_cmpxchg_acquire
953 #ifndef arch_atomic_cmpxchg_release
954 static __always_inline int
955 arch_atomic_cmpxchg_release(atomic_t *v, int old, int new)
957 __atomic_release_fence();
958 return arch_atomic_cmpxchg_relaxed(v, old, new);
960 #define arch_atomic_cmpxchg_release arch_atomic_cmpxchg_release
963 #ifndef arch_atomic_cmpxchg
964 static __always_inline int
965 arch_atomic_cmpxchg(atomic_t *v, int old, int new)
968 __atomic_pre_full_fence();
969 ret = arch_atomic_cmpxchg_relaxed(v, old, new);
970 __atomic_post_full_fence();
973 #define arch_atomic_cmpxchg arch_atomic_cmpxchg
976 #endif /* arch_atomic_cmpxchg_relaxed */
978 #ifndef arch_atomic_try_cmpxchg_relaxed
979 #ifdef arch_atomic_try_cmpxchg
980 #define arch_atomic_try_cmpxchg_acquire arch_atomic_try_cmpxchg
981 #define arch_atomic_try_cmpxchg_release arch_atomic_try_cmpxchg
982 #define arch_atomic_try_cmpxchg_relaxed arch_atomic_try_cmpxchg
983 #endif /* arch_atomic_try_cmpxchg */
985 #ifndef arch_atomic_try_cmpxchg
986 static __always_inline bool
987 arch_atomic_try_cmpxchg(atomic_t *v, int *old, int new)
990 r = arch_atomic_cmpxchg(v, o, new);
991 if (unlikely(r != o))
993 return likely(r == o);
995 #define arch_atomic_try_cmpxchg arch_atomic_try_cmpxchg
998 #ifndef arch_atomic_try_cmpxchg_acquire
999 static __always_inline bool
1000 arch_atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
1003 r = arch_atomic_cmpxchg_acquire(v, o, new);
1004 if (unlikely(r != o))
1006 return likely(r == o);
1008 #define arch_atomic_try_cmpxchg_acquire arch_atomic_try_cmpxchg_acquire
1011 #ifndef arch_atomic_try_cmpxchg_release
1012 static __always_inline bool
1013 arch_atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
1016 r = arch_atomic_cmpxchg_release(v, o, new);
1017 if (unlikely(r != o))
1019 return likely(r == o);
1021 #define arch_atomic_try_cmpxchg_release arch_atomic_try_cmpxchg_release
1024 #ifndef arch_atomic_try_cmpxchg_relaxed
1025 static __always_inline bool
1026 arch_atomic_try_cmpxchg_relaxed(atomic_t *v, int *old, int new)
1029 r = arch_atomic_cmpxchg_relaxed(v, o, new);
1030 if (unlikely(r != o))
1032 return likely(r == o);
1034 #define arch_atomic_try_cmpxchg_relaxed arch_atomic_try_cmpxchg_relaxed
1037 #else /* arch_atomic_try_cmpxchg_relaxed */
1039 #ifndef arch_atomic_try_cmpxchg_acquire
1040 static __always_inline bool
1041 arch_atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
1043 bool ret = arch_atomic_try_cmpxchg_relaxed(v, old, new);
1044 __atomic_acquire_fence();
1047 #define arch_atomic_try_cmpxchg_acquire arch_atomic_try_cmpxchg_acquire
1050 #ifndef arch_atomic_try_cmpxchg_release
1051 static __always_inline bool
1052 arch_atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
1054 __atomic_release_fence();
1055 return arch_atomic_try_cmpxchg_relaxed(v, old, new);
1057 #define arch_atomic_try_cmpxchg_release arch_atomic_try_cmpxchg_release
1060 #ifndef arch_atomic_try_cmpxchg
1061 static __always_inline bool
1062 arch_atomic_try_cmpxchg(atomic_t *v, int *old, int new)
1065 __atomic_pre_full_fence();
1066 ret = arch_atomic_try_cmpxchg_relaxed(v, old, new);
1067 __atomic_post_full_fence();
1070 #define arch_atomic_try_cmpxchg arch_atomic_try_cmpxchg
1073 #endif /* arch_atomic_try_cmpxchg_relaxed */
1075 #ifndef arch_atomic_sub_and_test
1077 * arch_atomic_sub_and_test - subtract value from variable and test result
1078 * @i: integer value to subtract
1079 * @v: pointer of type atomic_t
1081 * Atomically subtracts @i from @v and returns
1082 * true if the result is zero, or false for all
1085 static __always_inline bool
1086 arch_atomic_sub_and_test(int i, atomic_t *v)
1088 return arch_atomic_sub_return(i, v) == 0;
1090 #define arch_atomic_sub_and_test arch_atomic_sub_and_test
1093 #ifndef arch_atomic_dec_and_test
1095 * arch_atomic_dec_and_test - decrement and test
1096 * @v: pointer of type atomic_t
1098 * Atomically decrements @v by 1 and
1099 * returns true if the result is 0, or false for all other
1102 static __always_inline bool
1103 arch_atomic_dec_and_test(atomic_t *v)
1105 return arch_atomic_dec_return(v) == 0;
1107 #define arch_atomic_dec_and_test arch_atomic_dec_and_test
1110 #ifndef arch_atomic_inc_and_test
1112 * arch_atomic_inc_and_test - increment and test
1113 * @v: pointer of type atomic_t
1115 * Atomically increments @v by 1
1116 * and returns true if the result is zero, or false for all
1119 static __always_inline bool
1120 arch_atomic_inc_and_test(atomic_t *v)
1122 return arch_atomic_inc_return(v) == 0;
1124 #define arch_atomic_inc_and_test arch_atomic_inc_and_test
1127 #ifndef arch_atomic_add_negative
1129 * arch_atomic_add_negative - add and test if negative
1130 * @i: integer value to add
1131 * @v: pointer of type atomic_t
1133 * Atomically adds @i to @v and returns true
1134 * if the result is negative, or false when
1135 * result is greater than or equal to zero.
1137 static __always_inline bool
1138 arch_atomic_add_negative(int i, atomic_t *v)
1140 return arch_atomic_add_return(i, v) < 0;
1142 #define arch_atomic_add_negative arch_atomic_add_negative
1145 #ifndef arch_atomic_fetch_add_unless
1147 * arch_atomic_fetch_add_unless - add unless the number is already a given value
1148 * @v: pointer of type atomic_t
1149 * @a: the amount to add to v...
1150 * @u: ...unless v is equal to u.
1152 * Atomically adds @a to @v, so long as @v was not already @u.
1153 * Returns original value of @v
1155 static __always_inline int
1156 arch_atomic_fetch_add_unless(atomic_t *v, int a, int u)
1158 int c = arch_atomic_read(v);
1161 if (unlikely(c == u))
1163 } while (!arch_atomic_try_cmpxchg(v, &c, c + a));
1167 #define arch_atomic_fetch_add_unless arch_atomic_fetch_add_unless
1170 #ifndef arch_atomic_add_unless
1172 * arch_atomic_add_unless - add unless the number is already a given value
1173 * @v: pointer of type atomic_t
1174 * @a: the amount to add to v...
1175 * @u: ...unless v is equal to u.
1177 * Atomically adds @a to @v, if @v was not already @u.
1178 * Returns true if the addition was done.
1180 static __always_inline bool
1181 arch_atomic_add_unless(atomic_t *v, int a, int u)
1183 return arch_atomic_fetch_add_unless(v, a, u) != u;
1185 #define arch_atomic_add_unless arch_atomic_add_unless
1188 #ifndef arch_atomic_inc_not_zero
1190 * arch_atomic_inc_not_zero - increment unless the number is zero
1191 * @v: pointer of type atomic_t
1193 * Atomically increments @v by 1, if @v is non-zero.
1194 * Returns true if the increment was done.
1196 static __always_inline bool
1197 arch_atomic_inc_not_zero(atomic_t *v)
1199 return arch_atomic_add_unless(v, 1, 0);
1201 #define arch_atomic_inc_not_zero arch_atomic_inc_not_zero
1204 #ifndef arch_atomic_inc_unless_negative
1205 static __always_inline bool
1206 arch_atomic_inc_unless_negative(atomic_t *v)
1208 int c = arch_atomic_read(v);
1211 if (unlikely(c < 0))
1213 } while (!arch_atomic_try_cmpxchg(v, &c, c + 1));
1217 #define arch_atomic_inc_unless_negative arch_atomic_inc_unless_negative
1220 #ifndef arch_atomic_dec_unless_positive
1221 static __always_inline bool
1222 arch_atomic_dec_unless_positive(atomic_t *v)
1224 int c = arch_atomic_read(v);
1227 if (unlikely(c > 0))
1229 } while (!arch_atomic_try_cmpxchg(v, &c, c - 1));
1233 #define arch_atomic_dec_unless_positive arch_atomic_dec_unless_positive
1236 #ifndef arch_atomic_dec_if_positive
1237 static __always_inline int
1238 arch_atomic_dec_if_positive(atomic_t *v)
1240 int dec, c = arch_atomic_read(v);
1244 if (unlikely(dec < 0))
1246 } while (!arch_atomic_try_cmpxchg(v, &c, dec));
1250 #define arch_atomic_dec_if_positive arch_atomic_dec_if_positive
#ifdef CONFIG_GENERIC_ATOMIC64
#include <asm-generic/atomic64.h>
#endif
1257 #ifndef arch_atomic64_read_acquire
1258 static __always_inline s64
1259 arch_atomic64_read_acquire(const atomic64_t *v)
1261 return smp_load_acquire(&(v)->counter);
1263 #define arch_atomic64_read_acquire arch_atomic64_read_acquire
1266 #ifndef arch_atomic64_set_release
1267 static __always_inline void
1268 arch_atomic64_set_release(atomic64_t *v, s64 i)
1270 smp_store_release(&(v)->counter, i);
1272 #define arch_atomic64_set_release arch_atomic64_set_release
#ifndef arch_atomic64_add_return_relaxed
#define arch_atomic64_add_return_acquire arch_atomic64_add_return
#define arch_atomic64_add_return_release arch_atomic64_add_return
#define arch_atomic64_add_return_relaxed arch_atomic64_add_return
#else /* arch_atomic64_add_return_relaxed */

#ifndef arch_atomic64_add_return_acquire
static __always_inline s64
arch_atomic64_add_return_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_add_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_add_return_acquire arch_atomic64_add_return_acquire
#endif

#ifndef arch_atomic64_add_return_release
static __always_inline s64
arch_atomic64_add_return_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_add_return_relaxed(i, v);
}
#define arch_atomic64_add_return_release arch_atomic64_add_return_release
#endif

#ifndef arch_atomic64_add_return
static __always_inline s64
arch_atomic64_add_return(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_add_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_add_return arch_atomic64_add_return
#endif

#endif /* arch_atomic64_add_return_relaxed */
#ifndef arch_atomic64_fetch_add_relaxed
#define arch_atomic64_fetch_add_acquire arch_atomic64_fetch_add
#define arch_atomic64_fetch_add_release arch_atomic64_fetch_add
#define arch_atomic64_fetch_add_relaxed arch_atomic64_fetch_add
#else /* arch_atomic64_fetch_add_relaxed */

#ifndef arch_atomic64_fetch_add_acquire
static __always_inline s64
arch_atomic64_fetch_add_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_fetch_add_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_fetch_add_acquire arch_atomic64_fetch_add_acquire
#endif

#ifndef arch_atomic64_fetch_add_release
static __always_inline s64
arch_atomic64_fetch_add_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_fetch_add_relaxed(i, v);
}
#define arch_atomic64_fetch_add_release arch_atomic64_fetch_add_release
#endif

#ifndef arch_atomic64_fetch_add
static __always_inline s64
arch_atomic64_fetch_add(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_fetch_add_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_fetch_add arch_atomic64_fetch_add
#endif

#endif /* arch_atomic64_fetch_add_relaxed */
#ifndef arch_atomic64_sub_return_relaxed
#define arch_atomic64_sub_return_acquire arch_atomic64_sub_return
#define arch_atomic64_sub_return_release arch_atomic64_sub_return
#define arch_atomic64_sub_return_relaxed arch_atomic64_sub_return
#else /* arch_atomic64_sub_return_relaxed */

#ifndef arch_atomic64_sub_return_acquire
static __always_inline s64
arch_atomic64_sub_return_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_sub_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_sub_return_acquire arch_atomic64_sub_return_acquire
#endif

#ifndef arch_atomic64_sub_return_release
static __always_inline s64
arch_atomic64_sub_return_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_sub_return_relaxed(i, v);
}
#define arch_atomic64_sub_return_release arch_atomic64_sub_return_release
#endif

#ifndef arch_atomic64_sub_return
static __always_inline s64
arch_atomic64_sub_return(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_sub_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_sub_return arch_atomic64_sub_return
#endif

#endif /* arch_atomic64_sub_return_relaxed */
#ifndef arch_atomic64_fetch_sub_relaxed
#define arch_atomic64_fetch_sub_acquire arch_atomic64_fetch_sub
#define arch_atomic64_fetch_sub_release arch_atomic64_fetch_sub
#define arch_atomic64_fetch_sub_relaxed arch_atomic64_fetch_sub
#else /* arch_atomic64_fetch_sub_relaxed */

#ifndef arch_atomic64_fetch_sub_acquire
static __always_inline s64
arch_atomic64_fetch_sub_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_fetch_sub_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_fetch_sub_acquire arch_atomic64_fetch_sub_acquire
#endif

#ifndef arch_atomic64_fetch_sub_release
static __always_inline s64
arch_atomic64_fetch_sub_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_fetch_sub_relaxed(i, v);
}
#define arch_atomic64_fetch_sub_release arch_atomic64_fetch_sub_release
#endif

#ifndef arch_atomic64_fetch_sub
static __always_inline s64
arch_atomic64_fetch_sub(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_fetch_sub_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_fetch_sub arch_atomic64_fetch_sub
#endif

#endif /* arch_atomic64_fetch_sub_relaxed */
1443 #ifndef arch_atomic64_inc
1444 static __always_inline void
1445 arch_atomic64_inc(atomic64_t *v)
1447 arch_atomic64_add(1, v);
1449 #define arch_atomic64_inc arch_atomic64_inc
1452 #ifndef arch_atomic64_inc_return_relaxed
1453 #ifdef arch_atomic64_inc_return
1454 #define arch_atomic64_inc_return_acquire arch_atomic64_inc_return
1455 #define arch_atomic64_inc_return_release arch_atomic64_inc_return
1456 #define arch_atomic64_inc_return_relaxed arch_atomic64_inc_return
1457 #endif /* arch_atomic64_inc_return */
1459 #ifndef arch_atomic64_inc_return
1460 static __always_inline s64
1461 arch_atomic64_inc_return(atomic64_t *v)
1463 return arch_atomic64_add_return(1, v);
1465 #define arch_atomic64_inc_return arch_atomic64_inc_return
1468 #ifndef arch_atomic64_inc_return_acquire
1469 static __always_inline s64
1470 arch_atomic64_inc_return_acquire(atomic64_t *v)
1472 return arch_atomic64_add_return_acquire(1, v);
1474 #define arch_atomic64_inc_return_acquire arch_atomic64_inc_return_acquire
1477 #ifndef arch_atomic64_inc_return_release
1478 static __always_inline s64
1479 arch_atomic64_inc_return_release(atomic64_t *v)
1481 return arch_atomic64_add_return_release(1, v);
1483 #define arch_atomic64_inc_return_release arch_atomic64_inc_return_release
1486 #ifndef arch_atomic64_inc_return_relaxed
1487 static __always_inline s64
1488 arch_atomic64_inc_return_relaxed(atomic64_t *v)
1490 return arch_atomic64_add_return_relaxed(1, v);
1492 #define arch_atomic64_inc_return_relaxed arch_atomic64_inc_return_relaxed
1495 #else /* arch_atomic64_inc_return_relaxed */
1497 #ifndef arch_atomic64_inc_return_acquire
1498 static __always_inline s64
1499 arch_atomic64_inc_return_acquire(atomic64_t *v)
1501 s64 ret = arch_atomic64_inc_return_relaxed(v);
1502 __atomic_acquire_fence();
1505 #define arch_atomic64_inc_return_acquire arch_atomic64_inc_return_acquire
1508 #ifndef arch_atomic64_inc_return_release
1509 static __always_inline s64
1510 arch_atomic64_inc_return_release(atomic64_t *v)
1512 __atomic_release_fence();
1513 return arch_atomic64_inc_return_relaxed(v);
1515 #define arch_atomic64_inc_return_release arch_atomic64_inc_return_release
1518 #ifndef arch_atomic64_inc_return
1519 static __always_inline s64
1520 arch_atomic64_inc_return(atomic64_t *v)
1523 __atomic_pre_full_fence();
1524 ret = arch_atomic64_inc_return_relaxed(v);
1525 __atomic_post_full_fence();
1528 #define arch_atomic64_inc_return arch_atomic64_inc_return
1531 #endif /* arch_atomic64_inc_return_relaxed */
1533 #ifndef arch_atomic64_fetch_inc_relaxed
1534 #ifdef arch_atomic64_fetch_inc
1535 #define arch_atomic64_fetch_inc_acquire arch_atomic64_fetch_inc
1536 #define arch_atomic64_fetch_inc_release arch_atomic64_fetch_inc
1537 #define arch_atomic64_fetch_inc_relaxed arch_atomic64_fetch_inc
1538 #endif /* arch_atomic64_fetch_inc */
1540 #ifndef arch_atomic64_fetch_inc
1541 static __always_inline s64
1542 arch_atomic64_fetch_inc(atomic64_t *v)
1544 return arch_atomic64_fetch_add(1, v);
1546 #define arch_atomic64_fetch_inc arch_atomic64_fetch_inc
1549 #ifndef arch_atomic64_fetch_inc_acquire
1550 static __always_inline s64
1551 arch_atomic64_fetch_inc_acquire(atomic64_t *v)
1553 return arch_atomic64_fetch_add_acquire(1, v);
1555 #define arch_atomic64_fetch_inc_acquire arch_atomic64_fetch_inc_acquire
1558 #ifndef arch_atomic64_fetch_inc_release
1559 static __always_inline s64
1560 arch_atomic64_fetch_inc_release(atomic64_t *v)
1562 return arch_atomic64_fetch_add_release(1, v);
1564 #define arch_atomic64_fetch_inc_release arch_atomic64_fetch_inc_release
1567 #ifndef arch_atomic64_fetch_inc_relaxed
1568 static __always_inline s64
1569 arch_atomic64_fetch_inc_relaxed(atomic64_t *v)
1571 return arch_atomic64_fetch_add_relaxed(1, v);
1573 #define arch_atomic64_fetch_inc_relaxed arch_atomic64_fetch_inc_relaxed
1576 #else /* arch_atomic64_fetch_inc_relaxed */
1578 #ifndef arch_atomic64_fetch_inc_acquire
1579 static __always_inline s64
1580 arch_atomic64_fetch_inc_acquire(atomic64_t *v)
1582 s64 ret = arch_atomic64_fetch_inc_relaxed(v);
1583 __atomic_acquire_fence();
1586 #define arch_atomic64_fetch_inc_acquire arch_atomic64_fetch_inc_acquire
1589 #ifndef arch_atomic64_fetch_inc_release
1590 static __always_inline s64
1591 arch_atomic64_fetch_inc_release(atomic64_t *v)
1593 __atomic_release_fence();
1594 return arch_atomic64_fetch_inc_relaxed(v);
1596 #define arch_atomic64_fetch_inc_release arch_atomic64_fetch_inc_release
1599 #ifndef arch_atomic64_fetch_inc
1600 static __always_inline s64
1601 arch_atomic64_fetch_inc(atomic64_t *v)
1604 __atomic_pre_full_fence();
1605 ret = arch_atomic64_fetch_inc_relaxed(v);
1606 __atomic_post_full_fence();
1609 #define arch_atomic64_fetch_inc arch_atomic64_fetch_inc
1612 #endif /* arch_atomic64_fetch_inc_relaxed */
1614 #ifndef arch_atomic64_dec
1615 static __always_inline void
1616 arch_atomic64_dec(atomic64_t *v)
1618 arch_atomic64_sub(1, v);
1620 #define arch_atomic64_dec arch_atomic64_dec
1623 #ifndef arch_atomic64_dec_return_relaxed
1624 #ifdef arch_atomic64_dec_return
1625 #define arch_atomic64_dec_return_acquire arch_atomic64_dec_return
1626 #define arch_atomic64_dec_return_release arch_atomic64_dec_return
1627 #define arch_atomic64_dec_return_relaxed arch_atomic64_dec_return
1628 #endif /* arch_atomic64_dec_return */
1630 #ifndef arch_atomic64_dec_return
1631 static __always_inline s64
1632 arch_atomic64_dec_return(atomic64_t *v)
1634 return arch_atomic64_sub_return(1, v);
1636 #define arch_atomic64_dec_return arch_atomic64_dec_return
1639 #ifndef arch_atomic64_dec_return_acquire
1640 static __always_inline s64
1641 arch_atomic64_dec_return_acquire(atomic64_t *v)
1643 return arch_atomic64_sub_return_acquire(1, v);
1645 #define arch_atomic64_dec_return_acquire arch_atomic64_dec_return_acquire
1648 #ifndef arch_atomic64_dec_return_release
1649 static __always_inline s64
1650 arch_atomic64_dec_return_release(atomic64_t *v)
1652 return arch_atomic64_sub_return_release(1, v);
1654 #define arch_atomic64_dec_return_release arch_atomic64_dec_return_release
1657 #ifndef arch_atomic64_dec_return_relaxed
1658 static __always_inline s64
1659 arch_atomic64_dec_return_relaxed(atomic64_t *v)
1661 return arch_atomic64_sub_return_relaxed(1, v);
1663 #define arch_atomic64_dec_return_relaxed arch_atomic64_dec_return_relaxed
1666 #else /* arch_atomic64_dec_return_relaxed */
1668 #ifndef arch_atomic64_dec_return_acquire
1669 static __always_inline s64
1670 arch_atomic64_dec_return_acquire(atomic64_t *v)
1672 s64 ret = arch_atomic64_dec_return_relaxed(v);
1673 __atomic_acquire_fence();
1676 #define arch_atomic64_dec_return_acquire arch_atomic64_dec_return_acquire
1679 #ifndef arch_atomic64_dec_return_release
1680 static __always_inline s64
1681 arch_atomic64_dec_return_release(atomic64_t *v)
1683 __atomic_release_fence();
1684 return arch_atomic64_dec_return_relaxed(v);
1686 #define arch_atomic64_dec_return_release arch_atomic64_dec_return_release
1689 #ifndef arch_atomic64_dec_return
1690 static __always_inline s64
1691 arch_atomic64_dec_return(atomic64_t *v)
1694 __atomic_pre_full_fence();
1695 ret = arch_atomic64_dec_return_relaxed(v);
1696 __atomic_post_full_fence();
1699 #define arch_atomic64_dec_return arch_atomic64_dec_return
1702 #endif /* arch_atomic64_dec_return_relaxed */
1704 #ifndef arch_atomic64_fetch_dec_relaxed
1705 #ifdef arch_atomic64_fetch_dec
1706 #define arch_atomic64_fetch_dec_acquire arch_atomic64_fetch_dec
1707 #define arch_atomic64_fetch_dec_release arch_atomic64_fetch_dec
1708 #define arch_atomic64_fetch_dec_relaxed arch_atomic64_fetch_dec
1709 #endif /* arch_atomic64_fetch_dec */
1711 #ifndef arch_atomic64_fetch_dec
1712 static __always_inline s64
1713 arch_atomic64_fetch_dec(atomic64_t *v)
1715 return arch_atomic64_fetch_sub(1, v);
1717 #define arch_atomic64_fetch_dec arch_atomic64_fetch_dec
1720 #ifndef arch_atomic64_fetch_dec_acquire
1721 static __always_inline s64
1722 arch_atomic64_fetch_dec_acquire(atomic64_t *v)
1724 return arch_atomic64_fetch_sub_acquire(1, v);
1726 #define arch_atomic64_fetch_dec_acquire arch_atomic64_fetch_dec_acquire
1729 #ifndef arch_atomic64_fetch_dec_release
1730 static __always_inline s64
1731 arch_atomic64_fetch_dec_release(atomic64_t *v)
1733 return arch_atomic64_fetch_sub_release(1, v);
1735 #define arch_atomic64_fetch_dec_release arch_atomic64_fetch_dec_release
1738 #ifndef arch_atomic64_fetch_dec_relaxed
1739 static __always_inline s64
1740 arch_atomic64_fetch_dec_relaxed(atomic64_t *v)
1742 return arch_atomic64_fetch_sub_relaxed(1, v);
1744 #define arch_atomic64_fetch_dec_relaxed arch_atomic64_fetch_dec_relaxed
1747 #else /* arch_atomic64_fetch_dec_relaxed */
1749 #ifndef arch_atomic64_fetch_dec_acquire
1750 static __always_inline s64
1751 arch_atomic64_fetch_dec_acquire(atomic64_t *v)
1753 s64 ret = arch_atomic64_fetch_dec_relaxed(v);
1754 __atomic_acquire_fence();
1757 #define arch_atomic64_fetch_dec_acquire arch_atomic64_fetch_dec_acquire
1760 #ifndef arch_atomic64_fetch_dec_release
1761 static __always_inline s64
1762 arch_atomic64_fetch_dec_release(atomic64_t *v)
1764 __atomic_release_fence();
1765 return arch_atomic64_fetch_dec_relaxed(v);
1767 #define arch_atomic64_fetch_dec_release arch_atomic64_fetch_dec_release
1770 #ifndef arch_atomic64_fetch_dec
1771 static __always_inline s64
1772 arch_atomic64_fetch_dec(atomic64_t *v)
1775 __atomic_pre_full_fence();
1776 ret = arch_atomic64_fetch_dec_relaxed(v);
1777 __atomic_post_full_fence();
1780 #define arch_atomic64_fetch_dec arch_atomic64_fetch_dec
1783 #endif /* arch_atomic64_fetch_dec_relaxed */
#ifndef arch_atomic64_fetch_and_relaxed
#define arch_atomic64_fetch_and_acquire arch_atomic64_fetch_and
#define arch_atomic64_fetch_and_release arch_atomic64_fetch_and
#define arch_atomic64_fetch_and_relaxed arch_atomic64_fetch_and
#else /* arch_atomic64_fetch_and_relaxed */

#ifndef arch_atomic64_fetch_and_acquire
static __always_inline s64
arch_atomic64_fetch_and_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_fetch_and_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_fetch_and_acquire arch_atomic64_fetch_and_acquire
#endif

#ifndef arch_atomic64_fetch_and_release
static __always_inline s64
arch_atomic64_fetch_and_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_fetch_and_relaxed(i, v);
}
#define arch_atomic64_fetch_and_release arch_atomic64_fetch_and_release
#endif

#ifndef arch_atomic64_fetch_and
static __always_inline s64
arch_atomic64_fetch_and(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_fetch_and_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_fetch_and arch_atomic64_fetch_and
#endif

#endif /* arch_atomic64_fetch_and_relaxed */
1827 #ifndef arch_atomic64_andnot
1828 static __always_inline void
1829 arch_atomic64_andnot(s64 i, atomic64_t *v)
1831 arch_atomic64_and(~i, v);
1833 #define arch_atomic64_andnot arch_atomic64_andnot
1836 #ifndef arch_atomic64_fetch_andnot_relaxed
1837 #ifdef arch_atomic64_fetch_andnot
1838 #define arch_atomic64_fetch_andnot_acquire arch_atomic64_fetch_andnot
1839 #define arch_atomic64_fetch_andnot_release arch_atomic64_fetch_andnot
1840 #define arch_atomic64_fetch_andnot_relaxed arch_atomic64_fetch_andnot
1841 #endif /* arch_atomic64_fetch_andnot */
1843 #ifndef arch_atomic64_fetch_andnot
1844 static __always_inline s64
1845 arch_atomic64_fetch_andnot(s64 i, atomic64_t *v)
1847 return arch_atomic64_fetch_and(~i, v);
1849 #define arch_atomic64_fetch_andnot arch_atomic64_fetch_andnot
1852 #ifndef arch_atomic64_fetch_andnot_acquire
1853 static __always_inline s64
1854 arch_atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
1856 return arch_atomic64_fetch_and_acquire(~i, v);
1858 #define arch_atomic64_fetch_andnot_acquire arch_atomic64_fetch_andnot_acquire
1861 #ifndef arch_atomic64_fetch_andnot_release
1862 static __always_inline s64
1863 arch_atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
1865 return arch_atomic64_fetch_and_release(~i, v);
1867 #define arch_atomic64_fetch_andnot_release arch_atomic64_fetch_andnot_release
1870 #ifndef arch_atomic64_fetch_andnot_relaxed
1871 static __always_inline s64
1872 arch_atomic64_fetch_andnot_relaxed(s64 i, atomic64_t *v)
1874 return arch_atomic64_fetch_and_relaxed(~i, v);
1876 #define arch_atomic64_fetch_andnot_relaxed arch_atomic64_fetch_andnot_relaxed
1879 #else /* arch_atomic64_fetch_andnot_relaxed */
1881 #ifndef arch_atomic64_fetch_andnot_acquire
1882 static __always_inline s64
1883 arch_atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
1885 s64 ret = arch_atomic64_fetch_andnot_relaxed(i, v);
1886 __atomic_acquire_fence();
1889 #define arch_atomic64_fetch_andnot_acquire arch_atomic64_fetch_andnot_acquire
1892 #ifndef arch_atomic64_fetch_andnot_release
1893 static __always_inline s64
1894 arch_atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
1896 __atomic_release_fence();
1897 return arch_atomic64_fetch_andnot_relaxed(i, v);
1899 #define arch_atomic64_fetch_andnot_release arch_atomic64_fetch_andnot_release
1902 #ifndef arch_atomic64_fetch_andnot
1903 static __always_inline s64
1904 arch_atomic64_fetch_andnot(s64 i, atomic64_t *v)
1907 __atomic_pre_full_fence();
1908 ret = arch_atomic64_fetch_andnot_relaxed(i, v);
1909 __atomic_post_full_fence();
1912 #define arch_atomic64_fetch_andnot arch_atomic64_fetch_andnot
1915 #endif /* arch_atomic64_fetch_andnot_relaxed */
#ifndef arch_atomic64_fetch_or_relaxed
#define arch_atomic64_fetch_or_acquire arch_atomic64_fetch_or
#define arch_atomic64_fetch_or_release arch_atomic64_fetch_or
#define arch_atomic64_fetch_or_relaxed arch_atomic64_fetch_or
#else /* arch_atomic64_fetch_or_relaxed */

#ifndef arch_atomic64_fetch_or_acquire
static __always_inline s64
arch_atomic64_fetch_or_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_fetch_or_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_fetch_or_acquire arch_atomic64_fetch_or_acquire
#endif

#ifndef arch_atomic64_fetch_or_release
static __always_inline s64
arch_atomic64_fetch_or_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_fetch_or_relaxed(i, v);
}
#define arch_atomic64_fetch_or_release arch_atomic64_fetch_or_release
#endif

#ifndef arch_atomic64_fetch_or
static __always_inline s64
arch_atomic64_fetch_or(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_fetch_or_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_fetch_or arch_atomic64_fetch_or
#endif

#endif /* arch_atomic64_fetch_or_relaxed */
#ifndef arch_atomic64_fetch_xor_relaxed
#define arch_atomic64_fetch_xor_acquire arch_atomic64_fetch_xor
#define arch_atomic64_fetch_xor_release arch_atomic64_fetch_xor
#define arch_atomic64_fetch_xor_relaxed arch_atomic64_fetch_xor
#else /* arch_atomic64_fetch_xor_relaxed */

#ifndef arch_atomic64_fetch_xor_acquire
static __always_inline s64
arch_atomic64_fetch_xor_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_fetch_xor_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_fetch_xor_acquire arch_atomic64_fetch_xor_acquire
#endif

#ifndef arch_atomic64_fetch_xor_release
static __always_inline s64
arch_atomic64_fetch_xor_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_fetch_xor_relaxed(i, v);
}
#define arch_atomic64_fetch_xor_release arch_atomic64_fetch_xor_release
#endif

#ifndef arch_atomic64_fetch_xor
static __always_inline s64
arch_atomic64_fetch_xor(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_fetch_xor_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_fetch_xor arch_atomic64_fetch_xor
#endif

#endif /* arch_atomic64_fetch_xor_relaxed */
#ifndef arch_atomic64_xchg_relaxed
#define arch_atomic64_xchg_acquire arch_atomic64_xchg
#define arch_atomic64_xchg_release arch_atomic64_xchg
#define arch_atomic64_xchg_relaxed arch_atomic64_xchg
#else /* arch_atomic64_xchg_relaxed */

#ifndef arch_atomic64_xchg_acquire
static __always_inline s64
arch_atomic64_xchg_acquire(atomic64_t *v, s64 i)
{
	s64 ret = arch_atomic64_xchg_relaxed(v, i);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_xchg_acquire arch_atomic64_xchg_acquire
#endif

#ifndef arch_atomic64_xchg_release
static __always_inline s64
arch_atomic64_xchg_release(atomic64_t *v, s64 i)
{
	__atomic_release_fence();
	return arch_atomic64_xchg_relaxed(v, i);
}
#define arch_atomic64_xchg_release arch_atomic64_xchg_release
#endif

#ifndef arch_atomic64_xchg
static __always_inline s64
arch_atomic64_xchg(atomic64_t *v, s64 i)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_xchg_relaxed(v, i);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_xchg arch_atomic64_xchg
#endif

#endif /* arch_atomic64_xchg_relaxed */
#ifndef arch_atomic64_cmpxchg_relaxed
#define arch_atomic64_cmpxchg_acquire arch_atomic64_cmpxchg
#define arch_atomic64_cmpxchg_release arch_atomic64_cmpxchg
#define arch_atomic64_cmpxchg_relaxed arch_atomic64_cmpxchg
#else /* arch_atomic64_cmpxchg_relaxed */

#ifndef arch_atomic64_cmpxchg_acquire
static __always_inline s64
arch_atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new)
{
	s64 ret = arch_atomic64_cmpxchg_relaxed(v, old, new);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_cmpxchg_acquire arch_atomic64_cmpxchg_acquire
#endif

#ifndef arch_atomic64_cmpxchg_release
static __always_inline s64
arch_atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new)
{
	__atomic_release_fence();
	return arch_atomic64_cmpxchg_relaxed(v, old, new);
}
#define arch_atomic64_cmpxchg_release arch_atomic64_cmpxchg_release
#endif

#ifndef arch_atomic64_cmpxchg
static __always_inline s64
arch_atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_cmpxchg_relaxed(v, old, new);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_cmpxchg arch_atomic64_cmpxchg
#endif

#endif /* arch_atomic64_cmpxchg_relaxed */
2085 #ifndef arch_atomic64_try_cmpxchg_relaxed
2086 #ifdef arch_atomic64_try_cmpxchg
2087 #define arch_atomic64_try_cmpxchg_acquire arch_atomic64_try_cmpxchg
2088 #define arch_atomic64_try_cmpxchg_release arch_atomic64_try_cmpxchg
2089 #define arch_atomic64_try_cmpxchg_relaxed arch_atomic64_try_cmpxchg
2090 #endif /* arch_atomic64_try_cmpxchg */
2092 #ifndef arch_atomic64_try_cmpxchg
2093 static __always_inline bool
2094 arch_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
2097 r = arch_atomic64_cmpxchg(v, o, new);
2098 if (unlikely(r != o))
2100 return likely(r == o);
2102 #define arch_atomic64_try_cmpxchg arch_atomic64_try_cmpxchg
2105 #ifndef arch_atomic64_try_cmpxchg_acquire
2106 static __always_inline bool
2107 arch_atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
2110 r = arch_atomic64_cmpxchg_acquire(v, o, new);
2111 if (unlikely(r != o))
2113 return likely(r == o);
2115 #define arch_atomic64_try_cmpxchg_acquire arch_atomic64_try_cmpxchg_acquire
2118 #ifndef arch_atomic64_try_cmpxchg_release
2119 static __always_inline bool
2120 arch_atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
2123 r = arch_atomic64_cmpxchg_release(v, o, new);
2124 if (unlikely(r != o))
2126 return likely(r == o);
2128 #define arch_atomic64_try_cmpxchg_release arch_atomic64_try_cmpxchg_release
2131 #ifndef arch_atomic64_try_cmpxchg_relaxed
2132 static __always_inline bool
2133 arch_atomic64_try_cmpxchg_relaxed(atomic64_t *v, s64 *old, s64 new)
2136 r = arch_atomic64_cmpxchg_relaxed(v, o, new);
2137 if (unlikely(r != o))
2139 return likely(r == o);
2141 #define arch_atomic64_try_cmpxchg_relaxed arch_atomic64_try_cmpxchg_relaxed
2144 #else /* arch_atomic64_try_cmpxchg_relaxed */
2146 #ifndef arch_atomic64_try_cmpxchg_acquire
2147 static __always_inline bool
2148 arch_atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
2150 bool ret = arch_atomic64_try_cmpxchg_relaxed(v, old, new);
2151 __atomic_acquire_fence();
2154 #define arch_atomic64_try_cmpxchg_acquire arch_atomic64_try_cmpxchg_acquire
2157 #ifndef arch_atomic64_try_cmpxchg_release
2158 static __always_inline bool
2159 arch_atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
2161 __atomic_release_fence();
2162 return arch_atomic64_try_cmpxchg_relaxed(v, old, new);
2164 #define arch_atomic64_try_cmpxchg_release arch_atomic64_try_cmpxchg_release
2167 #ifndef arch_atomic64_try_cmpxchg
2168 static __always_inline bool
2169 arch_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
2172 __atomic_pre_full_fence();
2173 ret = arch_atomic64_try_cmpxchg_relaxed(v, old, new);
2174 __atomic_post_full_fence();
2177 #define arch_atomic64_try_cmpxchg arch_atomic64_try_cmpxchg
2180 #endif /* arch_atomic64_try_cmpxchg_relaxed */
2182 #ifndef arch_atomic64_sub_and_test
2184 * arch_atomic64_sub_and_test - subtract value from variable and test result
2185 * @i: integer value to subtract
2186 * @v: pointer of type atomic64_t
2188 * Atomically subtracts @i from @v and returns
2189 * true if the result is zero, or false for all
2192 static __always_inline bool
2193 arch_atomic64_sub_and_test(s64 i, atomic64_t *v)
2195 return arch_atomic64_sub_return(i, v) == 0;
2197 #define arch_atomic64_sub_and_test arch_atomic64_sub_and_test
2200 #ifndef arch_atomic64_dec_and_test
2202 * arch_atomic64_dec_and_test - decrement and test
2203 * @v: pointer of type atomic64_t
2205 * Atomically decrements @v by 1 and
2206 * returns true if the result is 0, or false for all other
2209 static __always_inline bool
2210 arch_atomic64_dec_and_test(atomic64_t *v)
2212 return arch_atomic64_dec_return(v) == 0;
2214 #define arch_atomic64_dec_and_test arch_atomic64_dec_and_test
2217 #ifndef arch_atomic64_inc_and_test
2219 * arch_atomic64_inc_and_test - increment and test
2220 * @v: pointer of type atomic64_t
2222 * Atomically increments @v by 1
2223 * and returns true if the result is zero, or false for all
2226 static __always_inline bool
2227 arch_atomic64_inc_and_test(atomic64_t *v)
2229 return arch_atomic64_inc_return(v) == 0;
2231 #define arch_atomic64_inc_and_test arch_atomic64_inc_and_test
2234 #ifndef arch_atomic64_add_negative
2236 * arch_atomic64_add_negative - add and test if negative
2237 * @i: integer value to add
2238 * @v: pointer of type atomic64_t
2240 * Atomically adds @i to @v and returns true
2241 * if the result is negative, or false when
2242 * result is greater than or equal to zero.
2244 static __always_inline bool
2245 arch_atomic64_add_negative(s64 i, atomic64_t *v)
2247 return arch_atomic64_add_return(i, v) < 0;
2249 #define arch_atomic64_add_negative arch_atomic64_add_negative
2252 #ifndef arch_atomic64_fetch_add_unless
2254 * arch_atomic64_fetch_add_unless - add unless the number is already a given value
2255 * @v: pointer of type atomic64_t
2256 * @a: the amount to add to v...
2257 * @u: ...unless v is equal to u.
2259 * Atomically adds @a to @v, so long as @v was not already @u.
2260 * Returns original value of @v
2262 static __always_inline s64
2263 arch_atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
2265 s64 c = arch_atomic64_read(v);
2268 if (unlikely(c == u))
2270 } while (!arch_atomic64_try_cmpxchg(v, &c, c + a));
2274 #define arch_atomic64_fetch_add_unless arch_atomic64_fetch_add_unless
2277 #ifndef arch_atomic64_add_unless
2279 * arch_atomic64_add_unless - add unless the number is already a given value
2280 * @v: pointer of type atomic64_t
2281 * @a: the amount to add to v...
2282 * @u: ...unless v is equal to u.
2284 * Atomically adds @a to @v, if @v was not already @u.
2285 * Returns true if the addition was done.
2287 static __always_inline bool
2288 arch_atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
2290 return arch_atomic64_fetch_add_unless(v, a, u) != u;
2292 #define arch_atomic64_add_unless arch_atomic64_add_unless
2295 #ifndef arch_atomic64_inc_not_zero
2297 * arch_atomic64_inc_not_zero - increment unless the number is zero
2298 * @v: pointer of type atomic64_t
2300 * Atomically increments @v by 1, if @v is non-zero.
2301 * Returns true if the increment was done.
2303 static __always_inline bool
2304 arch_atomic64_inc_not_zero(atomic64_t *v)
2306 return arch_atomic64_add_unless(v, 1, 0);
2308 #define arch_atomic64_inc_not_zero arch_atomic64_inc_not_zero
2311 #ifndef arch_atomic64_inc_unless_negative
2312 static __always_inline bool
2313 arch_atomic64_inc_unless_negative(atomic64_t *v)
2315 s64 c = arch_atomic64_read(v);
2318 if (unlikely(c < 0))
2320 } while (!arch_atomic64_try_cmpxchg(v, &c, c + 1));
2324 #define arch_atomic64_inc_unless_negative arch_atomic64_inc_unless_negative
2327 #ifndef arch_atomic64_dec_unless_positive
2328 static __always_inline bool
2329 arch_atomic64_dec_unless_positive(atomic64_t *v)
2331 s64 c = arch_atomic64_read(v);
2334 if (unlikely(c > 0))
2336 } while (!arch_atomic64_try_cmpxchg(v, &c, c - 1));
2340 #define arch_atomic64_dec_unless_positive arch_atomic64_dec_unless_positive
2343 #ifndef arch_atomic64_dec_if_positive
2344 static __always_inline s64
2345 arch_atomic64_dec_if_positive(atomic64_t *v)
2347 s64 dec, c = arch_atomic64_read(v);
2351 if (unlikely(dec < 0))
2353 } while (!arch_atomic64_try_cmpxchg(v, &c, dec));
2357 #define arch_atomic64_dec_if_positive arch_atomic64_dec_if_positive
2360 #endif /* _LINUX_ATOMIC_FALLBACK_H */
2361 // cca554917d7ea73d5e3e7397dd70c484cad9b2c4