1 // SPDX-License-Identifier: GPL-2.0
3 // Generated by scripts/atomic/gen-atomic-fallback.sh
4 // DO NOT MODIFY THIS FILE DIRECTLY
6 #ifndef _LINUX_ATOMIC_FALLBACK_H
7 #define _LINUX_ATOMIC_FALLBACK_H
9 #include <linux/compiler.h>
/*
 * arch_xchg fallbacks: if the architecture provides only a _relaxed form,
 * build the acquire/release/fully-ordered forms from it; if it provides only
 * the fully-ordered op, alias the weaker forms to that.
 */
#ifndef arch_xchg_relaxed
#define arch_xchg_relaxed		arch_xchg
#define arch_xchg_acquire		arch_xchg
#define arch_xchg_release		arch_xchg
#else /* arch_xchg_relaxed */

#ifndef arch_xchg_acquire
#define arch_xchg_acquire(...) \
	__atomic_op_acquire(arch_xchg, __VA_ARGS__)
#endif

#ifndef arch_xchg_release
#define arch_xchg_release(...) \
	__atomic_op_release(arch_xchg, __VA_ARGS__)
#endif

#ifndef arch_xchg
#define arch_xchg(...) \
	__atomic_op_fence(arch_xchg, __VA_ARGS__)
#endif

#endif /* arch_xchg_relaxed */
/* arch_cmpxchg fallbacks: same derivation scheme as arch_xchg above. */
#ifndef arch_cmpxchg_relaxed
#define arch_cmpxchg_relaxed		arch_cmpxchg
#define arch_cmpxchg_acquire		arch_cmpxchg
#define arch_cmpxchg_release		arch_cmpxchg
#else /* arch_cmpxchg_relaxed */

#ifndef arch_cmpxchg_acquire
#define arch_cmpxchg_acquire(...) \
	__atomic_op_acquire(arch_cmpxchg, __VA_ARGS__)
#endif

#ifndef arch_cmpxchg_release
#define arch_cmpxchg_release(...) \
	__atomic_op_release(arch_cmpxchg, __VA_ARGS__)
#endif

#ifndef arch_cmpxchg
#define arch_cmpxchg(...) \
	__atomic_op_fence(arch_cmpxchg, __VA_ARGS__)
#endif

#endif /* arch_cmpxchg_relaxed */
/* arch_cmpxchg64 fallbacks: same derivation scheme as arch_cmpxchg above. */
#ifndef arch_cmpxchg64_relaxed
#define arch_cmpxchg64_relaxed		arch_cmpxchg64
#define arch_cmpxchg64_acquire		arch_cmpxchg64
#define arch_cmpxchg64_release		arch_cmpxchg64
#else /* arch_cmpxchg64_relaxed */

#ifndef arch_cmpxchg64_acquire
#define arch_cmpxchg64_acquire(...) \
	__atomic_op_acquire(arch_cmpxchg64, __VA_ARGS__)
#endif

#ifndef arch_cmpxchg64_release
#define arch_cmpxchg64_release(...) \
	__atomic_op_release(arch_cmpxchg64, __VA_ARGS__)
#endif

#ifndef arch_cmpxchg64
#define arch_cmpxchg64(...) \
	__atomic_op_fence(arch_cmpxchg64, __VA_ARGS__)
#endif

#endif /* arch_cmpxchg64_relaxed */
80 #ifndef arch_atomic_read_acquire
81 static __always_inline int
82 arch_atomic_read_acquire(const atomic_t *v)
84 return smp_load_acquire(&(v)->counter);
86 #define arch_atomic_read_acquire arch_atomic_read_acquire
89 #ifndef arch_atomic_set_release
90 static __always_inline void
91 arch_atomic_set_release(atomic_t *v, int i)
93 smp_store_release(&(v)->counter, i);
95 #define arch_atomic_set_release arch_atomic_set_release
/*
 * arch_atomic_add_return fallbacks: derive acquire/release/fully-ordered
 * variants from _relaxed via explicit fences, or alias all variants to the
 * fully-ordered op when only that exists.
 */
#ifndef arch_atomic_add_return_relaxed
#define arch_atomic_add_return_acquire arch_atomic_add_return
#define arch_atomic_add_return_release arch_atomic_add_return
#define arch_atomic_add_return_relaxed arch_atomic_add_return
#else /* arch_atomic_add_return_relaxed */

#ifndef arch_atomic_add_return_acquire
static __always_inline int
arch_atomic_add_return_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_add_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_add_return_acquire arch_atomic_add_return_acquire
#endif

#ifndef arch_atomic_add_return_release
static __always_inline int
arch_atomic_add_return_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_add_return_relaxed(i, v);
}
#define arch_atomic_add_return_release arch_atomic_add_return_release
#endif

#ifndef arch_atomic_add_return
static __always_inline int
arch_atomic_add_return(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_add_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_add_return arch_atomic_add_return
#endif

#endif /* arch_atomic_add_return_relaxed */
/* arch_atomic_fetch_add fallbacks: same fence-based derivation as add_return. */
#ifndef arch_atomic_fetch_add_relaxed
#define arch_atomic_fetch_add_acquire arch_atomic_fetch_add
#define arch_atomic_fetch_add_release arch_atomic_fetch_add
#define arch_atomic_fetch_add_relaxed arch_atomic_fetch_add
#else /* arch_atomic_fetch_add_relaxed */

#ifndef arch_atomic_fetch_add_acquire
static __always_inline int
arch_atomic_fetch_add_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_fetch_add_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_fetch_add_acquire arch_atomic_fetch_add_acquire
#endif

#ifndef arch_atomic_fetch_add_release
static __always_inline int
arch_atomic_fetch_add_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_fetch_add_relaxed(i, v);
}
#define arch_atomic_fetch_add_release arch_atomic_fetch_add_release
#endif

#ifndef arch_atomic_fetch_add
static __always_inline int
arch_atomic_fetch_add(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_add_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_fetch_add arch_atomic_fetch_add
#endif

#endif /* arch_atomic_fetch_add_relaxed */
/* arch_atomic_sub_return fallbacks: same fence-based derivation as add_return. */
#ifndef arch_atomic_sub_return_relaxed
#define arch_atomic_sub_return_acquire arch_atomic_sub_return
#define arch_atomic_sub_return_release arch_atomic_sub_return
#define arch_atomic_sub_return_relaxed arch_atomic_sub_return
#else /* arch_atomic_sub_return_relaxed */

#ifndef arch_atomic_sub_return_acquire
static __always_inline int
arch_atomic_sub_return_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_sub_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_sub_return_acquire arch_atomic_sub_return_acquire
#endif

#ifndef arch_atomic_sub_return_release
static __always_inline int
arch_atomic_sub_return_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_sub_return_relaxed(i, v);
}
#define arch_atomic_sub_return_release arch_atomic_sub_return_release
#endif

#ifndef arch_atomic_sub_return
static __always_inline int
arch_atomic_sub_return(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_sub_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_sub_return arch_atomic_sub_return
#endif

#endif /* arch_atomic_sub_return_relaxed */
/* arch_atomic_fetch_sub fallbacks: same fence-based derivation as fetch_add. */
#ifndef arch_atomic_fetch_sub_relaxed
#define arch_atomic_fetch_sub_acquire arch_atomic_fetch_sub
#define arch_atomic_fetch_sub_release arch_atomic_fetch_sub
#define arch_atomic_fetch_sub_relaxed arch_atomic_fetch_sub
#else /* arch_atomic_fetch_sub_relaxed */

#ifndef arch_atomic_fetch_sub_acquire
static __always_inline int
arch_atomic_fetch_sub_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_fetch_sub_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_fetch_sub_acquire arch_atomic_fetch_sub_acquire
#endif

#ifndef arch_atomic_fetch_sub_release
static __always_inline int
arch_atomic_fetch_sub_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_fetch_sub_relaxed(i, v);
}
#define arch_atomic_fetch_sub_release arch_atomic_fetch_sub_release
#endif

#ifndef arch_atomic_fetch_sub
static __always_inline int
arch_atomic_fetch_sub(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_sub_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_fetch_sub arch_atomic_fetch_sub
#endif

#endif /* arch_atomic_fetch_sub_relaxed */
266 #ifndef arch_atomic_inc
267 static __always_inline void
268 arch_atomic_inc(atomic_t *v)
270 arch_atomic_add(1, v);
272 #define arch_atomic_inc arch_atomic_inc
275 #ifndef arch_atomic_inc_return_relaxed
276 #ifdef arch_atomic_inc_return
277 #define arch_atomic_inc_return_acquire arch_atomic_inc_return
278 #define arch_atomic_inc_return_release arch_atomic_inc_return
279 #define arch_atomic_inc_return_relaxed arch_atomic_inc_return
280 #endif /* arch_atomic_inc_return */
282 #ifndef arch_atomic_inc_return
283 static __always_inline int
284 arch_atomic_inc_return(atomic_t *v)
286 return arch_atomic_add_return(1, v);
288 #define arch_atomic_inc_return arch_atomic_inc_return
291 #ifndef arch_atomic_inc_return_acquire
292 static __always_inline int
293 arch_atomic_inc_return_acquire(atomic_t *v)
295 return arch_atomic_add_return_acquire(1, v);
297 #define arch_atomic_inc_return_acquire arch_atomic_inc_return_acquire
300 #ifndef arch_atomic_inc_return_release
301 static __always_inline int
302 arch_atomic_inc_return_release(atomic_t *v)
304 return arch_atomic_add_return_release(1, v);
306 #define arch_atomic_inc_return_release arch_atomic_inc_return_release
309 #ifndef arch_atomic_inc_return_relaxed
310 static __always_inline int
311 arch_atomic_inc_return_relaxed(atomic_t *v)
313 return arch_atomic_add_return_relaxed(1, v);
315 #define arch_atomic_inc_return_relaxed arch_atomic_inc_return_relaxed
318 #else /* arch_atomic_inc_return_relaxed */
320 #ifndef arch_atomic_inc_return_acquire
321 static __always_inline int
322 arch_atomic_inc_return_acquire(atomic_t *v)
324 int ret = arch_atomic_inc_return_relaxed(v);
325 __atomic_acquire_fence();
328 #define arch_atomic_inc_return_acquire arch_atomic_inc_return_acquire
331 #ifndef arch_atomic_inc_return_release
332 static __always_inline int
333 arch_atomic_inc_return_release(atomic_t *v)
335 __atomic_release_fence();
336 return arch_atomic_inc_return_relaxed(v);
338 #define arch_atomic_inc_return_release arch_atomic_inc_return_release
341 #ifndef arch_atomic_inc_return
342 static __always_inline int
343 arch_atomic_inc_return(atomic_t *v)
346 __atomic_pre_full_fence();
347 ret = arch_atomic_inc_return_relaxed(v);
348 __atomic_post_full_fence();
351 #define arch_atomic_inc_return arch_atomic_inc_return
354 #endif /* arch_atomic_inc_return_relaxed */
356 #ifndef arch_atomic_fetch_inc_relaxed
357 #ifdef arch_atomic_fetch_inc
358 #define arch_atomic_fetch_inc_acquire arch_atomic_fetch_inc
359 #define arch_atomic_fetch_inc_release arch_atomic_fetch_inc
360 #define arch_atomic_fetch_inc_relaxed arch_atomic_fetch_inc
361 #endif /* arch_atomic_fetch_inc */
363 #ifndef arch_atomic_fetch_inc
364 static __always_inline int
365 arch_atomic_fetch_inc(atomic_t *v)
367 return arch_atomic_fetch_add(1, v);
369 #define arch_atomic_fetch_inc arch_atomic_fetch_inc
372 #ifndef arch_atomic_fetch_inc_acquire
373 static __always_inline int
374 arch_atomic_fetch_inc_acquire(atomic_t *v)
376 return arch_atomic_fetch_add_acquire(1, v);
378 #define arch_atomic_fetch_inc_acquire arch_atomic_fetch_inc_acquire
381 #ifndef arch_atomic_fetch_inc_release
382 static __always_inline int
383 arch_atomic_fetch_inc_release(atomic_t *v)
385 return arch_atomic_fetch_add_release(1, v);
387 #define arch_atomic_fetch_inc_release arch_atomic_fetch_inc_release
390 #ifndef arch_atomic_fetch_inc_relaxed
391 static __always_inline int
392 arch_atomic_fetch_inc_relaxed(atomic_t *v)
394 return arch_atomic_fetch_add_relaxed(1, v);
396 #define arch_atomic_fetch_inc_relaxed arch_atomic_fetch_inc_relaxed
399 #else /* arch_atomic_fetch_inc_relaxed */
401 #ifndef arch_atomic_fetch_inc_acquire
402 static __always_inline int
403 arch_atomic_fetch_inc_acquire(atomic_t *v)
405 int ret = arch_atomic_fetch_inc_relaxed(v);
406 __atomic_acquire_fence();
409 #define arch_atomic_fetch_inc_acquire arch_atomic_fetch_inc_acquire
412 #ifndef arch_atomic_fetch_inc_release
413 static __always_inline int
414 arch_atomic_fetch_inc_release(atomic_t *v)
416 __atomic_release_fence();
417 return arch_atomic_fetch_inc_relaxed(v);
419 #define arch_atomic_fetch_inc_release arch_atomic_fetch_inc_release
422 #ifndef arch_atomic_fetch_inc
423 static __always_inline int
424 arch_atomic_fetch_inc(atomic_t *v)
427 __atomic_pre_full_fence();
428 ret = arch_atomic_fetch_inc_relaxed(v);
429 __atomic_post_full_fence();
432 #define arch_atomic_fetch_inc arch_atomic_fetch_inc
435 #endif /* arch_atomic_fetch_inc_relaxed */
437 #ifndef arch_atomic_dec
438 static __always_inline void
439 arch_atomic_dec(atomic_t *v)
441 arch_atomic_sub(1, v);
443 #define arch_atomic_dec arch_atomic_dec
446 #ifndef arch_atomic_dec_return_relaxed
447 #ifdef arch_atomic_dec_return
448 #define arch_atomic_dec_return_acquire arch_atomic_dec_return
449 #define arch_atomic_dec_return_release arch_atomic_dec_return
450 #define arch_atomic_dec_return_relaxed arch_atomic_dec_return
451 #endif /* arch_atomic_dec_return */
453 #ifndef arch_atomic_dec_return
454 static __always_inline int
455 arch_atomic_dec_return(atomic_t *v)
457 return arch_atomic_sub_return(1, v);
459 #define arch_atomic_dec_return arch_atomic_dec_return
462 #ifndef arch_atomic_dec_return_acquire
463 static __always_inline int
464 arch_atomic_dec_return_acquire(atomic_t *v)
466 return arch_atomic_sub_return_acquire(1, v);
468 #define arch_atomic_dec_return_acquire arch_atomic_dec_return_acquire
471 #ifndef arch_atomic_dec_return_release
472 static __always_inline int
473 arch_atomic_dec_return_release(atomic_t *v)
475 return arch_atomic_sub_return_release(1, v);
477 #define arch_atomic_dec_return_release arch_atomic_dec_return_release
480 #ifndef arch_atomic_dec_return_relaxed
481 static __always_inline int
482 arch_atomic_dec_return_relaxed(atomic_t *v)
484 return arch_atomic_sub_return_relaxed(1, v);
486 #define arch_atomic_dec_return_relaxed arch_atomic_dec_return_relaxed
489 #else /* arch_atomic_dec_return_relaxed */
491 #ifndef arch_atomic_dec_return_acquire
492 static __always_inline int
493 arch_atomic_dec_return_acquire(atomic_t *v)
495 int ret = arch_atomic_dec_return_relaxed(v);
496 __atomic_acquire_fence();
499 #define arch_atomic_dec_return_acquire arch_atomic_dec_return_acquire
502 #ifndef arch_atomic_dec_return_release
503 static __always_inline int
504 arch_atomic_dec_return_release(atomic_t *v)
506 __atomic_release_fence();
507 return arch_atomic_dec_return_relaxed(v);
509 #define arch_atomic_dec_return_release arch_atomic_dec_return_release
512 #ifndef arch_atomic_dec_return
513 static __always_inline int
514 arch_atomic_dec_return(atomic_t *v)
517 __atomic_pre_full_fence();
518 ret = arch_atomic_dec_return_relaxed(v);
519 __atomic_post_full_fence();
522 #define arch_atomic_dec_return arch_atomic_dec_return
525 #endif /* arch_atomic_dec_return_relaxed */
527 #ifndef arch_atomic_fetch_dec_relaxed
528 #ifdef arch_atomic_fetch_dec
529 #define arch_atomic_fetch_dec_acquire arch_atomic_fetch_dec
530 #define arch_atomic_fetch_dec_release arch_atomic_fetch_dec
531 #define arch_atomic_fetch_dec_relaxed arch_atomic_fetch_dec
532 #endif /* arch_atomic_fetch_dec */
534 #ifndef arch_atomic_fetch_dec
535 static __always_inline int
536 arch_atomic_fetch_dec(atomic_t *v)
538 return arch_atomic_fetch_sub(1, v);
540 #define arch_atomic_fetch_dec arch_atomic_fetch_dec
543 #ifndef arch_atomic_fetch_dec_acquire
544 static __always_inline int
545 arch_atomic_fetch_dec_acquire(atomic_t *v)
547 return arch_atomic_fetch_sub_acquire(1, v);
549 #define arch_atomic_fetch_dec_acquire arch_atomic_fetch_dec_acquire
552 #ifndef arch_atomic_fetch_dec_release
553 static __always_inline int
554 arch_atomic_fetch_dec_release(atomic_t *v)
556 return arch_atomic_fetch_sub_release(1, v);
558 #define arch_atomic_fetch_dec_release arch_atomic_fetch_dec_release
561 #ifndef arch_atomic_fetch_dec_relaxed
562 static __always_inline int
563 arch_atomic_fetch_dec_relaxed(atomic_t *v)
565 return arch_atomic_fetch_sub_relaxed(1, v);
567 #define arch_atomic_fetch_dec_relaxed arch_atomic_fetch_dec_relaxed
570 #else /* arch_atomic_fetch_dec_relaxed */
572 #ifndef arch_atomic_fetch_dec_acquire
573 static __always_inline int
574 arch_atomic_fetch_dec_acquire(atomic_t *v)
576 int ret = arch_atomic_fetch_dec_relaxed(v);
577 __atomic_acquire_fence();
580 #define arch_atomic_fetch_dec_acquire arch_atomic_fetch_dec_acquire
583 #ifndef arch_atomic_fetch_dec_release
584 static __always_inline int
585 arch_atomic_fetch_dec_release(atomic_t *v)
587 __atomic_release_fence();
588 return arch_atomic_fetch_dec_relaxed(v);
590 #define arch_atomic_fetch_dec_release arch_atomic_fetch_dec_release
593 #ifndef arch_atomic_fetch_dec
594 static __always_inline int
595 arch_atomic_fetch_dec(atomic_t *v)
598 __atomic_pre_full_fence();
599 ret = arch_atomic_fetch_dec_relaxed(v);
600 __atomic_post_full_fence();
603 #define arch_atomic_fetch_dec arch_atomic_fetch_dec
606 #endif /* arch_atomic_fetch_dec_relaxed */
/* arch_atomic_fetch_and fallbacks: same fence-based derivation as fetch_add. */
#ifndef arch_atomic_fetch_and_relaxed
#define arch_atomic_fetch_and_acquire arch_atomic_fetch_and
#define arch_atomic_fetch_and_release arch_atomic_fetch_and
#define arch_atomic_fetch_and_relaxed arch_atomic_fetch_and
#else /* arch_atomic_fetch_and_relaxed */

#ifndef arch_atomic_fetch_and_acquire
static __always_inline int
arch_atomic_fetch_and_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_fetch_and_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_fetch_and_acquire arch_atomic_fetch_and_acquire
#endif

#ifndef arch_atomic_fetch_and_release
static __always_inline int
arch_atomic_fetch_and_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_fetch_and_relaxed(i, v);
}
#define arch_atomic_fetch_and_release arch_atomic_fetch_and_release
#endif

#ifndef arch_atomic_fetch_and
static __always_inline int
arch_atomic_fetch_and(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_and_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_fetch_and arch_atomic_fetch_and
#endif

#endif /* arch_atomic_fetch_and_relaxed */
650 #ifndef arch_atomic_andnot
651 static __always_inline void
652 arch_atomic_andnot(int i, atomic_t *v)
654 arch_atomic_and(~i, v);
656 #define arch_atomic_andnot arch_atomic_andnot
659 #ifndef arch_atomic_fetch_andnot_relaxed
660 #ifdef arch_atomic_fetch_andnot
661 #define arch_atomic_fetch_andnot_acquire arch_atomic_fetch_andnot
662 #define arch_atomic_fetch_andnot_release arch_atomic_fetch_andnot
663 #define arch_atomic_fetch_andnot_relaxed arch_atomic_fetch_andnot
664 #endif /* arch_atomic_fetch_andnot */
666 #ifndef arch_atomic_fetch_andnot
667 static __always_inline int
668 arch_atomic_fetch_andnot(int i, atomic_t *v)
670 return arch_atomic_fetch_and(~i, v);
672 #define arch_atomic_fetch_andnot arch_atomic_fetch_andnot
675 #ifndef arch_atomic_fetch_andnot_acquire
676 static __always_inline int
677 arch_atomic_fetch_andnot_acquire(int i, atomic_t *v)
679 return arch_atomic_fetch_and_acquire(~i, v);
681 #define arch_atomic_fetch_andnot_acquire arch_atomic_fetch_andnot_acquire
684 #ifndef arch_atomic_fetch_andnot_release
685 static __always_inline int
686 arch_atomic_fetch_andnot_release(int i, atomic_t *v)
688 return arch_atomic_fetch_and_release(~i, v);
690 #define arch_atomic_fetch_andnot_release arch_atomic_fetch_andnot_release
693 #ifndef arch_atomic_fetch_andnot_relaxed
694 static __always_inline int
695 arch_atomic_fetch_andnot_relaxed(int i, atomic_t *v)
697 return arch_atomic_fetch_and_relaxed(~i, v);
699 #define arch_atomic_fetch_andnot_relaxed arch_atomic_fetch_andnot_relaxed
702 #else /* arch_atomic_fetch_andnot_relaxed */
704 #ifndef arch_atomic_fetch_andnot_acquire
705 static __always_inline int
706 arch_atomic_fetch_andnot_acquire(int i, atomic_t *v)
708 int ret = arch_atomic_fetch_andnot_relaxed(i, v);
709 __atomic_acquire_fence();
712 #define arch_atomic_fetch_andnot_acquire arch_atomic_fetch_andnot_acquire
715 #ifndef arch_atomic_fetch_andnot_release
716 static __always_inline int
717 arch_atomic_fetch_andnot_release(int i, atomic_t *v)
719 __atomic_release_fence();
720 return arch_atomic_fetch_andnot_relaxed(i, v);
722 #define arch_atomic_fetch_andnot_release arch_atomic_fetch_andnot_release
725 #ifndef arch_atomic_fetch_andnot
726 static __always_inline int
727 arch_atomic_fetch_andnot(int i, atomic_t *v)
730 __atomic_pre_full_fence();
731 ret = arch_atomic_fetch_andnot_relaxed(i, v);
732 __atomic_post_full_fence();
735 #define arch_atomic_fetch_andnot arch_atomic_fetch_andnot
738 #endif /* arch_atomic_fetch_andnot_relaxed */
/* arch_atomic_fetch_or fallbacks: same fence-based derivation as fetch_add. */
#ifndef arch_atomic_fetch_or_relaxed
#define arch_atomic_fetch_or_acquire arch_atomic_fetch_or
#define arch_atomic_fetch_or_release arch_atomic_fetch_or
#define arch_atomic_fetch_or_relaxed arch_atomic_fetch_or
#else /* arch_atomic_fetch_or_relaxed */

#ifndef arch_atomic_fetch_or_acquire
static __always_inline int
arch_atomic_fetch_or_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_fetch_or_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_fetch_or_acquire arch_atomic_fetch_or_acquire
#endif

#ifndef arch_atomic_fetch_or_release
static __always_inline int
arch_atomic_fetch_or_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_fetch_or_relaxed(i, v);
}
#define arch_atomic_fetch_or_release arch_atomic_fetch_or_release
#endif

#ifndef arch_atomic_fetch_or
static __always_inline int
arch_atomic_fetch_or(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_or_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_fetch_or arch_atomic_fetch_or
#endif

#endif /* arch_atomic_fetch_or_relaxed */
/* arch_atomic_fetch_xor fallbacks: same fence-based derivation as fetch_add. */
#ifndef arch_atomic_fetch_xor_relaxed
#define arch_atomic_fetch_xor_acquire arch_atomic_fetch_xor
#define arch_atomic_fetch_xor_release arch_atomic_fetch_xor
#define arch_atomic_fetch_xor_relaxed arch_atomic_fetch_xor
#else /* arch_atomic_fetch_xor_relaxed */

#ifndef arch_atomic_fetch_xor_acquire
static __always_inline int
arch_atomic_fetch_xor_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_fetch_xor_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_fetch_xor_acquire arch_atomic_fetch_xor_acquire
#endif

#ifndef arch_atomic_fetch_xor_release
static __always_inline int
arch_atomic_fetch_xor_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_fetch_xor_relaxed(i, v);
}
#define arch_atomic_fetch_xor_release arch_atomic_fetch_xor_release
#endif

#ifndef arch_atomic_fetch_xor
static __always_inline int
arch_atomic_fetch_xor(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_xor_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_fetch_xor arch_atomic_fetch_xor
#endif

#endif /* arch_atomic_fetch_xor_relaxed */
/* arch_atomic_xchg fallbacks: fence-based derivation; note (v, i) argument order. */
#ifndef arch_atomic_xchg_relaxed
#define arch_atomic_xchg_acquire arch_atomic_xchg
#define arch_atomic_xchg_release arch_atomic_xchg
#define arch_atomic_xchg_relaxed arch_atomic_xchg
#else /* arch_atomic_xchg_relaxed */

#ifndef arch_atomic_xchg_acquire
static __always_inline int
arch_atomic_xchg_acquire(atomic_t *v, int i)
{
	int ret = arch_atomic_xchg_relaxed(v, i);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_xchg_acquire arch_atomic_xchg_acquire
#endif

#ifndef arch_atomic_xchg_release
static __always_inline int
arch_atomic_xchg_release(atomic_t *v, int i)
{
	__atomic_release_fence();
	return arch_atomic_xchg_relaxed(v, i);
}
#define arch_atomic_xchg_release arch_atomic_xchg_release
#endif

#ifndef arch_atomic_xchg
static __always_inline int
arch_atomic_xchg(atomic_t *v, int i)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_xchg_relaxed(v, i);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_xchg arch_atomic_xchg
#endif

#endif /* arch_atomic_xchg_relaxed */
/* arch_atomic_cmpxchg fallbacks: fence-based derivation from the _relaxed op. */
#ifndef arch_atomic_cmpxchg_relaxed
#define arch_atomic_cmpxchg_acquire arch_atomic_cmpxchg
#define arch_atomic_cmpxchg_release arch_atomic_cmpxchg
#define arch_atomic_cmpxchg_relaxed arch_atomic_cmpxchg
#else /* arch_atomic_cmpxchg_relaxed */

#ifndef arch_atomic_cmpxchg_acquire
static __always_inline int
arch_atomic_cmpxchg_acquire(atomic_t *v, int old, int new)
{
	int ret = arch_atomic_cmpxchg_relaxed(v, old, new);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_cmpxchg_acquire arch_atomic_cmpxchg_acquire
#endif

#ifndef arch_atomic_cmpxchg_release
static __always_inline int
arch_atomic_cmpxchg_release(atomic_t *v, int old, int new)
{
	__atomic_release_fence();
	return arch_atomic_cmpxchg_relaxed(v, old, new);
}
#define arch_atomic_cmpxchg_release arch_atomic_cmpxchg_release
#endif

#ifndef arch_atomic_cmpxchg
static __always_inline int
arch_atomic_cmpxchg(atomic_t *v, int old, int new)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_cmpxchg_relaxed(v, old, new);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_cmpxchg arch_atomic_cmpxchg
#endif

#endif /* arch_atomic_cmpxchg_relaxed */
908 #ifndef arch_atomic_try_cmpxchg_relaxed
909 #ifdef arch_atomic_try_cmpxchg
910 #define arch_atomic_try_cmpxchg_acquire arch_atomic_try_cmpxchg
911 #define arch_atomic_try_cmpxchg_release arch_atomic_try_cmpxchg
912 #define arch_atomic_try_cmpxchg_relaxed arch_atomic_try_cmpxchg
913 #endif /* arch_atomic_try_cmpxchg */
915 #ifndef arch_atomic_try_cmpxchg
916 static __always_inline bool
917 arch_atomic_try_cmpxchg(atomic_t *v, int *old, int new)
920 r = arch_atomic_cmpxchg(v, o, new);
921 if (unlikely(r != o))
923 return likely(r == o);
925 #define arch_atomic_try_cmpxchg arch_atomic_try_cmpxchg
928 #ifndef arch_atomic_try_cmpxchg_acquire
929 static __always_inline bool
930 arch_atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
933 r = arch_atomic_cmpxchg_acquire(v, o, new);
934 if (unlikely(r != o))
936 return likely(r == o);
938 #define arch_atomic_try_cmpxchg_acquire arch_atomic_try_cmpxchg_acquire
941 #ifndef arch_atomic_try_cmpxchg_release
942 static __always_inline bool
943 arch_atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
946 r = arch_atomic_cmpxchg_release(v, o, new);
947 if (unlikely(r != o))
949 return likely(r == o);
951 #define arch_atomic_try_cmpxchg_release arch_atomic_try_cmpxchg_release
954 #ifndef arch_atomic_try_cmpxchg_relaxed
955 static __always_inline bool
956 arch_atomic_try_cmpxchg_relaxed(atomic_t *v, int *old, int new)
959 r = arch_atomic_cmpxchg_relaxed(v, o, new);
960 if (unlikely(r != o))
962 return likely(r == o);
964 #define arch_atomic_try_cmpxchg_relaxed arch_atomic_try_cmpxchg_relaxed
967 #else /* arch_atomic_try_cmpxchg_relaxed */
969 #ifndef arch_atomic_try_cmpxchg_acquire
970 static __always_inline bool
971 arch_atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
973 bool ret = arch_atomic_try_cmpxchg_relaxed(v, old, new);
974 __atomic_acquire_fence();
977 #define arch_atomic_try_cmpxchg_acquire arch_atomic_try_cmpxchg_acquire
980 #ifndef arch_atomic_try_cmpxchg_release
981 static __always_inline bool
982 arch_atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
984 __atomic_release_fence();
985 return arch_atomic_try_cmpxchg_relaxed(v, old, new);
987 #define arch_atomic_try_cmpxchg_release arch_atomic_try_cmpxchg_release
990 #ifndef arch_atomic_try_cmpxchg
991 static __always_inline bool
992 arch_atomic_try_cmpxchg(atomic_t *v, int *old, int new)
995 __atomic_pre_full_fence();
996 ret = arch_atomic_try_cmpxchg_relaxed(v, old, new);
997 __atomic_post_full_fence();
1000 #define arch_atomic_try_cmpxchg arch_atomic_try_cmpxchg
1003 #endif /* arch_atomic_try_cmpxchg_relaxed */
1005 #ifndef arch_atomic_sub_and_test
1007 * arch_atomic_sub_and_test - subtract value from variable and test result
1008 * @i: integer value to subtract
1009 * @v: pointer of type atomic_t
1011 * Atomically subtracts @i from @v and returns
1012 * true if the result is zero, or false for all
1015 static __always_inline bool
1016 arch_atomic_sub_and_test(int i, atomic_t *v)
1018 return arch_atomic_sub_return(i, v) == 0;
1020 #define arch_atomic_sub_and_test arch_atomic_sub_and_test
1023 #ifndef arch_atomic_dec_and_test
1025 * arch_atomic_dec_and_test - decrement and test
1026 * @v: pointer of type atomic_t
1028 * Atomically decrements @v by 1 and
1029 * returns true if the result is 0, or false for all other
1032 static __always_inline bool
1033 arch_atomic_dec_and_test(atomic_t *v)
1035 return arch_atomic_dec_return(v) == 0;
1037 #define arch_atomic_dec_and_test arch_atomic_dec_and_test
1040 #ifndef arch_atomic_inc_and_test
1042 * arch_atomic_inc_and_test - increment and test
1043 * @v: pointer of type atomic_t
1045 * Atomically increments @v by 1
1046 * and returns true if the result is zero, or false for all
1049 static __always_inline bool
1050 arch_atomic_inc_and_test(atomic_t *v)
1052 return arch_atomic_inc_return(v) == 0;
1054 #define arch_atomic_inc_and_test arch_atomic_inc_and_test
1057 #ifndef arch_atomic_add_negative
1059 * arch_atomic_add_negative - add and test if negative
1060 * @i: integer value to add
1061 * @v: pointer of type atomic_t
1063 * Atomically adds @i to @v and returns true
1064 * if the result is negative, or false when
1065 * result is greater than or equal to zero.
1067 static __always_inline bool
1068 arch_atomic_add_negative(int i, atomic_t *v)
1070 return arch_atomic_add_return(i, v) < 0;
1072 #define arch_atomic_add_negative arch_atomic_add_negative
1075 #ifndef arch_atomic_fetch_add_unless
1077 * arch_atomic_fetch_add_unless - add unless the number is already a given value
1078 * @v: pointer of type atomic_t
1079 * @a: the amount to add to v...
1080 * @u: ...unless v is equal to u.
1082 * Atomically adds @a to @v, so long as @v was not already @u.
1083 * Returns original value of @v
1085 static __always_inline int
1086 arch_atomic_fetch_add_unless(atomic_t *v, int a, int u)
1088 int c = arch_atomic_read(v);
1091 if (unlikely(c == u))
1093 } while (!arch_atomic_try_cmpxchg(v, &c, c + a));
1097 #define arch_atomic_fetch_add_unless arch_atomic_fetch_add_unless
1100 #ifndef arch_atomic_add_unless
1102 * arch_atomic_add_unless - add unless the number is already a given value
1103 * @v: pointer of type atomic_t
1104 * @a: the amount to add to v...
1105 * @u: ...unless v is equal to u.
1107 * Atomically adds @a to @v, if @v was not already @u.
1108 * Returns true if the addition was done.
1110 static __always_inline bool
1111 arch_atomic_add_unless(atomic_t *v, int a, int u)
1113 return arch_atomic_fetch_add_unless(v, a, u) != u;
1115 #define arch_atomic_add_unless arch_atomic_add_unless
1118 #ifndef arch_atomic_inc_not_zero
1120 * arch_atomic_inc_not_zero - increment unless the number is zero
1121 * @v: pointer of type atomic_t
1123 * Atomically increments @v by 1, if @v is non-zero.
1124 * Returns true if the increment was done.
1126 static __always_inline bool
1127 arch_atomic_inc_not_zero(atomic_t *v)
1129 return arch_atomic_add_unless(v, 1, 0);
1131 #define arch_atomic_inc_not_zero arch_atomic_inc_not_zero
1134 #ifndef arch_atomic_inc_unless_negative
1135 static __always_inline bool
1136 arch_atomic_inc_unless_negative(atomic_t *v)
1138 int c = arch_atomic_read(v);
1141 if (unlikely(c < 0))
1143 } while (!arch_atomic_try_cmpxchg(v, &c, c + 1));
1147 #define arch_atomic_inc_unless_negative arch_atomic_inc_unless_negative
1150 #ifndef arch_atomic_dec_unless_positive
1151 static __always_inline bool
1152 arch_atomic_dec_unless_positive(atomic_t *v)
1154 int c = arch_atomic_read(v);
1157 if (unlikely(c > 0))
1159 } while (!arch_atomic_try_cmpxchg(v, &c, c - 1));
1163 #define arch_atomic_dec_unless_positive arch_atomic_dec_unless_positive
1166 #ifndef arch_atomic_dec_if_positive
1167 static __always_inline int
1168 arch_atomic_dec_if_positive(atomic_t *v)
1170 int dec, c = arch_atomic_read(v);
1174 if (unlikely(dec < 0))
1176 } while (!arch_atomic_try_cmpxchg(v, &c, dec));
1180 #define arch_atomic_dec_if_positive arch_atomic_dec_if_positive
/* Pull in the generic 64-bit implementation when the arch has none. */
#ifdef CONFIG_GENERIC_ATOMIC64
#include <asm-generic/atomic64.h>
#endif
1187 #ifndef arch_atomic64_read_acquire
1188 static __always_inline s64
1189 arch_atomic64_read_acquire(const atomic64_t *v)
1191 return smp_load_acquire(&(v)->counter);
1193 #define arch_atomic64_read_acquire arch_atomic64_read_acquire
1196 #ifndef arch_atomic64_set_release
1197 static __always_inline void
1198 arch_atomic64_set_release(atomic64_t *v, s64 i)
1200 smp_store_release(&(v)->counter, i);
1202 #define arch_atomic64_set_release arch_atomic64_set_release
/* arch_atomic64_add_return fallbacks: 64-bit mirror of arch_atomic_add_return. */
#ifndef arch_atomic64_add_return_relaxed
#define arch_atomic64_add_return_acquire arch_atomic64_add_return
#define arch_atomic64_add_return_release arch_atomic64_add_return
#define arch_atomic64_add_return_relaxed arch_atomic64_add_return
#else /* arch_atomic64_add_return_relaxed */

#ifndef arch_atomic64_add_return_acquire
static __always_inline s64
arch_atomic64_add_return_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_add_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_add_return_acquire arch_atomic64_add_return_acquire
#endif

#ifndef arch_atomic64_add_return_release
static __always_inline s64
arch_atomic64_add_return_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_add_return_relaxed(i, v);
}
#define arch_atomic64_add_return_release arch_atomic64_add_return_release
#endif

#ifndef arch_atomic64_add_return
static __always_inline s64
arch_atomic64_add_return(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_add_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_add_return arch_atomic64_add_return
#endif

#endif /* arch_atomic64_add_return_relaxed */
#ifndef arch_atomic64_fetch_add_relaxed
#define arch_atomic64_fetch_add_acquire arch_atomic64_fetch_add
#define arch_atomic64_fetch_add_release arch_atomic64_fetch_add
#define arch_atomic64_fetch_add_relaxed arch_atomic64_fetch_add
#else /* arch_atomic64_fetch_add_relaxed */

#ifndef arch_atomic64_fetch_add_acquire
static __always_inline s64
arch_atomic64_fetch_add_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_fetch_add_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_fetch_add_acquire arch_atomic64_fetch_add_acquire
#endif

#ifndef arch_atomic64_fetch_add_release
static __always_inline s64
arch_atomic64_fetch_add_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_fetch_add_relaxed(i, v);
}
#define arch_atomic64_fetch_add_release arch_atomic64_fetch_add_release
#endif

#ifndef arch_atomic64_fetch_add
static __always_inline s64
arch_atomic64_fetch_add(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_fetch_add_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_fetch_add arch_atomic64_fetch_add
#endif

#endif /* arch_atomic64_fetch_add_relaxed */
#ifndef arch_atomic64_sub_return_relaxed
#define arch_atomic64_sub_return_acquire arch_atomic64_sub_return
#define arch_atomic64_sub_return_release arch_atomic64_sub_return
#define arch_atomic64_sub_return_relaxed arch_atomic64_sub_return
#else /* arch_atomic64_sub_return_relaxed */

#ifndef arch_atomic64_sub_return_acquire
static __always_inline s64
arch_atomic64_sub_return_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_sub_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_sub_return_acquire arch_atomic64_sub_return_acquire
#endif

#ifndef arch_atomic64_sub_return_release
static __always_inline s64
arch_atomic64_sub_return_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_sub_return_relaxed(i, v);
}
#define arch_atomic64_sub_return_release arch_atomic64_sub_return_release
#endif

#ifndef arch_atomic64_sub_return
static __always_inline s64
arch_atomic64_sub_return(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_sub_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_sub_return arch_atomic64_sub_return
#endif

#endif /* arch_atomic64_sub_return_relaxed */
#ifndef arch_atomic64_fetch_sub_relaxed
#define arch_atomic64_fetch_sub_acquire arch_atomic64_fetch_sub
#define arch_atomic64_fetch_sub_release arch_atomic64_fetch_sub
#define arch_atomic64_fetch_sub_relaxed arch_atomic64_fetch_sub
#else /* arch_atomic64_fetch_sub_relaxed */

#ifndef arch_atomic64_fetch_sub_acquire
static __always_inline s64
arch_atomic64_fetch_sub_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_fetch_sub_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_fetch_sub_acquire arch_atomic64_fetch_sub_acquire
#endif

#ifndef arch_atomic64_fetch_sub_release
static __always_inline s64
arch_atomic64_fetch_sub_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_fetch_sub_relaxed(i, v);
}
#define arch_atomic64_fetch_sub_release arch_atomic64_fetch_sub_release
#endif

#ifndef arch_atomic64_fetch_sub
static __always_inline s64
arch_atomic64_fetch_sub(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_fetch_sub_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_fetch_sub arch_atomic64_fetch_sub
#endif

#endif /* arch_atomic64_fetch_sub_relaxed */
1373 #ifndef arch_atomic64_inc
1374 static __always_inline void
1375 arch_atomic64_inc(atomic64_t *v)
1377 arch_atomic64_add(1, v);
1379 #define arch_atomic64_inc arch_atomic64_inc
1382 #ifndef arch_atomic64_inc_return_relaxed
1383 #ifdef arch_atomic64_inc_return
1384 #define arch_atomic64_inc_return_acquire arch_atomic64_inc_return
1385 #define arch_atomic64_inc_return_release arch_atomic64_inc_return
1386 #define arch_atomic64_inc_return_relaxed arch_atomic64_inc_return
1387 #endif /* arch_atomic64_inc_return */
1389 #ifndef arch_atomic64_inc_return
1390 static __always_inline s64
1391 arch_atomic64_inc_return(atomic64_t *v)
1393 return arch_atomic64_add_return(1, v);
1395 #define arch_atomic64_inc_return arch_atomic64_inc_return
1398 #ifndef arch_atomic64_inc_return_acquire
1399 static __always_inline s64
1400 arch_atomic64_inc_return_acquire(atomic64_t *v)
1402 return arch_atomic64_add_return_acquire(1, v);
1404 #define arch_atomic64_inc_return_acquire arch_atomic64_inc_return_acquire
1407 #ifndef arch_atomic64_inc_return_release
1408 static __always_inline s64
1409 arch_atomic64_inc_return_release(atomic64_t *v)
1411 return arch_atomic64_add_return_release(1, v);
1413 #define arch_atomic64_inc_return_release arch_atomic64_inc_return_release
1416 #ifndef arch_atomic64_inc_return_relaxed
1417 static __always_inline s64
1418 arch_atomic64_inc_return_relaxed(atomic64_t *v)
1420 return arch_atomic64_add_return_relaxed(1, v);
1422 #define arch_atomic64_inc_return_relaxed arch_atomic64_inc_return_relaxed
1425 #else /* arch_atomic64_inc_return_relaxed */
1427 #ifndef arch_atomic64_inc_return_acquire
1428 static __always_inline s64
1429 arch_atomic64_inc_return_acquire(atomic64_t *v)
1431 s64 ret = arch_atomic64_inc_return_relaxed(v);
1432 __atomic_acquire_fence();
1435 #define arch_atomic64_inc_return_acquire arch_atomic64_inc_return_acquire
1438 #ifndef arch_atomic64_inc_return_release
1439 static __always_inline s64
1440 arch_atomic64_inc_return_release(atomic64_t *v)
1442 __atomic_release_fence();
1443 return arch_atomic64_inc_return_relaxed(v);
1445 #define arch_atomic64_inc_return_release arch_atomic64_inc_return_release
1448 #ifndef arch_atomic64_inc_return
1449 static __always_inline s64
1450 arch_atomic64_inc_return(atomic64_t *v)
1453 __atomic_pre_full_fence();
1454 ret = arch_atomic64_inc_return_relaxed(v);
1455 __atomic_post_full_fence();
1458 #define arch_atomic64_inc_return arch_atomic64_inc_return
1461 #endif /* arch_atomic64_inc_return_relaxed */
1463 #ifndef arch_atomic64_fetch_inc_relaxed
1464 #ifdef arch_atomic64_fetch_inc
1465 #define arch_atomic64_fetch_inc_acquire arch_atomic64_fetch_inc
1466 #define arch_atomic64_fetch_inc_release arch_atomic64_fetch_inc
1467 #define arch_atomic64_fetch_inc_relaxed arch_atomic64_fetch_inc
1468 #endif /* arch_atomic64_fetch_inc */
1470 #ifndef arch_atomic64_fetch_inc
1471 static __always_inline s64
1472 arch_atomic64_fetch_inc(atomic64_t *v)
1474 return arch_atomic64_fetch_add(1, v);
1476 #define arch_atomic64_fetch_inc arch_atomic64_fetch_inc
1479 #ifndef arch_atomic64_fetch_inc_acquire
1480 static __always_inline s64
1481 arch_atomic64_fetch_inc_acquire(atomic64_t *v)
1483 return arch_atomic64_fetch_add_acquire(1, v);
1485 #define arch_atomic64_fetch_inc_acquire arch_atomic64_fetch_inc_acquire
1488 #ifndef arch_atomic64_fetch_inc_release
1489 static __always_inline s64
1490 arch_atomic64_fetch_inc_release(atomic64_t *v)
1492 return arch_atomic64_fetch_add_release(1, v);
1494 #define arch_atomic64_fetch_inc_release arch_atomic64_fetch_inc_release
1497 #ifndef arch_atomic64_fetch_inc_relaxed
1498 static __always_inline s64
1499 arch_atomic64_fetch_inc_relaxed(atomic64_t *v)
1501 return arch_atomic64_fetch_add_relaxed(1, v);
1503 #define arch_atomic64_fetch_inc_relaxed arch_atomic64_fetch_inc_relaxed
1506 #else /* arch_atomic64_fetch_inc_relaxed */
1508 #ifndef arch_atomic64_fetch_inc_acquire
1509 static __always_inline s64
1510 arch_atomic64_fetch_inc_acquire(atomic64_t *v)
1512 s64 ret = arch_atomic64_fetch_inc_relaxed(v);
1513 __atomic_acquire_fence();
1516 #define arch_atomic64_fetch_inc_acquire arch_atomic64_fetch_inc_acquire
1519 #ifndef arch_atomic64_fetch_inc_release
1520 static __always_inline s64
1521 arch_atomic64_fetch_inc_release(atomic64_t *v)
1523 __atomic_release_fence();
1524 return arch_atomic64_fetch_inc_relaxed(v);
1526 #define arch_atomic64_fetch_inc_release arch_atomic64_fetch_inc_release
1529 #ifndef arch_atomic64_fetch_inc
1530 static __always_inline s64
1531 arch_atomic64_fetch_inc(atomic64_t *v)
1534 __atomic_pre_full_fence();
1535 ret = arch_atomic64_fetch_inc_relaxed(v);
1536 __atomic_post_full_fence();
1539 #define arch_atomic64_fetch_inc arch_atomic64_fetch_inc
1542 #endif /* arch_atomic64_fetch_inc_relaxed */
1544 #ifndef arch_atomic64_dec
1545 static __always_inline void
1546 arch_atomic64_dec(atomic64_t *v)
1548 arch_atomic64_sub(1, v);
1550 #define arch_atomic64_dec arch_atomic64_dec
1553 #ifndef arch_atomic64_dec_return_relaxed
1554 #ifdef arch_atomic64_dec_return
1555 #define arch_atomic64_dec_return_acquire arch_atomic64_dec_return
1556 #define arch_atomic64_dec_return_release arch_atomic64_dec_return
1557 #define arch_atomic64_dec_return_relaxed arch_atomic64_dec_return
1558 #endif /* arch_atomic64_dec_return */
1560 #ifndef arch_atomic64_dec_return
1561 static __always_inline s64
1562 arch_atomic64_dec_return(atomic64_t *v)
1564 return arch_atomic64_sub_return(1, v);
1566 #define arch_atomic64_dec_return arch_atomic64_dec_return
1569 #ifndef arch_atomic64_dec_return_acquire
1570 static __always_inline s64
1571 arch_atomic64_dec_return_acquire(atomic64_t *v)
1573 return arch_atomic64_sub_return_acquire(1, v);
1575 #define arch_atomic64_dec_return_acquire arch_atomic64_dec_return_acquire
1578 #ifndef arch_atomic64_dec_return_release
1579 static __always_inline s64
1580 arch_atomic64_dec_return_release(atomic64_t *v)
1582 return arch_atomic64_sub_return_release(1, v);
1584 #define arch_atomic64_dec_return_release arch_atomic64_dec_return_release
1587 #ifndef arch_atomic64_dec_return_relaxed
1588 static __always_inline s64
1589 arch_atomic64_dec_return_relaxed(atomic64_t *v)
1591 return arch_atomic64_sub_return_relaxed(1, v);
1593 #define arch_atomic64_dec_return_relaxed arch_atomic64_dec_return_relaxed
1596 #else /* arch_atomic64_dec_return_relaxed */
1598 #ifndef arch_atomic64_dec_return_acquire
1599 static __always_inline s64
1600 arch_atomic64_dec_return_acquire(atomic64_t *v)
1602 s64 ret = arch_atomic64_dec_return_relaxed(v);
1603 __atomic_acquire_fence();
1606 #define arch_atomic64_dec_return_acquire arch_atomic64_dec_return_acquire
1609 #ifndef arch_atomic64_dec_return_release
1610 static __always_inline s64
1611 arch_atomic64_dec_return_release(atomic64_t *v)
1613 __atomic_release_fence();
1614 return arch_atomic64_dec_return_relaxed(v);
1616 #define arch_atomic64_dec_return_release arch_atomic64_dec_return_release
1619 #ifndef arch_atomic64_dec_return
1620 static __always_inline s64
1621 arch_atomic64_dec_return(atomic64_t *v)
1624 __atomic_pre_full_fence();
1625 ret = arch_atomic64_dec_return_relaxed(v);
1626 __atomic_post_full_fence();
1629 #define arch_atomic64_dec_return arch_atomic64_dec_return
1632 #endif /* arch_atomic64_dec_return_relaxed */
1634 #ifndef arch_atomic64_fetch_dec_relaxed
1635 #ifdef arch_atomic64_fetch_dec
1636 #define arch_atomic64_fetch_dec_acquire arch_atomic64_fetch_dec
1637 #define arch_atomic64_fetch_dec_release arch_atomic64_fetch_dec
1638 #define arch_atomic64_fetch_dec_relaxed arch_atomic64_fetch_dec
1639 #endif /* arch_atomic64_fetch_dec */
1641 #ifndef arch_atomic64_fetch_dec
1642 static __always_inline s64
1643 arch_atomic64_fetch_dec(atomic64_t *v)
1645 return arch_atomic64_fetch_sub(1, v);
1647 #define arch_atomic64_fetch_dec arch_atomic64_fetch_dec
1650 #ifndef arch_atomic64_fetch_dec_acquire
1651 static __always_inline s64
1652 arch_atomic64_fetch_dec_acquire(atomic64_t *v)
1654 return arch_atomic64_fetch_sub_acquire(1, v);
1656 #define arch_atomic64_fetch_dec_acquire arch_atomic64_fetch_dec_acquire
1659 #ifndef arch_atomic64_fetch_dec_release
1660 static __always_inline s64
1661 arch_atomic64_fetch_dec_release(atomic64_t *v)
1663 return arch_atomic64_fetch_sub_release(1, v);
1665 #define arch_atomic64_fetch_dec_release arch_atomic64_fetch_dec_release
1668 #ifndef arch_atomic64_fetch_dec_relaxed
1669 static __always_inline s64
1670 arch_atomic64_fetch_dec_relaxed(atomic64_t *v)
1672 return arch_atomic64_fetch_sub_relaxed(1, v);
1674 #define arch_atomic64_fetch_dec_relaxed arch_atomic64_fetch_dec_relaxed
1677 #else /* arch_atomic64_fetch_dec_relaxed */
1679 #ifndef arch_atomic64_fetch_dec_acquire
1680 static __always_inline s64
1681 arch_atomic64_fetch_dec_acquire(atomic64_t *v)
1683 s64 ret = arch_atomic64_fetch_dec_relaxed(v);
1684 __atomic_acquire_fence();
1687 #define arch_atomic64_fetch_dec_acquire arch_atomic64_fetch_dec_acquire
1690 #ifndef arch_atomic64_fetch_dec_release
1691 static __always_inline s64
1692 arch_atomic64_fetch_dec_release(atomic64_t *v)
1694 __atomic_release_fence();
1695 return arch_atomic64_fetch_dec_relaxed(v);
1697 #define arch_atomic64_fetch_dec_release arch_atomic64_fetch_dec_release
1700 #ifndef arch_atomic64_fetch_dec
1701 static __always_inline s64
1702 arch_atomic64_fetch_dec(atomic64_t *v)
1705 __atomic_pre_full_fence();
1706 ret = arch_atomic64_fetch_dec_relaxed(v);
1707 __atomic_post_full_fence();
1710 #define arch_atomic64_fetch_dec arch_atomic64_fetch_dec
1713 #endif /* arch_atomic64_fetch_dec_relaxed */
#ifndef arch_atomic64_fetch_and_relaxed
#define arch_atomic64_fetch_and_acquire arch_atomic64_fetch_and
#define arch_atomic64_fetch_and_release arch_atomic64_fetch_and
#define arch_atomic64_fetch_and_relaxed arch_atomic64_fetch_and
#else /* arch_atomic64_fetch_and_relaxed */

#ifndef arch_atomic64_fetch_and_acquire
static __always_inline s64
arch_atomic64_fetch_and_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_fetch_and_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_fetch_and_acquire arch_atomic64_fetch_and_acquire
#endif

#ifndef arch_atomic64_fetch_and_release
static __always_inline s64
arch_atomic64_fetch_and_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_fetch_and_relaxed(i, v);
}
#define arch_atomic64_fetch_and_release arch_atomic64_fetch_and_release
#endif

#ifndef arch_atomic64_fetch_and
static __always_inline s64
arch_atomic64_fetch_and(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_fetch_and_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_fetch_and arch_atomic64_fetch_and
#endif

#endif /* arch_atomic64_fetch_and_relaxed */
1757 #ifndef arch_atomic64_andnot
1758 static __always_inline void
1759 arch_atomic64_andnot(s64 i, atomic64_t *v)
1761 arch_atomic64_and(~i, v);
1763 #define arch_atomic64_andnot arch_atomic64_andnot
1766 #ifndef arch_atomic64_fetch_andnot_relaxed
1767 #ifdef arch_atomic64_fetch_andnot
1768 #define arch_atomic64_fetch_andnot_acquire arch_atomic64_fetch_andnot
1769 #define arch_atomic64_fetch_andnot_release arch_atomic64_fetch_andnot
1770 #define arch_atomic64_fetch_andnot_relaxed arch_atomic64_fetch_andnot
1771 #endif /* arch_atomic64_fetch_andnot */
1773 #ifndef arch_atomic64_fetch_andnot
1774 static __always_inline s64
1775 arch_atomic64_fetch_andnot(s64 i, atomic64_t *v)
1777 return arch_atomic64_fetch_and(~i, v);
1779 #define arch_atomic64_fetch_andnot arch_atomic64_fetch_andnot
1782 #ifndef arch_atomic64_fetch_andnot_acquire
1783 static __always_inline s64
1784 arch_atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
1786 return arch_atomic64_fetch_and_acquire(~i, v);
1788 #define arch_atomic64_fetch_andnot_acquire arch_atomic64_fetch_andnot_acquire
1791 #ifndef arch_atomic64_fetch_andnot_release
1792 static __always_inline s64
1793 arch_atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
1795 return arch_atomic64_fetch_and_release(~i, v);
1797 #define arch_atomic64_fetch_andnot_release arch_atomic64_fetch_andnot_release
1800 #ifndef arch_atomic64_fetch_andnot_relaxed
1801 static __always_inline s64
1802 arch_atomic64_fetch_andnot_relaxed(s64 i, atomic64_t *v)
1804 return arch_atomic64_fetch_and_relaxed(~i, v);
1806 #define arch_atomic64_fetch_andnot_relaxed arch_atomic64_fetch_andnot_relaxed
1809 #else /* arch_atomic64_fetch_andnot_relaxed */
1811 #ifndef arch_atomic64_fetch_andnot_acquire
1812 static __always_inline s64
1813 arch_atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
1815 s64 ret = arch_atomic64_fetch_andnot_relaxed(i, v);
1816 __atomic_acquire_fence();
1819 #define arch_atomic64_fetch_andnot_acquire arch_atomic64_fetch_andnot_acquire
1822 #ifndef arch_atomic64_fetch_andnot_release
1823 static __always_inline s64
1824 arch_atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
1826 __atomic_release_fence();
1827 return arch_atomic64_fetch_andnot_relaxed(i, v);
1829 #define arch_atomic64_fetch_andnot_release arch_atomic64_fetch_andnot_release
1832 #ifndef arch_atomic64_fetch_andnot
1833 static __always_inline s64
1834 arch_atomic64_fetch_andnot(s64 i, atomic64_t *v)
1837 __atomic_pre_full_fence();
1838 ret = arch_atomic64_fetch_andnot_relaxed(i, v);
1839 __atomic_post_full_fence();
1842 #define arch_atomic64_fetch_andnot arch_atomic64_fetch_andnot
1845 #endif /* arch_atomic64_fetch_andnot_relaxed */
#ifndef arch_atomic64_fetch_or_relaxed
#define arch_atomic64_fetch_or_acquire arch_atomic64_fetch_or
#define arch_atomic64_fetch_or_release arch_atomic64_fetch_or
#define arch_atomic64_fetch_or_relaxed arch_atomic64_fetch_or
#else /* arch_atomic64_fetch_or_relaxed */

#ifndef arch_atomic64_fetch_or_acquire
static __always_inline s64
arch_atomic64_fetch_or_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_fetch_or_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_fetch_or_acquire arch_atomic64_fetch_or_acquire
#endif

#ifndef arch_atomic64_fetch_or_release
static __always_inline s64
arch_atomic64_fetch_or_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_fetch_or_relaxed(i, v);
}
#define arch_atomic64_fetch_or_release arch_atomic64_fetch_or_release
#endif

#ifndef arch_atomic64_fetch_or
static __always_inline s64
arch_atomic64_fetch_or(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_fetch_or_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_fetch_or arch_atomic64_fetch_or
#endif

#endif /* arch_atomic64_fetch_or_relaxed */
#ifndef arch_atomic64_fetch_xor_relaxed
#define arch_atomic64_fetch_xor_acquire arch_atomic64_fetch_xor
#define arch_atomic64_fetch_xor_release arch_atomic64_fetch_xor
#define arch_atomic64_fetch_xor_relaxed arch_atomic64_fetch_xor
#else /* arch_atomic64_fetch_xor_relaxed */

#ifndef arch_atomic64_fetch_xor_acquire
static __always_inline s64
arch_atomic64_fetch_xor_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_fetch_xor_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_fetch_xor_acquire arch_atomic64_fetch_xor_acquire
#endif

#ifndef arch_atomic64_fetch_xor_release
static __always_inline s64
arch_atomic64_fetch_xor_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_fetch_xor_relaxed(i, v);
}
#define arch_atomic64_fetch_xor_release arch_atomic64_fetch_xor_release
#endif

#ifndef arch_atomic64_fetch_xor
static __always_inline s64
arch_atomic64_fetch_xor(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_fetch_xor_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_fetch_xor arch_atomic64_fetch_xor
#endif

#endif /* arch_atomic64_fetch_xor_relaxed */
#ifndef arch_atomic64_xchg_relaxed
#define arch_atomic64_xchg_acquire arch_atomic64_xchg
#define arch_atomic64_xchg_release arch_atomic64_xchg
#define arch_atomic64_xchg_relaxed arch_atomic64_xchg
#else /* arch_atomic64_xchg_relaxed */

#ifndef arch_atomic64_xchg_acquire
static __always_inline s64
arch_atomic64_xchg_acquire(atomic64_t *v, s64 i)
{
	s64 ret = arch_atomic64_xchg_relaxed(v, i);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_xchg_acquire arch_atomic64_xchg_acquire
#endif

#ifndef arch_atomic64_xchg_release
static __always_inline s64
arch_atomic64_xchg_release(atomic64_t *v, s64 i)
{
	__atomic_release_fence();
	return arch_atomic64_xchg_relaxed(v, i);
}
#define arch_atomic64_xchg_release arch_atomic64_xchg_release
#endif

#ifndef arch_atomic64_xchg
static __always_inline s64
arch_atomic64_xchg(atomic64_t *v, s64 i)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_xchg_relaxed(v, i);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_xchg arch_atomic64_xchg
#endif

#endif /* arch_atomic64_xchg_relaxed */
#ifndef arch_atomic64_cmpxchg_relaxed
#define arch_atomic64_cmpxchg_acquire arch_atomic64_cmpxchg
#define arch_atomic64_cmpxchg_release arch_atomic64_cmpxchg
#define arch_atomic64_cmpxchg_relaxed arch_atomic64_cmpxchg
#else /* arch_atomic64_cmpxchg_relaxed */

#ifndef arch_atomic64_cmpxchg_acquire
static __always_inline s64
arch_atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new)
{
	s64 ret = arch_atomic64_cmpxchg_relaxed(v, old, new);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_cmpxchg_acquire arch_atomic64_cmpxchg_acquire
#endif

#ifndef arch_atomic64_cmpxchg_release
static __always_inline s64
arch_atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new)
{
	__atomic_release_fence();
	return arch_atomic64_cmpxchg_relaxed(v, old, new);
}
#define arch_atomic64_cmpxchg_release arch_atomic64_cmpxchg_release
#endif

#ifndef arch_atomic64_cmpxchg
static __always_inline s64
arch_atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_cmpxchg_relaxed(v, old, new);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_cmpxchg arch_atomic64_cmpxchg
#endif

#endif /* arch_atomic64_cmpxchg_relaxed */
2015 #ifndef arch_atomic64_try_cmpxchg_relaxed
2016 #ifdef arch_atomic64_try_cmpxchg
2017 #define arch_atomic64_try_cmpxchg_acquire arch_atomic64_try_cmpxchg
2018 #define arch_atomic64_try_cmpxchg_release arch_atomic64_try_cmpxchg
2019 #define arch_atomic64_try_cmpxchg_relaxed arch_atomic64_try_cmpxchg
2020 #endif /* arch_atomic64_try_cmpxchg */
2022 #ifndef arch_atomic64_try_cmpxchg
2023 static __always_inline bool
2024 arch_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
2027 r = arch_atomic64_cmpxchg(v, o, new);
2028 if (unlikely(r != o))
2030 return likely(r == o);
2032 #define arch_atomic64_try_cmpxchg arch_atomic64_try_cmpxchg
2035 #ifndef arch_atomic64_try_cmpxchg_acquire
2036 static __always_inline bool
2037 arch_atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
2040 r = arch_atomic64_cmpxchg_acquire(v, o, new);
2041 if (unlikely(r != o))
2043 return likely(r == o);
2045 #define arch_atomic64_try_cmpxchg_acquire arch_atomic64_try_cmpxchg_acquire
2048 #ifndef arch_atomic64_try_cmpxchg_release
2049 static __always_inline bool
2050 arch_atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
2053 r = arch_atomic64_cmpxchg_release(v, o, new);
2054 if (unlikely(r != o))
2056 return likely(r == o);
2058 #define arch_atomic64_try_cmpxchg_release arch_atomic64_try_cmpxchg_release
2061 #ifndef arch_atomic64_try_cmpxchg_relaxed
2062 static __always_inline bool
2063 arch_atomic64_try_cmpxchg_relaxed(atomic64_t *v, s64 *old, s64 new)
2066 r = arch_atomic64_cmpxchg_relaxed(v, o, new);
2067 if (unlikely(r != o))
2069 return likely(r == o);
2071 #define arch_atomic64_try_cmpxchg_relaxed arch_atomic64_try_cmpxchg_relaxed
2074 #else /* arch_atomic64_try_cmpxchg_relaxed */
2076 #ifndef arch_atomic64_try_cmpxchg_acquire
2077 static __always_inline bool
2078 arch_atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
2080 bool ret = arch_atomic64_try_cmpxchg_relaxed(v, old, new);
2081 __atomic_acquire_fence();
2084 #define arch_atomic64_try_cmpxchg_acquire arch_atomic64_try_cmpxchg_acquire
2087 #ifndef arch_atomic64_try_cmpxchg_release
2088 static __always_inline bool
2089 arch_atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
2091 __atomic_release_fence();
2092 return arch_atomic64_try_cmpxchg_relaxed(v, old, new);
2094 #define arch_atomic64_try_cmpxchg_release arch_atomic64_try_cmpxchg_release
2097 #ifndef arch_atomic64_try_cmpxchg
2098 static __always_inline bool
2099 arch_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
2102 __atomic_pre_full_fence();
2103 ret = arch_atomic64_try_cmpxchg_relaxed(v, old, new);
2104 __atomic_post_full_fence();
2107 #define arch_atomic64_try_cmpxchg arch_atomic64_try_cmpxchg
2110 #endif /* arch_atomic64_try_cmpxchg_relaxed */
2112 #ifndef arch_atomic64_sub_and_test
2114 * arch_atomic64_sub_and_test - subtract value from variable and test result
2115 * @i: integer value to subtract
2116 * @v: pointer of type atomic64_t
2118 * Atomically subtracts @i from @v and returns
2119 * true if the result is zero, or false for all
2122 static __always_inline bool
2123 arch_atomic64_sub_and_test(s64 i, atomic64_t *v)
2125 return arch_atomic64_sub_return(i, v) == 0;
2127 #define arch_atomic64_sub_and_test arch_atomic64_sub_and_test
2130 #ifndef arch_atomic64_dec_and_test
2132 * arch_atomic64_dec_and_test - decrement and test
2133 * @v: pointer of type atomic64_t
2135 * Atomically decrements @v by 1 and
2136 * returns true if the result is 0, or false for all other
2139 static __always_inline bool
2140 arch_atomic64_dec_and_test(atomic64_t *v)
2142 return arch_atomic64_dec_return(v) == 0;
2144 #define arch_atomic64_dec_and_test arch_atomic64_dec_and_test
2147 #ifndef arch_atomic64_inc_and_test
2149 * arch_atomic64_inc_and_test - increment and test
2150 * @v: pointer of type atomic64_t
2152 * Atomically increments @v by 1
2153 * and returns true if the result is zero, or false for all
2156 static __always_inline bool
2157 arch_atomic64_inc_and_test(atomic64_t *v)
2159 return arch_atomic64_inc_return(v) == 0;
2161 #define arch_atomic64_inc_and_test arch_atomic64_inc_and_test
2164 #ifndef arch_atomic64_add_negative
2166 * arch_atomic64_add_negative - add and test if negative
2167 * @i: integer value to add
2168 * @v: pointer of type atomic64_t
2170 * Atomically adds @i to @v and returns true
2171 * if the result is negative, or false when
2172 * result is greater than or equal to zero.
2174 static __always_inline bool
2175 arch_atomic64_add_negative(s64 i, atomic64_t *v)
2177 return arch_atomic64_add_return(i, v) < 0;
2179 #define arch_atomic64_add_negative arch_atomic64_add_negative
2182 #ifndef arch_atomic64_fetch_add_unless
2184 * arch_atomic64_fetch_add_unless - add unless the number is already a given value
2185 * @v: pointer of type atomic64_t
2186 * @a: the amount to add to v...
2187 * @u: ...unless v is equal to u.
2189 * Atomically adds @a to @v, so long as @v was not already @u.
2190 * Returns original value of @v
2192 static __always_inline s64
2193 arch_atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
2195 s64 c = arch_atomic64_read(v);
2198 if (unlikely(c == u))
2200 } while (!arch_atomic64_try_cmpxchg(v, &c, c + a));
2204 #define arch_atomic64_fetch_add_unless arch_atomic64_fetch_add_unless
2207 #ifndef arch_atomic64_add_unless
2209 * arch_atomic64_add_unless - add unless the number is already a given value
2210 * @v: pointer of type atomic64_t
2211 * @a: the amount to add to v...
2212 * @u: ...unless v is equal to u.
2214 * Atomically adds @a to @v, if @v was not already @u.
2215 * Returns true if the addition was done.
2217 static __always_inline bool
2218 arch_atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
2220 return arch_atomic64_fetch_add_unless(v, a, u) != u;
2222 #define arch_atomic64_add_unless arch_atomic64_add_unless
2225 #ifndef arch_atomic64_inc_not_zero
2227 * arch_atomic64_inc_not_zero - increment unless the number is zero
2228 * @v: pointer of type atomic64_t
2230 * Atomically increments @v by 1, if @v is non-zero.
2231 * Returns true if the increment was done.
2233 static __always_inline bool
2234 arch_atomic64_inc_not_zero(atomic64_t *v)
2236 return arch_atomic64_add_unless(v, 1, 0);
2238 #define arch_atomic64_inc_not_zero arch_atomic64_inc_not_zero
2241 #ifndef arch_atomic64_inc_unless_negative
2242 static __always_inline bool
2243 arch_atomic64_inc_unless_negative(atomic64_t *v)
2245 s64 c = arch_atomic64_read(v);
2248 if (unlikely(c < 0))
2250 } while (!arch_atomic64_try_cmpxchg(v, &c, c + 1));
2254 #define arch_atomic64_inc_unless_negative arch_atomic64_inc_unless_negative
2257 #ifndef arch_atomic64_dec_unless_positive
2258 static __always_inline bool
2259 arch_atomic64_dec_unless_positive(atomic64_t *v)
2261 s64 c = arch_atomic64_read(v);
2264 if (unlikely(c > 0))
2266 } while (!arch_atomic64_try_cmpxchg(v, &c, c - 1));
2270 #define arch_atomic64_dec_unless_positive arch_atomic64_dec_unless_positive
2273 #ifndef arch_atomic64_dec_if_positive
2274 static __always_inline s64
2275 arch_atomic64_dec_if_positive(atomic64_t *v)
2277 s64 dec, c = arch_atomic64_read(v);
2281 if (unlikely(dec < 0))
2283 } while (!arch_atomic64_try_cmpxchg(v, &c, dec));
2287 #define arch_atomic64_dec_if_positive arch_atomic64_dec_if_positive
2290 #endif /* _LINUX_ATOMIC_FALLBACK_H */
2291 // 90cd26cfd69d2250303d654955a0cc12620fb91b