Merge tag 'for-linus' of git://git.kernel.org/pub/scm/virt/kvm/kvm
[linux-block.git] / include / linux / atomic-fallback.h
CommitLineData
9fa45070
MR
1// SPDX-License-Identifier: GPL-2.0
2
3// Generated by scripts/atomic/gen-atomic-fallback.sh
4// DO NOT MODIFY THIS FILE DIRECTLY
5
6#ifndef _LINUX_ATOMIC_FALLBACK_H
7#define _LINUX_ATOMIC_FALLBACK_H
8
765dcd20
ME
9#include <linux/compiler.h>
10
/*
 * xchg: if the architecture provides no _relaxed form, alias all ordering
 * variants to the fully-ordered xchg; otherwise build acquire/release/full
 * variants around xchg_relaxed using the generic fence helpers.
 */
#ifndef xchg_relaxed
#define xchg_relaxed xchg
#define xchg_acquire xchg
#define xchg_release xchg
#else /* xchg_relaxed */

#ifndef xchg_acquire
#define xchg_acquire(...) \
	__atomic_op_acquire(xchg, __VA_ARGS__)
#endif

#ifndef xchg_release
#define xchg_release(...) \
	__atomic_op_release(xchg, __VA_ARGS__)
#endif

#ifndef xchg
#define xchg(...) \
	__atomic_op_fence(xchg, __VA_ARGS__)
#endif

#endif /* xchg_relaxed */

/* cmpxchg: same ordering-variant construction as xchg above. */
#ifndef cmpxchg_relaxed
#define cmpxchg_relaxed cmpxchg
#define cmpxchg_acquire cmpxchg
#define cmpxchg_release cmpxchg
#else /* cmpxchg_relaxed */

#ifndef cmpxchg_acquire
#define cmpxchg_acquire(...) \
	__atomic_op_acquire(cmpxchg, __VA_ARGS__)
#endif

#ifndef cmpxchg_release
#define cmpxchg_release(...) \
	__atomic_op_release(cmpxchg, __VA_ARGS__)
#endif

#ifndef cmpxchg
#define cmpxchg(...) \
	__atomic_op_fence(cmpxchg, __VA_ARGS__)
#endif

#endif /* cmpxchg_relaxed */

/* cmpxchg64: same ordering-variant construction, for 64-bit operands. */
#ifndef cmpxchg64_relaxed
#define cmpxchg64_relaxed cmpxchg64
#define cmpxchg64_acquire cmpxchg64
#define cmpxchg64_release cmpxchg64
#else /* cmpxchg64_relaxed */

#ifndef cmpxchg64_acquire
#define cmpxchg64_acquire(...) \
	__atomic_op_acquire(cmpxchg64, __VA_ARGS__)
#endif

#ifndef cmpxchg64_release
#define cmpxchg64_release(...) \
	__atomic_op_release(cmpxchg64, __VA_ARGS__)
#endif

#ifndef cmpxchg64
#define cmpxchg64(...) \
	__atomic_op_fence(cmpxchg64, __VA_ARGS__)
#endif

#endif /* cmpxchg64_relaxed */

5faafd56
PZ
80#define arch_atomic_read atomic_read
81#define arch_atomic_read_acquire atomic_read_acquire
82
9fa45070 83#ifndef atomic_read_acquire
765dcd20 84static __always_inline int
9fa45070
MR
85atomic_read_acquire(const atomic_t *v)
86{
87 return smp_load_acquire(&(v)->counter);
88}
89#define atomic_read_acquire atomic_read_acquire
90#endif
91
5faafd56
PZ
92#define arch_atomic_set atomic_set
93#define arch_atomic_set_release atomic_set_release
94
9fa45070 95#ifndef atomic_set_release
765dcd20 96static __always_inline void
9fa45070
MR
97atomic_set_release(atomic_t *v, int i)
98{
99 smp_store_release(&(v)->counter, i);
100}
101#define atomic_set_release atomic_set_release
102#endif
103
#define arch_atomic_add atomic_add

#define arch_atomic_add_return atomic_add_return
#define arch_atomic_add_return_acquire atomic_add_return_acquire
#define arch_atomic_add_return_release atomic_add_return_release
#define arch_atomic_add_return_relaxed atomic_add_return_relaxed

/*
 * atomic_add_return: without a _relaxed form, all variants alias the
 * fully-ordered op; with one, acquire/release/full are built from it.
 */
#ifndef atomic_add_return_relaxed
#define atomic_add_return_acquire atomic_add_return
#define atomic_add_return_release atomic_add_return
#define atomic_add_return_relaxed atomic_add_return
#else /* atomic_add_return_relaxed */

#ifndef atomic_add_return_acquire
static __always_inline int
atomic_add_return_acquire(int i, atomic_t *v)
{
	int ret = atomic_add_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_add_return_acquire atomic_add_return_acquire
#endif

#ifndef atomic_add_return_release
static __always_inline int
atomic_add_return_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_add_return_relaxed(i, v);
}
#define atomic_add_return_release atomic_add_return_release
#endif

#ifndef atomic_add_return
static __always_inline int
atomic_add_return(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_add_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_add_return atomic_add_return
#endif

#endif /* atomic_add_return_relaxed */

#define arch_atomic_fetch_add atomic_fetch_add
#define arch_atomic_fetch_add_acquire atomic_fetch_add_acquire
#define arch_atomic_fetch_add_release atomic_fetch_add_release
#define arch_atomic_fetch_add_relaxed atomic_fetch_add_relaxed

/* atomic_fetch_add ordering-variant fallbacks (see atomic_add_return). */
#ifndef atomic_fetch_add_relaxed
#define atomic_fetch_add_acquire atomic_fetch_add
#define atomic_fetch_add_release atomic_fetch_add
#define atomic_fetch_add_relaxed atomic_fetch_add
#else /* atomic_fetch_add_relaxed */

#ifndef atomic_fetch_add_acquire
static __always_inline int
atomic_fetch_add_acquire(int i, atomic_t *v)
{
	int ret = atomic_fetch_add_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_fetch_add_acquire atomic_fetch_add_acquire
#endif

#ifndef atomic_fetch_add_release
static __always_inline int
atomic_fetch_add_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_fetch_add_relaxed(i, v);
}
#define atomic_fetch_add_release atomic_fetch_add_release
#endif

#ifndef atomic_fetch_add
static __always_inline int
atomic_fetch_add(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_fetch_add_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_fetch_add atomic_fetch_add
#endif

#endif /* atomic_fetch_add_relaxed */

#define arch_atomic_sub atomic_sub

#define arch_atomic_sub_return atomic_sub_return
#define arch_atomic_sub_return_acquire atomic_sub_return_acquire
#define arch_atomic_sub_return_release atomic_sub_return_release
#define arch_atomic_sub_return_relaxed atomic_sub_return_relaxed

/* atomic_sub_return ordering-variant fallbacks (see atomic_add_return). */
#ifndef atomic_sub_return_relaxed
#define atomic_sub_return_acquire atomic_sub_return
#define atomic_sub_return_release atomic_sub_return
#define atomic_sub_return_relaxed atomic_sub_return
#else /* atomic_sub_return_relaxed */

#ifndef atomic_sub_return_acquire
static __always_inline int
atomic_sub_return_acquire(int i, atomic_t *v)
{
	int ret = atomic_sub_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_sub_return_acquire atomic_sub_return_acquire
#endif

#ifndef atomic_sub_return_release
static __always_inline int
atomic_sub_return_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_sub_return_relaxed(i, v);
}
#define atomic_sub_return_release atomic_sub_return_release
#endif

#ifndef atomic_sub_return
static __always_inline int
atomic_sub_return(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_sub_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_sub_return atomic_sub_return
#endif

#endif /* atomic_sub_return_relaxed */

#define arch_atomic_fetch_sub atomic_fetch_sub
#define arch_atomic_fetch_sub_acquire atomic_fetch_sub_acquire
#define arch_atomic_fetch_sub_release atomic_fetch_sub_release
#define arch_atomic_fetch_sub_relaxed atomic_fetch_sub_relaxed

/* atomic_fetch_sub ordering-variant fallbacks (see atomic_add_return). */
#ifndef atomic_fetch_sub_relaxed
#define atomic_fetch_sub_acquire atomic_fetch_sub
#define atomic_fetch_sub_release atomic_fetch_sub
#define atomic_fetch_sub_relaxed atomic_fetch_sub
#else /* atomic_fetch_sub_relaxed */

#ifndef atomic_fetch_sub_acquire
static __always_inline int
atomic_fetch_sub_acquire(int i, atomic_t *v)
{
	int ret = atomic_fetch_sub_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_fetch_sub_acquire atomic_fetch_sub_acquire
#endif

#ifndef atomic_fetch_sub_release
static __always_inline int
atomic_fetch_sub_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_fetch_sub_relaxed(i, v);
}
#define atomic_fetch_sub_release atomic_fetch_sub_release
#endif

#ifndef atomic_fetch_sub
static __always_inline int
atomic_fetch_sub(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_fetch_sub_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_fetch_sub atomic_fetch_sub
#endif

#endif /* atomic_fetch_sub_relaxed */

5faafd56
PZ
296#define arch_atomic_inc atomic_inc
297
9fa45070 298#ifndef atomic_inc
765dcd20 299static __always_inline void
9fa45070
MR
300atomic_inc(atomic_t *v)
301{
302 atomic_add(1, v);
303}
304#define atomic_inc atomic_inc
305#endif
306
5faafd56
PZ
307#define arch_atomic_inc_return atomic_inc_return
308#define arch_atomic_inc_return_acquire atomic_inc_return_acquire
309#define arch_atomic_inc_return_release atomic_inc_return_release
310#define arch_atomic_inc_return_relaxed atomic_inc_return_relaxed
311
9fa45070
MR
312#ifndef atomic_inc_return_relaxed
313#ifdef atomic_inc_return
314#define atomic_inc_return_acquire atomic_inc_return
315#define atomic_inc_return_release atomic_inc_return
316#define atomic_inc_return_relaxed atomic_inc_return
317#endif /* atomic_inc_return */
318
319#ifndef atomic_inc_return
765dcd20 320static __always_inline int
9fa45070
MR
321atomic_inc_return(atomic_t *v)
322{
323 return atomic_add_return(1, v);
324}
325#define atomic_inc_return atomic_inc_return
326#endif
327
328#ifndef atomic_inc_return_acquire
765dcd20 329static __always_inline int
9fa45070
MR
330atomic_inc_return_acquire(atomic_t *v)
331{
332 return atomic_add_return_acquire(1, v);
333}
334#define atomic_inc_return_acquire atomic_inc_return_acquire
335#endif
336
337#ifndef atomic_inc_return_release
765dcd20 338static __always_inline int
9fa45070
MR
339atomic_inc_return_release(atomic_t *v)
340{
341 return atomic_add_return_release(1, v);
342}
343#define atomic_inc_return_release atomic_inc_return_release
344#endif
345
346#ifndef atomic_inc_return_relaxed
765dcd20 347static __always_inline int
9fa45070
MR
348atomic_inc_return_relaxed(atomic_t *v)
349{
350 return atomic_add_return_relaxed(1, v);
351}
352#define atomic_inc_return_relaxed atomic_inc_return_relaxed
353#endif
354
355#else /* atomic_inc_return_relaxed */
356
357#ifndef atomic_inc_return_acquire
765dcd20 358static __always_inline int
9fa45070
MR
359atomic_inc_return_acquire(atomic_t *v)
360{
361 int ret = atomic_inc_return_relaxed(v);
362 __atomic_acquire_fence();
363 return ret;
364}
365#define atomic_inc_return_acquire atomic_inc_return_acquire
366#endif
367
368#ifndef atomic_inc_return_release
765dcd20 369static __always_inline int
9fa45070
MR
370atomic_inc_return_release(atomic_t *v)
371{
372 __atomic_release_fence();
373 return atomic_inc_return_relaxed(v);
374}
375#define atomic_inc_return_release atomic_inc_return_release
376#endif
377
378#ifndef atomic_inc_return
765dcd20 379static __always_inline int
9fa45070
MR
380atomic_inc_return(atomic_t *v)
381{
382 int ret;
383 __atomic_pre_full_fence();
384 ret = atomic_inc_return_relaxed(v);
385 __atomic_post_full_fence();
386 return ret;
387}
388#define atomic_inc_return atomic_inc_return
389#endif
390
391#endif /* atomic_inc_return_relaxed */
392
5faafd56
PZ
393#define arch_atomic_fetch_inc atomic_fetch_inc
394#define arch_atomic_fetch_inc_acquire atomic_fetch_inc_acquire
395#define arch_atomic_fetch_inc_release atomic_fetch_inc_release
396#define arch_atomic_fetch_inc_relaxed atomic_fetch_inc_relaxed
397
9fa45070
MR
398#ifndef atomic_fetch_inc_relaxed
399#ifdef atomic_fetch_inc
400#define atomic_fetch_inc_acquire atomic_fetch_inc
401#define atomic_fetch_inc_release atomic_fetch_inc
402#define atomic_fetch_inc_relaxed atomic_fetch_inc
403#endif /* atomic_fetch_inc */
404
405#ifndef atomic_fetch_inc
765dcd20 406static __always_inline int
9fa45070
MR
407atomic_fetch_inc(atomic_t *v)
408{
409 return atomic_fetch_add(1, v);
410}
411#define atomic_fetch_inc atomic_fetch_inc
412#endif
413
414#ifndef atomic_fetch_inc_acquire
765dcd20 415static __always_inline int
9fa45070
MR
416atomic_fetch_inc_acquire(atomic_t *v)
417{
418 return atomic_fetch_add_acquire(1, v);
419}
420#define atomic_fetch_inc_acquire atomic_fetch_inc_acquire
421#endif
422
423#ifndef atomic_fetch_inc_release
765dcd20 424static __always_inline int
9fa45070
MR
425atomic_fetch_inc_release(atomic_t *v)
426{
427 return atomic_fetch_add_release(1, v);
428}
429#define atomic_fetch_inc_release atomic_fetch_inc_release
430#endif
431
432#ifndef atomic_fetch_inc_relaxed
765dcd20 433static __always_inline int
9fa45070
MR
434atomic_fetch_inc_relaxed(atomic_t *v)
435{
436 return atomic_fetch_add_relaxed(1, v);
437}
438#define atomic_fetch_inc_relaxed atomic_fetch_inc_relaxed
439#endif
440
441#else /* atomic_fetch_inc_relaxed */
442
443#ifndef atomic_fetch_inc_acquire
765dcd20 444static __always_inline int
9fa45070
MR
445atomic_fetch_inc_acquire(atomic_t *v)
446{
447 int ret = atomic_fetch_inc_relaxed(v);
448 __atomic_acquire_fence();
449 return ret;
450}
451#define atomic_fetch_inc_acquire atomic_fetch_inc_acquire
452#endif
453
454#ifndef atomic_fetch_inc_release
765dcd20 455static __always_inline int
9fa45070
MR
456atomic_fetch_inc_release(atomic_t *v)
457{
458 __atomic_release_fence();
459 return atomic_fetch_inc_relaxed(v);
460}
461#define atomic_fetch_inc_release atomic_fetch_inc_release
462#endif
463
464#ifndef atomic_fetch_inc
765dcd20 465static __always_inline int
9fa45070
MR
466atomic_fetch_inc(atomic_t *v)
467{
468 int ret;
469 __atomic_pre_full_fence();
470 ret = atomic_fetch_inc_relaxed(v);
471 __atomic_post_full_fence();
472 return ret;
473}
474#define atomic_fetch_inc atomic_fetch_inc
475#endif
476
477#endif /* atomic_fetch_inc_relaxed */
478
5faafd56
PZ
479#define arch_atomic_dec atomic_dec
480
9fa45070 481#ifndef atomic_dec
765dcd20 482static __always_inline void
9fa45070
MR
483atomic_dec(atomic_t *v)
484{
485 atomic_sub(1, v);
486}
487#define atomic_dec atomic_dec
488#endif
489
5faafd56
PZ
490#define arch_atomic_dec_return atomic_dec_return
491#define arch_atomic_dec_return_acquire atomic_dec_return_acquire
492#define arch_atomic_dec_return_release atomic_dec_return_release
493#define arch_atomic_dec_return_relaxed atomic_dec_return_relaxed
494
9fa45070
MR
495#ifndef atomic_dec_return_relaxed
496#ifdef atomic_dec_return
497#define atomic_dec_return_acquire atomic_dec_return
498#define atomic_dec_return_release atomic_dec_return
499#define atomic_dec_return_relaxed atomic_dec_return
500#endif /* atomic_dec_return */
501
502#ifndef atomic_dec_return
765dcd20 503static __always_inline int
9fa45070
MR
504atomic_dec_return(atomic_t *v)
505{
506 return atomic_sub_return(1, v);
507}
508#define atomic_dec_return atomic_dec_return
509#endif
510
511#ifndef atomic_dec_return_acquire
765dcd20 512static __always_inline int
9fa45070
MR
513atomic_dec_return_acquire(atomic_t *v)
514{
515 return atomic_sub_return_acquire(1, v);
516}
517#define atomic_dec_return_acquire atomic_dec_return_acquire
518#endif
519
520#ifndef atomic_dec_return_release
765dcd20 521static __always_inline int
9fa45070
MR
522atomic_dec_return_release(atomic_t *v)
523{
524 return atomic_sub_return_release(1, v);
525}
526#define atomic_dec_return_release atomic_dec_return_release
527#endif
528
529#ifndef atomic_dec_return_relaxed
765dcd20 530static __always_inline int
9fa45070
MR
531atomic_dec_return_relaxed(atomic_t *v)
532{
533 return atomic_sub_return_relaxed(1, v);
534}
535#define atomic_dec_return_relaxed atomic_dec_return_relaxed
536#endif
537
538#else /* atomic_dec_return_relaxed */
539
540#ifndef atomic_dec_return_acquire
765dcd20 541static __always_inline int
9fa45070
MR
542atomic_dec_return_acquire(atomic_t *v)
543{
544 int ret = atomic_dec_return_relaxed(v);
545 __atomic_acquire_fence();
546 return ret;
547}
548#define atomic_dec_return_acquire atomic_dec_return_acquire
549#endif
550
551#ifndef atomic_dec_return_release
765dcd20 552static __always_inline int
9fa45070
MR
553atomic_dec_return_release(atomic_t *v)
554{
555 __atomic_release_fence();
556 return atomic_dec_return_relaxed(v);
557}
558#define atomic_dec_return_release atomic_dec_return_release
559#endif
560
561#ifndef atomic_dec_return
765dcd20 562static __always_inline int
9fa45070
MR
563atomic_dec_return(atomic_t *v)
564{
565 int ret;
566 __atomic_pre_full_fence();
567 ret = atomic_dec_return_relaxed(v);
568 __atomic_post_full_fence();
569 return ret;
570}
571#define atomic_dec_return atomic_dec_return
572#endif
573
574#endif /* atomic_dec_return_relaxed */
575
5faafd56
PZ
576#define arch_atomic_fetch_dec atomic_fetch_dec
577#define arch_atomic_fetch_dec_acquire atomic_fetch_dec_acquire
578#define arch_atomic_fetch_dec_release atomic_fetch_dec_release
579#define arch_atomic_fetch_dec_relaxed atomic_fetch_dec_relaxed
580
9fa45070
MR
581#ifndef atomic_fetch_dec_relaxed
582#ifdef atomic_fetch_dec
583#define atomic_fetch_dec_acquire atomic_fetch_dec
584#define atomic_fetch_dec_release atomic_fetch_dec
585#define atomic_fetch_dec_relaxed atomic_fetch_dec
586#endif /* atomic_fetch_dec */
587
588#ifndef atomic_fetch_dec
765dcd20 589static __always_inline int
9fa45070
MR
590atomic_fetch_dec(atomic_t *v)
591{
592 return atomic_fetch_sub(1, v);
593}
594#define atomic_fetch_dec atomic_fetch_dec
595#endif
596
597#ifndef atomic_fetch_dec_acquire
765dcd20 598static __always_inline int
9fa45070
MR
599atomic_fetch_dec_acquire(atomic_t *v)
600{
601 return atomic_fetch_sub_acquire(1, v);
602}
603#define atomic_fetch_dec_acquire atomic_fetch_dec_acquire
604#endif
605
606#ifndef atomic_fetch_dec_release
765dcd20 607static __always_inline int
9fa45070
MR
608atomic_fetch_dec_release(atomic_t *v)
609{
610 return atomic_fetch_sub_release(1, v);
611}
612#define atomic_fetch_dec_release atomic_fetch_dec_release
613#endif
614
615#ifndef atomic_fetch_dec_relaxed
765dcd20 616static __always_inline int
9fa45070
MR
617atomic_fetch_dec_relaxed(atomic_t *v)
618{
619 return atomic_fetch_sub_relaxed(1, v);
620}
621#define atomic_fetch_dec_relaxed atomic_fetch_dec_relaxed
622#endif
623
624#else /* atomic_fetch_dec_relaxed */
625
626#ifndef atomic_fetch_dec_acquire
765dcd20 627static __always_inline int
9fa45070
MR
628atomic_fetch_dec_acquire(atomic_t *v)
629{
630 int ret = atomic_fetch_dec_relaxed(v);
631 __atomic_acquire_fence();
632 return ret;
633}
634#define atomic_fetch_dec_acquire atomic_fetch_dec_acquire
635#endif
636
637#ifndef atomic_fetch_dec_release
765dcd20 638static __always_inline int
9fa45070
MR
639atomic_fetch_dec_release(atomic_t *v)
640{
641 __atomic_release_fence();
642 return atomic_fetch_dec_relaxed(v);
643}
644#define atomic_fetch_dec_release atomic_fetch_dec_release
645#endif
646
647#ifndef atomic_fetch_dec
765dcd20 648static __always_inline int
9fa45070
MR
649atomic_fetch_dec(atomic_t *v)
650{
651 int ret;
652 __atomic_pre_full_fence();
653 ret = atomic_fetch_dec_relaxed(v);
654 __atomic_post_full_fence();
655 return ret;
656}
657#define atomic_fetch_dec atomic_fetch_dec
658#endif
659
660#endif /* atomic_fetch_dec_relaxed */
661
#define arch_atomic_and atomic_and

#define arch_atomic_fetch_and atomic_fetch_and
#define arch_atomic_fetch_and_acquire atomic_fetch_and_acquire
#define arch_atomic_fetch_and_release atomic_fetch_and_release
#define arch_atomic_fetch_and_relaxed atomic_fetch_and_relaxed

/* atomic_fetch_and ordering-variant fallbacks (see atomic_add_return). */
#ifndef atomic_fetch_and_relaxed
#define atomic_fetch_and_acquire atomic_fetch_and
#define atomic_fetch_and_release atomic_fetch_and
#define atomic_fetch_and_relaxed atomic_fetch_and
#else /* atomic_fetch_and_relaxed */

#ifndef atomic_fetch_and_acquire
static __always_inline int
atomic_fetch_and_acquire(int i, atomic_t *v)
{
	int ret = atomic_fetch_and_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_fetch_and_acquire atomic_fetch_and_acquire
#endif

#ifndef atomic_fetch_and_release
static __always_inline int
atomic_fetch_and_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_fetch_and_relaxed(i, v);
}
#define atomic_fetch_and_release atomic_fetch_and_release
#endif

#ifndef atomic_fetch_and
static __always_inline int
atomic_fetch_and(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_fetch_and_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_fetch_and atomic_fetch_and
#endif

#endif /* atomic_fetch_and_relaxed */

5faafd56
PZ
711#define arch_atomic_andnot atomic_andnot
712
9fa45070 713#ifndef atomic_andnot
765dcd20 714static __always_inline void
9fa45070
MR
715atomic_andnot(int i, atomic_t *v)
716{
717 atomic_and(~i, v);
718}
719#define atomic_andnot atomic_andnot
720#endif
721
5faafd56
PZ
722#define arch_atomic_fetch_andnot atomic_fetch_andnot
723#define arch_atomic_fetch_andnot_acquire atomic_fetch_andnot_acquire
724#define arch_atomic_fetch_andnot_release atomic_fetch_andnot_release
725#define arch_atomic_fetch_andnot_relaxed atomic_fetch_andnot_relaxed
726
9fa45070
MR
727#ifndef atomic_fetch_andnot_relaxed
728#ifdef atomic_fetch_andnot
729#define atomic_fetch_andnot_acquire atomic_fetch_andnot
730#define atomic_fetch_andnot_release atomic_fetch_andnot
731#define atomic_fetch_andnot_relaxed atomic_fetch_andnot
732#endif /* atomic_fetch_andnot */
733
734#ifndef atomic_fetch_andnot
765dcd20 735static __always_inline int
9fa45070
MR
736atomic_fetch_andnot(int i, atomic_t *v)
737{
738 return atomic_fetch_and(~i, v);
739}
740#define atomic_fetch_andnot atomic_fetch_andnot
741#endif
742
743#ifndef atomic_fetch_andnot_acquire
765dcd20 744static __always_inline int
9fa45070
MR
745atomic_fetch_andnot_acquire(int i, atomic_t *v)
746{
747 return atomic_fetch_and_acquire(~i, v);
748}
749#define atomic_fetch_andnot_acquire atomic_fetch_andnot_acquire
750#endif
751
752#ifndef atomic_fetch_andnot_release
765dcd20 753static __always_inline int
9fa45070
MR
754atomic_fetch_andnot_release(int i, atomic_t *v)
755{
756 return atomic_fetch_and_release(~i, v);
757}
758#define atomic_fetch_andnot_release atomic_fetch_andnot_release
759#endif
760
761#ifndef atomic_fetch_andnot_relaxed
765dcd20 762static __always_inline int
9fa45070
MR
763atomic_fetch_andnot_relaxed(int i, atomic_t *v)
764{
765 return atomic_fetch_and_relaxed(~i, v);
766}
767#define atomic_fetch_andnot_relaxed atomic_fetch_andnot_relaxed
768#endif
769
770#else /* atomic_fetch_andnot_relaxed */
771
772#ifndef atomic_fetch_andnot_acquire
765dcd20 773static __always_inline int
9fa45070
MR
774atomic_fetch_andnot_acquire(int i, atomic_t *v)
775{
776 int ret = atomic_fetch_andnot_relaxed(i, v);
777 __atomic_acquire_fence();
778 return ret;
779}
780#define atomic_fetch_andnot_acquire atomic_fetch_andnot_acquire
781#endif
782
783#ifndef atomic_fetch_andnot_release
765dcd20 784static __always_inline int
9fa45070
MR
785atomic_fetch_andnot_release(int i, atomic_t *v)
786{
787 __atomic_release_fence();
788 return atomic_fetch_andnot_relaxed(i, v);
789}
790#define atomic_fetch_andnot_release atomic_fetch_andnot_release
791#endif
792
793#ifndef atomic_fetch_andnot
765dcd20 794static __always_inline int
9fa45070
MR
795atomic_fetch_andnot(int i, atomic_t *v)
796{
797 int ret;
798 __atomic_pre_full_fence();
799 ret = atomic_fetch_andnot_relaxed(i, v);
800 __atomic_post_full_fence();
801 return ret;
802}
803#define atomic_fetch_andnot atomic_fetch_andnot
804#endif
805
806#endif /* atomic_fetch_andnot_relaxed */
807
#define arch_atomic_or atomic_or

#define arch_atomic_fetch_or atomic_fetch_or
#define arch_atomic_fetch_or_acquire atomic_fetch_or_acquire
#define arch_atomic_fetch_or_release atomic_fetch_or_release
#define arch_atomic_fetch_or_relaxed atomic_fetch_or_relaxed

/* atomic_fetch_or ordering-variant fallbacks (see atomic_add_return). */
#ifndef atomic_fetch_or_relaxed
#define atomic_fetch_or_acquire atomic_fetch_or
#define atomic_fetch_or_release atomic_fetch_or
#define atomic_fetch_or_relaxed atomic_fetch_or
#else /* atomic_fetch_or_relaxed */

#ifndef atomic_fetch_or_acquire
static __always_inline int
atomic_fetch_or_acquire(int i, atomic_t *v)
{
	int ret = atomic_fetch_or_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_fetch_or_acquire atomic_fetch_or_acquire
#endif

#ifndef atomic_fetch_or_release
static __always_inline int
atomic_fetch_or_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_fetch_or_relaxed(i, v);
}
#define atomic_fetch_or_release atomic_fetch_or_release
#endif

#ifndef atomic_fetch_or
static __always_inline int
atomic_fetch_or(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_fetch_or_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_fetch_or atomic_fetch_or
#endif

#endif /* atomic_fetch_or_relaxed */

#define arch_atomic_xor atomic_xor

#define arch_atomic_fetch_xor atomic_fetch_xor
#define arch_atomic_fetch_xor_acquire atomic_fetch_xor_acquire
#define arch_atomic_fetch_xor_release atomic_fetch_xor_release
#define arch_atomic_fetch_xor_relaxed atomic_fetch_xor_relaxed

/* atomic_fetch_xor ordering-variant fallbacks (see atomic_add_return). */
#ifndef atomic_fetch_xor_relaxed
#define atomic_fetch_xor_acquire atomic_fetch_xor
#define atomic_fetch_xor_release atomic_fetch_xor
#define atomic_fetch_xor_relaxed atomic_fetch_xor
#else /* atomic_fetch_xor_relaxed */

#ifndef atomic_fetch_xor_acquire
static __always_inline int
atomic_fetch_xor_acquire(int i, atomic_t *v)
{
	int ret = atomic_fetch_xor_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_fetch_xor_acquire atomic_fetch_xor_acquire
#endif

#ifndef atomic_fetch_xor_release
static __always_inline int
atomic_fetch_xor_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return atomic_fetch_xor_relaxed(i, v);
}
#define atomic_fetch_xor_release atomic_fetch_xor_release
#endif

#ifndef atomic_fetch_xor
static __always_inline int
atomic_fetch_xor(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_fetch_xor_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_fetch_xor atomic_fetch_xor
#endif

#endif /* atomic_fetch_xor_relaxed */

#define arch_atomic_xchg atomic_xchg
#define arch_atomic_xchg_acquire atomic_xchg_acquire
#define arch_atomic_xchg_release atomic_xchg_release
#define arch_atomic_xchg_relaxed atomic_xchg_relaxed

/* atomic_xchg ordering-variant fallbacks (see atomic_add_return). */
#ifndef atomic_xchg_relaxed
#define atomic_xchg_acquire atomic_xchg
#define atomic_xchg_release atomic_xchg
#define atomic_xchg_relaxed atomic_xchg
#else /* atomic_xchg_relaxed */

#ifndef atomic_xchg_acquire
static __always_inline int
atomic_xchg_acquire(atomic_t *v, int i)
{
	int ret = atomic_xchg_relaxed(v, i);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_xchg_acquire atomic_xchg_acquire
#endif

#ifndef atomic_xchg_release
static __always_inline int
atomic_xchg_release(atomic_t *v, int i)
{
	__atomic_release_fence();
	return atomic_xchg_relaxed(v, i);
}
#define atomic_xchg_release atomic_xchg_release
#endif

#ifndef atomic_xchg
static __always_inline int
atomic_xchg(atomic_t *v, int i)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_xchg_relaxed(v, i);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_xchg atomic_xchg
#endif

#endif /* atomic_xchg_relaxed */

#define arch_atomic_cmpxchg atomic_cmpxchg
#define arch_atomic_cmpxchg_acquire atomic_cmpxchg_acquire
#define arch_atomic_cmpxchg_release atomic_cmpxchg_release
#define arch_atomic_cmpxchg_relaxed atomic_cmpxchg_relaxed

/* atomic_cmpxchg ordering-variant fallbacks (see atomic_add_return). */
#ifndef atomic_cmpxchg_relaxed
#define atomic_cmpxchg_acquire atomic_cmpxchg
#define atomic_cmpxchg_release atomic_cmpxchg
#define atomic_cmpxchg_relaxed atomic_cmpxchg
#else /* atomic_cmpxchg_relaxed */

#ifndef atomic_cmpxchg_acquire
static __always_inline int
atomic_cmpxchg_acquire(atomic_t *v, int old, int new)
{
	int ret = atomic_cmpxchg_relaxed(v, old, new);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_cmpxchg_acquire atomic_cmpxchg_acquire
#endif

#ifndef atomic_cmpxchg_release
static __always_inline int
atomic_cmpxchg_release(atomic_t *v, int old, int new)
{
	__atomic_release_fence();
	return atomic_cmpxchg_relaxed(v, old, new);
}
#define atomic_cmpxchg_release atomic_cmpxchg_release
#endif

#ifndef atomic_cmpxchg
static __always_inline int
atomic_cmpxchg(atomic_t *v, int old, int new)
{
	int ret;
	__atomic_pre_full_fence();
	ret = atomic_cmpxchg_relaxed(v, old, new);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_cmpxchg atomic_cmpxchg
#endif

#endif /* atomic_cmpxchg_relaxed */

5faafd56
PZ
1000#define arch_atomic_try_cmpxchg atomic_try_cmpxchg
1001#define arch_atomic_try_cmpxchg_acquire atomic_try_cmpxchg_acquire
1002#define arch_atomic_try_cmpxchg_release atomic_try_cmpxchg_release
1003#define arch_atomic_try_cmpxchg_relaxed atomic_try_cmpxchg_relaxed
1004
9fa45070
MR
/*
 * atomic_try_cmpxchg*(): cmpxchg variants that return success as a bool and,
 * on failure, write the value actually observed back into *old so callers
 * can retry in a loop without re-reading the atomic themselves.
 *
 * If the arch provides no _relaxed form, all orderings alias the (fully
 * ordered) base op; otherwise the ordered variants are built from the
 * _relaxed form using the generic fences.
 */
#ifndef atomic_try_cmpxchg_relaxed
#ifdef atomic_try_cmpxchg
#define atomic_try_cmpxchg_acquire atomic_try_cmpxchg
#define atomic_try_cmpxchg_release atomic_try_cmpxchg
#define atomic_try_cmpxchg_relaxed atomic_try_cmpxchg
#endif /* atomic_try_cmpxchg */

#ifndef atomic_try_cmpxchg
static __always_inline bool
atomic_try_cmpxchg(atomic_t *v, int *old, int new)
{
	int r, o = *old;
	r = atomic_cmpxchg(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define atomic_try_cmpxchg atomic_try_cmpxchg
#endif

#ifndef atomic_try_cmpxchg_acquire
static __always_inline bool
atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
{
	int r, o = *old;
	r = atomic_cmpxchg_acquire(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define atomic_try_cmpxchg_acquire atomic_try_cmpxchg_acquire
#endif

#ifndef atomic_try_cmpxchg_release
static __always_inline bool
atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
{
	int r, o = *old;
	r = atomic_cmpxchg_release(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define atomic_try_cmpxchg_release atomic_try_cmpxchg_release
#endif

#ifndef atomic_try_cmpxchg_relaxed
static __always_inline bool
atomic_try_cmpxchg_relaxed(atomic_t *v, int *old, int new)
{
	int r, o = *old;
	r = atomic_cmpxchg_relaxed(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define atomic_try_cmpxchg_relaxed atomic_try_cmpxchg_relaxed
#endif

#else /* atomic_try_cmpxchg_relaxed */

#ifndef atomic_try_cmpxchg_acquire
static __always_inline bool
atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
{
	bool ret = atomic_try_cmpxchg_relaxed(v, old, new);
	__atomic_acquire_fence();
	return ret;
}
#define atomic_try_cmpxchg_acquire atomic_try_cmpxchg_acquire
#endif

#ifndef atomic_try_cmpxchg_release
static __always_inline bool
atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
{
	__atomic_release_fence();
	return atomic_try_cmpxchg_relaxed(v, old, new);
}
#define atomic_try_cmpxchg_release atomic_try_cmpxchg_release
#endif

#ifndef atomic_try_cmpxchg
static __always_inline bool
atomic_try_cmpxchg(atomic_t *v, int *old, int new)
{
	bool ret;
	__atomic_pre_full_fence();
	ret = atomic_try_cmpxchg_relaxed(v, old, new);
	__atomic_post_full_fence();
	return ret;
}
#define atomic_try_cmpxchg atomic_try_cmpxchg
#endif

#endif /* atomic_try_cmpxchg_relaxed */
1101
5faafd56
PZ
/* Alias the arch_ name to this generic fallback. */
#define arch_atomic_sub_and_test atomic_sub_and_test

#ifndef atomic_sub_and_test
/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static __always_inline bool
atomic_sub_and_test(int i, atomic_t *v)
{
	return atomic_sub_return(i, v) == 0;
}
#define atomic_sub_and_test atomic_sub_and_test
#endif
1121
5faafd56
PZ
/* Alias the arch_ name to this generic fallback. */
#define arch_atomic_dec_and_test atomic_dec_and_test

#ifndef atomic_dec_and_test
/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static __always_inline bool
atomic_dec_and_test(atomic_t *v)
{
	return atomic_dec_return(v) == 0;
}
#define atomic_dec_and_test atomic_dec_and_test
#endif
1140
5faafd56
PZ
/* Alias the arch_ name to this generic fallback. */
#define arch_atomic_inc_and_test atomic_inc_and_test

#ifndef atomic_inc_and_test
/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static __always_inline bool
atomic_inc_and_test(atomic_t *v)
{
	return atomic_inc_return(v) == 0;
}
#define atomic_inc_and_test atomic_inc_and_test
#endif
1159
5faafd56
PZ
/* Alias the arch_ name to this generic fallback. */
#define arch_atomic_add_negative atomic_add_negative

#ifndef atomic_add_negative
/**
 * atomic_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static __always_inline bool
atomic_add_negative(int i, atomic_t *v)
{
	return atomic_add_return(i, v) < 0;
}
#define atomic_add_negative atomic_add_negative
#endif
1179
5faafd56
PZ
/* Alias the arch_ name to this generic fallback. */
#define arch_atomic_fetch_add_unless atomic_fetch_add_unless

#ifndef atomic_fetch_add_unless
/**
 * atomic_fetch_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns original value of @v
 */
static __always_inline int
atomic_fetch_add_unless(atomic_t *v, int a, int u)
{
	int c = atomic_read(v);

	/* try_cmpxchg() updates 'c' with the observed value on failure. */
	do {
		if (unlikely(c == u))
			break;
	} while (!atomic_try_cmpxchg(v, &c, c + a));

	return c;
}
#define atomic_fetch_add_unless atomic_fetch_add_unless
#endif
1206
5faafd56
PZ
/* Alias the arch_ name to this generic fallback. */
#define arch_atomic_add_unless atomic_add_unless

#ifndef atomic_add_unless
/**
 * atomic_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, if @v was not already @u.
 * Returns true if the addition was done.
 */
static __always_inline bool
atomic_add_unless(atomic_t *v, int a, int u)
{
	return atomic_fetch_add_unless(v, a, u) != u;
}
#define atomic_add_unless atomic_add_unless
#endif
1226
5faafd56
PZ
/* Alias the arch_ name to this generic fallback. */
#define arch_atomic_inc_not_zero atomic_inc_not_zero

#ifndef atomic_inc_not_zero
/**
 * atomic_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1, if @v is non-zero.
 * Returns true if the increment was done.
 */
static __always_inline bool
atomic_inc_not_zero(atomic_t *v)
{
	return atomic_add_unless(v, 1, 0);
}
#define atomic_inc_not_zero atomic_inc_not_zero
#endif
1244
5faafd56
PZ
/* Alias the arch_ name to this generic fallback. */
#define arch_atomic_inc_unless_negative atomic_inc_unless_negative

#ifndef atomic_inc_unless_negative
/*
 * Atomically increment @v by 1, unless its value is negative.
 * Returns true if the increment was done.
 */
static __always_inline bool
atomic_inc_unless_negative(atomic_t *v)
{
	int c = atomic_read(v);

	/* try_cmpxchg() updates 'c' with the observed value on failure. */
	do {
		if (unlikely(c < 0))
			return false;
	} while (!atomic_try_cmpxchg(v, &c, c + 1));

	return true;
}
#define atomic_inc_unless_negative atomic_inc_unless_negative
#endif
1262
5faafd56
PZ
/* Alias the arch_ name to this generic fallback. */
#define arch_atomic_dec_unless_positive atomic_dec_unless_positive

#ifndef atomic_dec_unless_positive
/*
 * Atomically decrement @v by 1, unless its value is positive.
 * Returns true if the decrement was done.
 */
static __always_inline bool
atomic_dec_unless_positive(atomic_t *v)
{
	int c = atomic_read(v);

	/* try_cmpxchg() updates 'c' with the observed value on failure. */
	do {
		if (unlikely(c > 0))
			return false;
	} while (!atomic_try_cmpxchg(v, &c, c - 1));

	return true;
}
#define atomic_dec_unless_positive atomic_dec_unless_positive
#endif
1280
5faafd56
PZ
/* Alias the arch_ name to this generic fallback. */
#define arch_atomic_dec_if_positive atomic_dec_if_positive

#ifndef atomic_dec_if_positive
/*
 * Atomically decrement @v by 1, but only if the result would be
 * non-negative.  Returns the decremented value; a negative return
 * means no decrement was performed.
 */
static __always_inline int
atomic_dec_if_positive(atomic_t *v)
{
	int dec, c = atomic_read(v);

	/* try_cmpxchg() updates 'c' with the observed value on failure. */
	do {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
	} while (!atomic_try_cmpxchg(v, &c, dec));

	return dec;
}
#define atomic_dec_if_positive atomic_dec_if_positive
#endif
1299
9fa45070
MR
1300#ifdef CONFIG_GENERIC_ATOMIC64
1301#include <asm-generic/atomic64.h>
1302#endif
1303
5faafd56
PZ
/* Alias the arch_ names to the generic 64-bit ops/fallbacks. */
#define arch_atomic64_read atomic64_read
#define arch_atomic64_read_acquire atomic64_read_acquire

#ifndef atomic64_read_acquire
/* Acquire-ordered read built from smp_load_acquire() on the counter. */
static __always_inline s64
atomic64_read_acquire(const atomic64_t *v)
{
	return smp_load_acquire(&(v)->counter);
}
#define atomic64_read_acquire atomic64_read_acquire
#endif
1315
5faafd56
PZ
/* Alias the arch_ names to the generic 64-bit ops/fallbacks. */
#define arch_atomic64_set atomic64_set
#define arch_atomic64_set_release atomic64_set_release

#ifndef atomic64_set_release
/* Release-ordered store built from smp_store_release() on the counter. */
static __always_inline void
atomic64_set_release(atomic64_t *v, s64 i)
{
	smp_store_release(&(v)->counter, i);
}
#define atomic64_set_release atomic64_set_release
#endif
1327
5faafd56
PZ
/* Alias the arch_ names to the generic 64-bit ops/fallbacks. */
#define arch_atomic64_add atomic64_add

#define arch_atomic64_add_return atomic64_add_return
#define arch_atomic64_add_return_acquire atomic64_add_return_acquire
#define arch_atomic64_add_return_release atomic64_add_return_release
#define arch_atomic64_add_return_relaxed atomic64_add_return_relaxed

/*
 * If the arch provides no _relaxed form, all orderings alias the fully
 * ordered op; otherwise the ordered variants are built from _relaxed
 * using the generic fences.
 */
#ifndef atomic64_add_return_relaxed
#define atomic64_add_return_acquire atomic64_add_return
#define atomic64_add_return_release atomic64_add_return
#define atomic64_add_return_relaxed atomic64_add_return
#else /* atomic64_add_return_relaxed */

#ifndef atomic64_add_return_acquire
static __always_inline s64
atomic64_add_return_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_add_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_add_return_acquire atomic64_add_return_acquire
#endif

#ifndef atomic64_add_return_release
static __always_inline s64
atomic64_add_return_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_add_return_relaxed(i, v);
}
#define atomic64_add_return_release atomic64_add_return_release
#endif

#ifndef atomic64_add_return
static __always_inline s64
atomic64_add_return(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_add_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_add_return atomic64_add_return
#endif

#endif /* atomic64_add_return_relaxed */
1376
5faafd56
PZ
/* Alias the arch_ names to the generic 64-bit ops/fallbacks. */
#define arch_atomic64_fetch_add atomic64_fetch_add
#define arch_atomic64_fetch_add_acquire atomic64_fetch_add_acquire
#define arch_atomic64_fetch_add_release atomic64_fetch_add_release
#define arch_atomic64_fetch_add_relaxed atomic64_fetch_add_relaxed

/*
 * If the arch provides no _relaxed form, all orderings alias the fully
 * ordered op; otherwise the ordered variants are built from _relaxed
 * using the generic fences.
 */
#ifndef atomic64_fetch_add_relaxed
#define atomic64_fetch_add_acquire atomic64_fetch_add
#define atomic64_fetch_add_release atomic64_fetch_add
#define atomic64_fetch_add_relaxed atomic64_fetch_add
#else /* atomic64_fetch_add_relaxed */

#ifndef atomic64_fetch_add_acquire
static __always_inline s64
atomic64_fetch_add_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_fetch_add_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_add_acquire atomic64_fetch_add_acquire
#endif

#ifndef atomic64_fetch_add_release
static __always_inline s64
atomic64_fetch_add_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_add_relaxed(i, v);
}
#define atomic64_fetch_add_release atomic64_fetch_add_release
#endif

#ifndef atomic64_fetch_add
static __always_inline s64
atomic64_fetch_add(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_add_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_add atomic64_fetch_add
#endif

#endif /* atomic64_fetch_add_relaxed */
1423
5faafd56
PZ
/* Alias the arch_ names to the generic 64-bit ops/fallbacks. */
#define arch_atomic64_sub atomic64_sub

#define arch_atomic64_sub_return atomic64_sub_return
#define arch_atomic64_sub_return_acquire atomic64_sub_return_acquire
#define arch_atomic64_sub_return_release atomic64_sub_return_release
#define arch_atomic64_sub_return_relaxed atomic64_sub_return_relaxed

/*
 * If the arch provides no _relaxed form, all orderings alias the fully
 * ordered op; otherwise the ordered variants are built from _relaxed
 * using the generic fences.
 */
#ifndef atomic64_sub_return_relaxed
#define atomic64_sub_return_acquire atomic64_sub_return
#define atomic64_sub_return_release atomic64_sub_return
#define atomic64_sub_return_relaxed atomic64_sub_return
#else /* atomic64_sub_return_relaxed */

#ifndef atomic64_sub_return_acquire
static __always_inline s64
atomic64_sub_return_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_sub_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_sub_return_acquire atomic64_sub_return_acquire
#endif

#ifndef atomic64_sub_return_release
static __always_inline s64
atomic64_sub_return_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_sub_return_relaxed(i, v);
}
#define atomic64_sub_return_release atomic64_sub_return_release
#endif

#ifndef atomic64_sub_return
static __always_inline s64
atomic64_sub_return(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_sub_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_sub_return atomic64_sub_return
#endif

#endif /* atomic64_sub_return_relaxed */
1472
5faafd56
PZ
/* Alias the arch_ names to the generic 64-bit ops/fallbacks. */
#define arch_atomic64_fetch_sub atomic64_fetch_sub
#define arch_atomic64_fetch_sub_acquire atomic64_fetch_sub_acquire
#define arch_atomic64_fetch_sub_release atomic64_fetch_sub_release
#define arch_atomic64_fetch_sub_relaxed atomic64_fetch_sub_relaxed

/*
 * If the arch provides no _relaxed form, all orderings alias the fully
 * ordered op; otherwise the ordered variants are built from _relaxed
 * using the generic fences.
 */
#ifndef atomic64_fetch_sub_relaxed
#define atomic64_fetch_sub_acquire atomic64_fetch_sub
#define atomic64_fetch_sub_release atomic64_fetch_sub
#define atomic64_fetch_sub_relaxed atomic64_fetch_sub
#else /* atomic64_fetch_sub_relaxed */

#ifndef atomic64_fetch_sub_acquire
static __always_inline s64
atomic64_fetch_sub_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_fetch_sub_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_sub_acquire atomic64_fetch_sub_acquire
#endif

#ifndef atomic64_fetch_sub_release
static __always_inline s64
atomic64_fetch_sub_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_sub_relaxed(i, v);
}
#define atomic64_fetch_sub_release atomic64_fetch_sub_release
#endif

#ifndef atomic64_fetch_sub
static __always_inline s64
atomic64_fetch_sub(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_sub_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_sub atomic64_fetch_sub
#endif

#endif /* atomic64_fetch_sub_relaxed */
1519
5faafd56
PZ
/* Alias the arch_ name to this generic fallback. */
#define arch_atomic64_inc atomic64_inc

#ifndef atomic64_inc
/* Increment by 1, implemented via atomic64_add(). */
static __always_inline void
atomic64_inc(atomic64_t *v)
{
	atomic64_add(1, v);
}
#define atomic64_inc atomic64_inc
#endif
1530
5faafd56
PZ
/* Alias the arch_ names to the generic 64-bit ops/fallbacks. */
#define arch_atomic64_inc_return atomic64_inc_return
#define arch_atomic64_inc_return_acquire atomic64_inc_return_acquire
#define arch_atomic64_inc_return_release atomic64_inc_return_release
#define arch_atomic64_inc_return_relaxed atomic64_inc_return_relaxed

/*
 * atomic64_inc_return*(): fall back to atomic64_add_return*(1, v) when the
 * arch provides no inc_return at all; otherwise build the ordered variants
 * from the _relaxed form using the generic fences.
 */
#ifndef atomic64_inc_return_relaxed
#ifdef atomic64_inc_return
#define atomic64_inc_return_acquire atomic64_inc_return
#define atomic64_inc_return_release atomic64_inc_return
#define atomic64_inc_return_relaxed atomic64_inc_return
#endif /* atomic64_inc_return */

#ifndef atomic64_inc_return
static __always_inline s64
atomic64_inc_return(atomic64_t *v)
{
	return atomic64_add_return(1, v);
}
#define atomic64_inc_return atomic64_inc_return
#endif

#ifndef atomic64_inc_return_acquire
static __always_inline s64
atomic64_inc_return_acquire(atomic64_t *v)
{
	return atomic64_add_return_acquire(1, v);
}
#define atomic64_inc_return_acquire atomic64_inc_return_acquire
#endif

#ifndef atomic64_inc_return_release
static __always_inline s64
atomic64_inc_return_release(atomic64_t *v)
{
	return atomic64_add_return_release(1, v);
}
#define atomic64_inc_return_release atomic64_inc_return_release
#endif

#ifndef atomic64_inc_return_relaxed
static __always_inline s64
atomic64_inc_return_relaxed(atomic64_t *v)
{
	return atomic64_add_return_relaxed(1, v);
}
#define atomic64_inc_return_relaxed atomic64_inc_return_relaxed
#endif

#else /* atomic64_inc_return_relaxed */

#ifndef atomic64_inc_return_acquire
static __always_inline s64
atomic64_inc_return_acquire(atomic64_t *v)
{
	s64 ret = atomic64_inc_return_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_inc_return_acquire atomic64_inc_return_acquire
#endif

#ifndef atomic64_inc_return_release
static __always_inline s64
atomic64_inc_return_release(atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_inc_return_relaxed(v);
}
#define atomic64_inc_return_release atomic64_inc_return_release
#endif

#ifndef atomic64_inc_return
static __always_inline s64
atomic64_inc_return(atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_inc_return_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_inc_return atomic64_inc_return
#endif

#endif /* atomic64_inc_return_relaxed */
1616
5faafd56
PZ
/* Alias the arch_ names to the generic 64-bit ops/fallbacks. */
#define arch_atomic64_fetch_inc atomic64_fetch_inc
#define arch_atomic64_fetch_inc_acquire atomic64_fetch_inc_acquire
#define arch_atomic64_fetch_inc_release atomic64_fetch_inc_release
#define arch_atomic64_fetch_inc_relaxed atomic64_fetch_inc_relaxed

/*
 * atomic64_fetch_inc*(): fall back to atomic64_fetch_add*(1, v) when the
 * arch provides no fetch_inc at all; otherwise build the ordered variants
 * from the _relaxed form using the generic fences.
 */
#ifndef atomic64_fetch_inc_relaxed
#ifdef atomic64_fetch_inc
#define atomic64_fetch_inc_acquire atomic64_fetch_inc
#define atomic64_fetch_inc_release atomic64_fetch_inc
#define atomic64_fetch_inc_relaxed atomic64_fetch_inc
#endif /* atomic64_fetch_inc */

#ifndef atomic64_fetch_inc
static __always_inline s64
atomic64_fetch_inc(atomic64_t *v)
{
	return atomic64_fetch_add(1, v);
}
#define atomic64_fetch_inc atomic64_fetch_inc
#endif

#ifndef atomic64_fetch_inc_acquire
static __always_inline s64
atomic64_fetch_inc_acquire(atomic64_t *v)
{
	return atomic64_fetch_add_acquire(1, v);
}
#define atomic64_fetch_inc_acquire atomic64_fetch_inc_acquire
#endif

#ifndef atomic64_fetch_inc_release
static __always_inline s64
atomic64_fetch_inc_release(atomic64_t *v)
{
	return atomic64_fetch_add_release(1, v);
}
#define atomic64_fetch_inc_release atomic64_fetch_inc_release
#endif

#ifndef atomic64_fetch_inc_relaxed
static __always_inline s64
atomic64_fetch_inc_relaxed(atomic64_t *v)
{
	return atomic64_fetch_add_relaxed(1, v);
}
#define atomic64_fetch_inc_relaxed atomic64_fetch_inc_relaxed
#endif

#else /* atomic64_fetch_inc_relaxed */

#ifndef atomic64_fetch_inc_acquire
static __always_inline s64
atomic64_fetch_inc_acquire(atomic64_t *v)
{
	s64 ret = atomic64_fetch_inc_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_inc_acquire atomic64_fetch_inc_acquire
#endif

#ifndef atomic64_fetch_inc_release
static __always_inline s64
atomic64_fetch_inc_release(atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_inc_relaxed(v);
}
#define atomic64_fetch_inc_release atomic64_fetch_inc_release
#endif

#ifndef atomic64_fetch_inc
static __always_inline s64
atomic64_fetch_inc(atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_inc_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_inc atomic64_fetch_inc
#endif

#endif /* atomic64_fetch_inc_relaxed */
1702
5faafd56
PZ
/* Alias the arch_ name to this generic fallback. */
#define arch_atomic64_dec atomic64_dec

#ifndef atomic64_dec
/* Decrement by 1, implemented via atomic64_sub(). */
static __always_inline void
atomic64_dec(atomic64_t *v)
{
	atomic64_sub(1, v);
}
#define atomic64_dec atomic64_dec
#endif
1713
5faafd56
PZ
/* Alias the arch_ names to the generic 64-bit ops/fallbacks. */
#define arch_atomic64_dec_return atomic64_dec_return
#define arch_atomic64_dec_return_acquire atomic64_dec_return_acquire
#define arch_atomic64_dec_return_release atomic64_dec_return_release
#define arch_atomic64_dec_return_relaxed atomic64_dec_return_relaxed

/*
 * atomic64_dec_return*(): fall back to atomic64_sub_return*(1, v) when the
 * arch provides no dec_return at all; otherwise build the ordered variants
 * from the _relaxed form using the generic fences.
 */
#ifndef atomic64_dec_return_relaxed
#ifdef atomic64_dec_return
#define atomic64_dec_return_acquire atomic64_dec_return
#define atomic64_dec_return_release atomic64_dec_return
#define atomic64_dec_return_relaxed atomic64_dec_return
#endif /* atomic64_dec_return */

#ifndef atomic64_dec_return
static __always_inline s64
atomic64_dec_return(atomic64_t *v)
{
	return atomic64_sub_return(1, v);
}
#define atomic64_dec_return atomic64_dec_return
#endif

#ifndef atomic64_dec_return_acquire
static __always_inline s64
atomic64_dec_return_acquire(atomic64_t *v)
{
	return atomic64_sub_return_acquire(1, v);
}
#define atomic64_dec_return_acquire atomic64_dec_return_acquire
#endif

#ifndef atomic64_dec_return_release
static __always_inline s64
atomic64_dec_return_release(atomic64_t *v)
{
	return atomic64_sub_return_release(1, v);
}
#define atomic64_dec_return_release atomic64_dec_return_release
#endif

#ifndef atomic64_dec_return_relaxed
static __always_inline s64
atomic64_dec_return_relaxed(atomic64_t *v)
{
	return atomic64_sub_return_relaxed(1, v);
}
#define atomic64_dec_return_relaxed atomic64_dec_return_relaxed
#endif

#else /* atomic64_dec_return_relaxed */

#ifndef atomic64_dec_return_acquire
static __always_inline s64
atomic64_dec_return_acquire(atomic64_t *v)
{
	s64 ret = atomic64_dec_return_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_dec_return_acquire atomic64_dec_return_acquire
#endif

#ifndef atomic64_dec_return_release
static __always_inline s64
atomic64_dec_return_release(atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_dec_return_relaxed(v);
}
#define atomic64_dec_return_release atomic64_dec_return_release
#endif

#ifndef atomic64_dec_return
static __always_inline s64
atomic64_dec_return(atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_dec_return_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_dec_return atomic64_dec_return
#endif

#endif /* atomic64_dec_return_relaxed */
1799
5faafd56
PZ
/* Alias the arch_ names to the generic 64-bit ops/fallbacks. */
#define arch_atomic64_fetch_dec atomic64_fetch_dec
#define arch_atomic64_fetch_dec_acquire atomic64_fetch_dec_acquire
#define arch_atomic64_fetch_dec_release atomic64_fetch_dec_release
#define arch_atomic64_fetch_dec_relaxed atomic64_fetch_dec_relaxed

/*
 * atomic64_fetch_dec*(): fall back to atomic64_fetch_sub*(1, v) when the
 * arch provides no fetch_dec at all; otherwise build the ordered variants
 * from the _relaxed form using the generic fences.
 */
#ifndef atomic64_fetch_dec_relaxed
#ifdef atomic64_fetch_dec
#define atomic64_fetch_dec_acquire atomic64_fetch_dec
#define atomic64_fetch_dec_release atomic64_fetch_dec
#define atomic64_fetch_dec_relaxed atomic64_fetch_dec
#endif /* atomic64_fetch_dec */

#ifndef atomic64_fetch_dec
static __always_inline s64
atomic64_fetch_dec(atomic64_t *v)
{
	return atomic64_fetch_sub(1, v);
}
#define atomic64_fetch_dec atomic64_fetch_dec
#endif

#ifndef atomic64_fetch_dec_acquire
static __always_inline s64
atomic64_fetch_dec_acquire(atomic64_t *v)
{
	return atomic64_fetch_sub_acquire(1, v);
}
#define atomic64_fetch_dec_acquire atomic64_fetch_dec_acquire
#endif

#ifndef atomic64_fetch_dec_release
static __always_inline s64
atomic64_fetch_dec_release(atomic64_t *v)
{
	return atomic64_fetch_sub_release(1, v);
}
#define atomic64_fetch_dec_release atomic64_fetch_dec_release
#endif

#ifndef atomic64_fetch_dec_relaxed
static __always_inline s64
atomic64_fetch_dec_relaxed(atomic64_t *v)
{
	return atomic64_fetch_sub_relaxed(1, v);
}
#define atomic64_fetch_dec_relaxed atomic64_fetch_dec_relaxed
#endif

#else /* atomic64_fetch_dec_relaxed */

#ifndef atomic64_fetch_dec_acquire
static __always_inline s64
atomic64_fetch_dec_acquire(atomic64_t *v)
{
	s64 ret = atomic64_fetch_dec_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_dec_acquire atomic64_fetch_dec_acquire
#endif

#ifndef atomic64_fetch_dec_release
static __always_inline s64
atomic64_fetch_dec_release(atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_dec_relaxed(v);
}
#define atomic64_fetch_dec_release atomic64_fetch_dec_release
#endif

#ifndef atomic64_fetch_dec
static __always_inline s64
atomic64_fetch_dec(atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_dec_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_dec atomic64_fetch_dec
#endif

#endif /* atomic64_fetch_dec_relaxed */
1885
5faafd56
PZ
/* Alias the arch_ names to the generic 64-bit ops/fallbacks. */
#define arch_atomic64_and atomic64_and

#define arch_atomic64_fetch_and atomic64_fetch_and
#define arch_atomic64_fetch_and_acquire atomic64_fetch_and_acquire
#define arch_atomic64_fetch_and_release atomic64_fetch_and_release
#define arch_atomic64_fetch_and_relaxed atomic64_fetch_and_relaxed

/*
 * If the arch provides no _relaxed form, all orderings alias the fully
 * ordered op; otherwise the ordered variants are built from _relaxed
 * using the generic fences.
 */
#ifndef atomic64_fetch_and_relaxed
#define atomic64_fetch_and_acquire atomic64_fetch_and
#define atomic64_fetch_and_release atomic64_fetch_and
#define atomic64_fetch_and_relaxed atomic64_fetch_and
#else /* atomic64_fetch_and_relaxed */

#ifndef atomic64_fetch_and_acquire
static __always_inline s64
atomic64_fetch_and_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_fetch_and_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_and_acquire atomic64_fetch_and_acquire
#endif

#ifndef atomic64_fetch_and_release
static __always_inline s64
atomic64_fetch_and_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_and_relaxed(i, v);
}
#define atomic64_fetch_and_release atomic64_fetch_and_release
#endif

#ifndef atomic64_fetch_and
static __always_inline s64
atomic64_fetch_and(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_and_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_and atomic64_fetch_and
#endif

#endif /* atomic64_fetch_and_relaxed */
1934
5faafd56
PZ
/* Alias the arch_ name to this generic fallback. */
#define arch_atomic64_andnot atomic64_andnot

#ifndef atomic64_andnot
/* Clear the bits in @i, implemented as atomic64_and() with ~@i. */
static __always_inline void
atomic64_andnot(s64 i, atomic64_t *v)
{
	atomic64_and(~i, v);
}
#define atomic64_andnot atomic64_andnot
#endif
1945
5faafd56
PZ
/* Alias the arch_ names to the generic 64-bit ops/fallbacks. */
#define arch_atomic64_fetch_andnot atomic64_fetch_andnot
#define arch_atomic64_fetch_andnot_acquire atomic64_fetch_andnot_acquire
#define arch_atomic64_fetch_andnot_release atomic64_fetch_andnot_release
#define arch_atomic64_fetch_andnot_relaxed atomic64_fetch_andnot_relaxed

/*
 * atomic64_fetch_andnot*(): fall back to atomic64_fetch_and*(~i, v) when
 * the arch provides no fetch_andnot at all; otherwise build the ordered
 * variants from the _relaxed form using the generic fences.
 */
#ifndef atomic64_fetch_andnot_relaxed
#ifdef atomic64_fetch_andnot
#define atomic64_fetch_andnot_acquire atomic64_fetch_andnot
#define atomic64_fetch_andnot_release atomic64_fetch_andnot
#define atomic64_fetch_andnot_relaxed atomic64_fetch_andnot
#endif /* atomic64_fetch_andnot */

#ifndef atomic64_fetch_andnot
static __always_inline s64
atomic64_fetch_andnot(s64 i, atomic64_t *v)
{
	return atomic64_fetch_and(~i, v);
}
#define atomic64_fetch_andnot atomic64_fetch_andnot
#endif

#ifndef atomic64_fetch_andnot_acquire
static __always_inline s64
atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
{
	return atomic64_fetch_and_acquire(~i, v);
}
#define atomic64_fetch_andnot_acquire atomic64_fetch_andnot_acquire
#endif

#ifndef atomic64_fetch_andnot_release
static __always_inline s64
atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
{
	return atomic64_fetch_and_release(~i, v);
}
#define atomic64_fetch_andnot_release atomic64_fetch_andnot_release
#endif

#ifndef atomic64_fetch_andnot_relaxed
static __always_inline s64
atomic64_fetch_andnot_relaxed(s64 i, atomic64_t *v)
{
	return atomic64_fetch_and_relaxed(~i, v);
}
#define atomic64_fetch_andnot_relaxed atomic64_fetch_andnot_relaxed
#endif

#else /* atomic64_fetch_andnot_relaxed */

#ifndef atomic64_fetch_andnot_acquire
static __always_inline s64
atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_fetch_andnot_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_andnot_acquire atomic64_fetch_andnot_acquire
#endif

#ifndef atomic64_fetch_andnot_release
static __always_inline s64
atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_andnot_relaxed(i, v);
}
#define atomic64_fetch_andnot_release atomic64_fetch_andnot_release
#endif

#ifndef atomic64_fetch_andnot
static __always_inline s64
atomic64_fetch_andnot(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_andnot_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_andnot atomic64_fetch_andnot
#endif

#endif /* atomic64_fetch_andnot_relaxed */
2031
5faafd56
PZ
/* Alias the arch_ names to the generic 64-bit ops/fallbacks. */
#define arch_atomic64_or atomic64_or

#define arch_atomic64_fetch_or atomic64_fetch_or
#define arch_atomic64_fetch_or_acquire atomic64_fetch_or_acquire
#define arch_atomic64_fetch_or_release atomic64_fetch_or_release
#define arch_atomic64_fetch_or_relaxed atomic64_fetch_or_relaxed

/*
 * If the arch provides no _relaxed form, all orderings alias the fully
 * ordered op; otherwise the ordered variants are built from _relaxed
 * using the generic fences.
 */
#ifndef atomic64_fetch_or_relaxed
#define atomic64_fetch_or_acquire atomic64_fetch_or
#define atomic64_fetch_or_release atomic64_fetch_or
#define atomic64_fetch_or_relaxed atomic64_fetch_or
#else /* atomic64_fetch_or_relaxed */

#ifndef atomic64_fetch_or_acquire
static __always_inline s64
atomic64_fetch_or_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_fetch_or_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_or_acquire atomic64_fetch_or_acquire
#endif

#ifndef atomic64_fetch_or_release
static __always_inline s64
atomic64_fetch_or_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_or_relaxed(i, v);
}
#define atomic64_fetch_or_release atomic64_fetch_or_release
#endif

#ifndef atomic64_fetch_or
static __always_inline s64
atomic64_fetch_or(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_or_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_or atomic64_fetch_or
#endif

#endif /* atomic64_fetch_or_relaxed */
2080
/* Map the arch_ prefixed names onto the plain atomic64 XOR operations. */
#define arch_atomic64_xor atomic64_xor

#define arch_atomic64_fetch_xor atomic64_fetch_xor
#define arch_atomic64_fetch_xor_acquire atomic64_fetch_xor_acquire
#define arch_atomic64_fetch_xor_release atomic64_fetch_xor_release
#define arch_atomic64_fetch_xor_relaxed atomic64_fetch_xor_relaxed

/*
 * atomic64_fetch_xor fallbacks: alias the weaker variants to the fully
 * ordered op when no _relaxed primitive exists; otherwise derive the
 * missing orderings from _relaxed with the fence hooks.
 */
#ifndef atomic64_fetch_xor_relaxed
#define atomic64_fetch_xor_acquire atomic64_fetch_xor
#define atomic64_fetch_xor_release atomic64_fetch_xor
#define atomic64_fetch_xor_relaxed atomic64_fetch_xor
#else /* atomic64_fetch_xor_relaxed */

#ifndef atomic64_fetch_xor_acquire
/* Acquire: relaxed RMW followed by an acquire fence. */
static __always_inline s64
atomic64_fetch_xor_acquire(s64 i, atomic64_t *v)
{
	s64 ret = atomic64_fetch_xor_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_fetch_xor_acquire atomic64_fetch_xor_acquire
#endif

#ifndef atomic64_fetch_xor_release
/* Release: release fence before the relaxed RMW. */
static __always_inline s64
atomic64_fetch_xor_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return atomic64_fetch_xor_relaxed(i, v);
}
#define atomic64_fetch_xor_release atomic64_fetch_xor_release
#endif

#ifndef atomic64_fetch_xor
/* Fully ordered: relaxed RMW bracketed by full-barrier hooks. */
static __always_inline s64
atomic64_fetch_xor(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_fetch_xor_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_fetch_xor atomic64_fetch_xor
#endif

#endif /* atomic64_fetch_xor_relaxed */
2129
/* Map the arch_ prefixed names onto the plain atomic64 exchange ops. */
#define arch_atomic64_xchg atomic64_xchg
#define arch_atomic64_xchg_acquire atomic64_xchg_acquire
#define arch_atomic64_xchg_release atomic64_xchg_release
#define arch_atomic64_xchg_relaxed atomic64_xchg_relaxed

/*
 * atomic64_xchg fallbacks: alias the weaker variants to the fully ordered
 * op when no _relaxed primitive exists; otherwise derive the missing
 * orderings from _relaxed with the fence hooks.
 */
#ifndef atomic64_xchg_relaxed
#define atomic64_xchg_acquire atomic64_xchg
#define atomic64_xchg_release atomic64_xchg
#define atomic64_xchg_relaxed atomic64_xchg
#else /* atomic64_xchg_relaxed */

#ifndef atomic64_xchg_acquire
/* Acquire: relaxed exchange followed by an acquire fence. */
static __always_inline s64
atomic64_xchg_acquire(atomic64_t *v, s64 i)
{
	s64 ret = atomic64_xchg_relaxed(v, i);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_xchg_acquire atomic64_xchg_acquire
#endif

#ifndef atomic64_xchg_release
/* Release: release fence before the relaxed exchange. */
static __always_inline s64
atomic64_xchg_release(atomic64_t *v, s64 i)
{
	__atomic_release_fence();
	return atomic64_xchg_relaxed(v, i);
}
#define atomic64_xchg_release atomic64_xchg_release
#endif

#ifndef atomic64_xchg
/* Fully ordered: relaxed exchange bracketed by full-barrier hooks. */
static __always_inline s64
atomic64_xchg(atomic64_t *v, s64 i)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_xchg_relaxed(v, i);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_xchg atomic64_xchg
#endif

#endif /* atomic64_xchg_relaxed */
2176
/* Map the arch_ prefixed names onto the plain atomic64 cmpxchg ops. */
#define arch_atomic64_cmpxchg atomic64_cmpxchg
#define arch_atomic64_cmpxchg_acquire atomic64_cmpxchg_acquire
#define arch_atomic64_cmpxchg_release atomic64_cmpxchg_release
#define arch_atomic64_cmpxchg_relaxed atomic64_cmpxchg_relaxed

/*
 * atomic64_cmpxchg fallbacks: alias the weaker variants to the fully
 * ordered op when no _relaxed primitive exists; otherwise derive the
 * missing orderings from _relaxed with the fence hooks.
 */
#ifndef atomic64_cmpxchg_relaxed
#define atomic64_cmpxchg_acquire atomic64_cmpxchg
#define atomic64_cmpxchg_release atomic64_cmpxchg
#define atomic64_cmpxchg_relaxed atomic64_cmpxchg
#else /* atomic64_cmpxchg_relaxed */

#ifndef atomic64_cmpxchg_acquire
/* Acquire: relaxed cmpxchg followed by an acquire fence. */
static __always_inline s64
atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new)
{
	s64 ret = atomic64_cmpxchg_relaxed(v, old, new);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_cmpxchg_acquire atomic64_cmpxchg_acquire
#endif

#ifndef atomic64_cmpxchg_release
/* Release: release fence before the relaxed cmpxchg. */
static __always_inline s64
atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new)
{
	__atomic_release_fence();
	return atomic64_cmpxchg_relaxed(v, old, new);
}
#define atomic64_cmpxchg_release atomic64_cmpxchg_release
#endif

#ifndef atomic64_cmpxchg
/* Fully ordered: relaxed cmpxchg bracketed by full-barrier hooks. */
static __always_inline s64
atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = atomic64_cmpxchg_relaxed(v, old, new);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_cmpxchg atomic64_cmpxchg
#endif

#endif /* atomic64_cmpxchg_relaxed */
2223
/* Map the arch_ prefixed names onto the plain atomic64 try_cmpxchg ops. */
#define arch_atomic64_try_cmpxchg atomic64_try_cmpxchg
#define arch_atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg_acquire
#define arch_atomic64_try_cmpxchg_release atomic64_try_cmpxchg_release
#define arch_atomic64_try_cmpxchg_relaxed atomic64_try_cmpxchg_relaxed

/*
 * atomic64_try_cmpxchg fallbacks.
 *
 * try_cmpxchg(v, &old, new) attempts the exchange *v: old -> new; on
 * failure it writes the value actually found into *old and returns false,
 * on success it returns true.
 *
 * When the architecture has no _relaxed try_cmpxchg, each ordering variant
 * is synthesized from the matching atomic64_cmpxchg variant.  When it does,
 * the stronger orderings are built from _relaxed with the fence hooks.
 */
#ifndef atomic64_try_cmpxchg_relaxed
#ifdef atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_release atomic64_try_cmpxchg
#define atomic64_try_cmpxchg_relaxed atomic64_try_cmpxchg
#endif /* atomic64_try_cmpxchg */

#ifndef atomic64_try_cmpxchg
/* Fully ordered variant built on atomic64_cmpxchg(). */
static __always_inline bool
atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{
	s64 r, o = *old;
	r = atomic64_cmpxchg(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define atomic64_try_cmpxchg atomic64_try_cmpxchg
#endif

#ifndef atomic64_try_cmpxchg_acquire
/* Acquire variant built on atomic64_cmpxchg_acquire(). */
static __always_inline bool
atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
{
	s64 r, o = *old;
	r = atomic64_cmpxchg_acquire(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg_acquire
#endif

#ifndef atomic64_try_cmpxchg_release
/* Release variant built on atomic64_cmpxchg_release(). */
static __always_inline bool
atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
{
	s64 r, o = *old;
	r = atomic64_cmpxchg_release(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define atomic64_try_cmpxchg_release atomic64_try_cmpxchg_release
#endif

#ifndef atomic64_try_cmpxchg_relaxed
/* Relaxed variant built on atomic64_cmpxchg_relaxed(). */
static __always_inline bool
atomic64_try_cmpxchg_relaxed(atomic64_t *v, s64 *old, s64 new)
{
	s64 r, o = *old;
	r = atomic64_cmpxchg_relaxed(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define atomic64_try_cmpxchg_relaxed atomic64_try_cmpxchg_relaxed
#endif

#else /* atomic64_try_cmpxchg_relaxed */

#ifndef atomic64_try_cmpxchg_acquire
/* Acquire: relaxed try_cmpxchg followed by an acquire fence. */
static __always_inline bool
atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
{
	bool ret = atomic64_try_cmpxchg_relaxed(v, old, new);
	__atomic_acquire_fence();
	return ret;
}
#define atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg_acquire
#endif

#ifndef atomic64_try_cmpxchg_release
/* Release: release fence before the relaxed try_cmpxchg. */
static __always_inline bool
atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
{
	__atomic_release_fence();
	return atomic64_try_cmpxchg_relaxed(v, old, new);
}
#define atomic64_try_cmpxchg_release atomic64_try_cmpxchg_release
#endif

#ifndef atomic64_try_cmpxchg
/* Fully ordered: relaxed try_cmpxchg bracketed by full-barrier hooks. */
static __always_inline bool
atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{
	bool ret;
	__atomic_pre_full_fence();
	ret = atomic64_try_cmpxchg_relaxed(v, old, new);
	__atomic_post_full_fence();
	return ret;
}
#define atomic64_try_cmpxchg atomic64_try_cmpxchg
#endif

#endif /* atomic64_try_cmpxchg_relaxed */
2325
#define arch_atomic64_sub_and_test atomic64_sub_and_test

#ifndef atomic64_sub_and_test
/**
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
/* Implemented via the fully ordered atomic64_sub_return(). */
static __always_inline bool
atomic64_sub_and_test(s64 i, atomic64_t *v)
{
	return atomic64_sub_return(i, v) == 0;
}
#define atomic64_sub_and_test atomic64_sub_and_test
#endif
2345
#define arch_atomic64_dec_and_test atomic64_dec_and_test

#ifndef atomic64_dec_and_test
/**
 * atomic64_dec_and_test - decrement and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
/* Implemented via the fully ordered atomic64_dec_return(). */
static __always_inline bool
atomic64_dec_and_test(atomic64_t *v)
{
	return atomic64_dec_return(v) == 0;
}
#define atomic64_dec_and_test atomic64_dec_and_test
#endif
2364
#define arch_atomic64_inc_and_test atomic64_inc_and_test

#ifndef atomic64_inc_and_test
/**
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
/* Implemented via the fully ordered atomic64_inc_return(). */
static __always_inline bool
atomic64_inc_and_test(atomic64_t *v)
{
	return atomic64_inc_return(v) == 0;
}
#define atomic64_inc_and_test atomic64_inc_and_test
#endif
2383
#define arch_atomic64_add_negative atomic64_add_negative

#ifndef atomic64_add_negative
/**
 * atomic64_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
/* Implemented via the fully ordered atomic64_add_return(). */
static __always_inline bool
atomic64_add_negative(s64 i, atomic64_t *v)
{
	return atomic64_add_return(i, v) < 0;
}
#define atomic64_add_negative atomic64_add_negative
#endif
2403
#define arch_atomic64_fetch_add_unless atomic64_fetch_add_unless

#ifndef atomic64_fetch_add_unless
/**
 * atomic64_fetch_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns original value of @v
 */
static __always_inline s64
atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
{
	/* try_cmpxchg() reloads c on failure, so the loop re-tests @u. */
	s64 c = atomic64_read(v);

	do {
		if (unlikely(c == u))
			break;
	} while (!atomic64_try_cmpxchg(v, &c, c + a));

	return c;
}
#define atomic64_fetch_add_unless atomic64_fetch_add_unless
#endif
2430
#define arch_atomic64_add_unless atomic64_add_unless

#ifndef atomic64_add_unless
/**
 * atomic64_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, if @v was not already @u.
 * Returns true if the addition was done.
 */
static __always_inline bool
atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
{
	return atomic64_fetch_add_unless(v, a, u) != u;
}
#define atomic64_add_unless atomic64_add_unless
#endif
2450
#define arch_atomic64_inc_not_zero atomic64_inc_not_zero

#ifndef atomic64_inc_not_zero
/**
 * atomic64_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1, if @v is non-zero.
 * Returns true if the increment was done.
 */
static __always_inline bool
atomic64_inc_not_zero(atomic64_t *v)
{
	return atomic64_add_unless(v, 1, 0);
}
#define atomic64_inc_not_zero atomic64_inc_not_zero
#endif
2468
#define arch_atomic64_inc_unless_negative atomic64_inc_unless_negative

#ifndef atomic64_inc_unless_negative
/**
 * atomic64_inc_unless_negative - increment unless the value is negative
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1, so long as @v is not negative.
 * Returns true if the increment was done.
 */
static __always_inline bool
atomic64_inc_unless_negative(atomic64_t *v)
{
	/* try_cmpxchg() reloads c on failure, so negativity is re-tested. */
	s64 c = atomic64_read(v);

	do {
		if (unlikely(c < 0))
			return false;
	} while (!atomic64_try_cmpxchg(v, &c, c + 1));

	return true;
}
#define atomic64_inc_unless_negative atomic64_inc_unless_negative
#endif
2486
#define arch_atomic64_dec_unless_positive atomic64_dec_unless_positive

#ifndef atomic64_dec_unless_positive
/**
 * atomic64_dec_unless_positive - decrement unless the value is positive
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1, so long as @v is not positive.
 * Returns true if the decrement was done.
 */
static __always_inline bool
atomic64_dec_unless_positive(atomic64_t *v)
{
	/* try_cmpxchg() reloads c on failure, so positivity is re-tested. */
	s64 c = atomic64_read(v);

	do {
		if (unlikely(c > 0))
			return false;
	} while (!atomic64_try_cmpxchg(v, &c, c - 1));

	return true;
}
#define atomic64_dec_unless_positive atomic64_dec_unless_positive
#endif
2504
#define arch_atomic64_dec_if_positive atomic64_dec_if_positive

#ifndef atomic64_dec_if_positive
/**
 * atomic64_dec_if_positive - decrement if the result stays non-negative
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1, so long as the result would not be
 * negative.  Returns the old value minus one; a negative return value
 * therefore means the decrement was NOT performed.
 */
static __always_inline s64
atomic64_dec_if_positive(atomic64_t *v)
{
	/* try_cmpxchg() reloads c on failure, so dec is recomputed. */
	s64 dec, c = atomic64_read(v);

	do {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
	} while (!atomic64_try_cmpxchg(v, &c, dec));

	return dec;
}
#define atomic64_dec_if_positive atomic64_dec_if_positive
#endif
2523
9fa45070 2524#endif /* _LINUX_ATOMIC_FALLBACK_H */
5faafd56 2525// 9d95b56f98d82a2a26c7b79ccdd0c47572d50a6f