/* include/asm-generic/atomic-instrumented.h */
// SPDX-License-Identifier: GPL-2.0

// Generated by scripts/atomic/gen-atomic-instrumented.sh
// DO NOT MODIFY THIS FILE DIRECTLY

/*
 * This file provides wrappers with KASAN instrumentation for atomic operations.
 * To use this functionality an arch's atomic.h file needs to define all
 * atomic operations with arch_ prefix (e.g. arch_atomic_read()) and include
 * this file at the end. This file provides atomic_read() that forwards to
 * arch_atomic_read() for actual atomic operation.
 * Note: if an arch atomic operation is implemented by means of other atomic
 * operations (e.g. atomic_read()/atomic_cmpxchg() loop), then it needs to use
 * arch_ variants (i.e. arch_atomic_read()/arch_atomic_cmpxchg()) to avoid
 * double instrumentation.
 */
aa525d06
MR
17#ifndef _ASM_GENERIC_ATOMIC_INSTRUMENTED_H
18#define _ASM_GENERIC_ATOMIC_INSTRUMENTED_H
19
20#include <linux/build_bug.h>
c020395b 21#include <linux/compiler.h>
aa525d06 22#include <linux/kasan-checks.h>
e75a6795
ME
23#include <linux/kcsan-checks.h>
24
c020395b 25static __always_inline void __atomic_check_read(const volatile void *v, size_t size)
e75a6795
ME
26{
27 kasan_check_read(v, size);
28 kcsan_check_atomic_read(v, size);
29}
30
c020395b 31static __always_inline void __atomic_check_write(const volatile void *v, size_t size)
e75a6795
ME
32{
33 kasan_check_write(v, size);
34 kcsan_check_atomic_write(v, size);
35}
aa525d06 36
c020395b 37static __always_inline int
aa525d06
MR
38atomic_read(const atomic_t *v)
39{
e75a6795 40 __atomic_check_read(v, sizeof(*v));
aa525d06
MR
41 return arch_atomic_read(v);
42}
43#define atomic_read atomic_read
44
45#if defined(arch_atomic_read_acquire)
c020395b 46static __always_inline int
aa525d06
MR
47atomic_read_acquire(const atomic_t *v)
48{
e75a6795 49 __atomic_check_read(v, sizeof(*v));
aa525d06
MR
50 return arch_atomic_read_acquire(v);
51}
52#define atomic_read_acquire atomic_read_acquire
53#endif
54
c020395b 55static __always_inline void
aa525d06
MR
56atomic_set(atomic_t *v, int i)
57{
e75a6795 58 __atomic_check_write(v, sizeof(*v));
aa525d06
MR
59 arch_atomic_set(v, i);
60}
61#define atomic_set atomic_set
62
63#if defined(arch_atomic_set_release)
c020395b 64static __always_inline void
aa525d06
MR
65atomic_set_release(atomic_t *v, int i)
66{
e75a6795 67 __atomic_check_write(v, sizeof(*v));
aa525d06
MR
68 arch_atomic_set_release(v, i);
69}
70#define atomic_set_release atomic_set_release
71#endif
72
c020395b 73static __always_inline void
aa525d06
MR
74atomic_add(int i, atomic_t *v)
75{
e75a6795 76 __atomic_check_write(v, sizeof(*v));
aa525d06
MR
77 arch_atomic_add(i, v);
78}
79#define atomic_add atomic_add
80
81#if !defined(arch_atomic_add_return_relaxed) || defined(arch_atomic_add_return)
c020395b 82static __always_inline int
aa525d06
MR
83atomic_add_return(int i, atomic_t *v)
84{
e75a6795 85 __atomic_check_write(v, sizeof(*v));
aa525d06
MR
86 return arch_atomic_add_return(i, v);
87}
88#define atomic_add_return atomic_add_return
89#endif
90
91#if defined(arch_atomic_add_return_acquire)
c020395b 92static __always_inline int
aa525d06
MR
93atomic_add_return_acquire(int i, atomic_t *v)
94{
e75a6795 95 __atomic_check_write(v, sizeof(*v));
aa525d06
MR
96 return arch_atomic_add_return_acquire(i, v);
97}
98#define atomic_add_return_acquire atomic_add_return_acquire
99#endif
100
101#if defined(arch_atomic_add_return_release)
c020395b 102static __always_inline int
aa525d06
MR
103atomic_add_return_release(int i, atomic_t *v)
104{
e75a6795 105 __atomic_check_write(v, sizeof(*v));
aa525d06
MR
106 return arch_atomic_add_return_release(i, v);
107}
108#define atomic_add_return_release atomic_add_return_release
109#endif
110
111#if defined(arch_atomic_add_return_relaxed)
c020395b 112static __always_inline int
aa525d06
MR
113atomic_add_return_relaxed(int i, atomic_t *v)
114{
e75a6795 115 __atomic_check_write(v, sizeof(*v));
aa525d06
MR
116 return arch_atomic_add_return_relaxed(i, v);
117}
118#define atomic_add_return_relaxed atomic_add_return_relaxed
119#endif
120
121#if !defined(arch_atomic_fetch_add_relaxed) || defined(arch_atomic_fetch_add)
c020395b 122static __always_inline int
aa525d06
MR
123atomic_fetch_add(int i, atomic_t *v)
124{
e75a6795 125 __atomic_check_write(v, sizeof(*v));
aa525d06
MR
126 return arch_atomic_fetch_add(i, v);
127}
128#define atomic_fetch_add atomic_fetch_add
129#endif
130
131#if defined(arch_atomic_fetch_add_acquire)
c020395b 132static __always_inline int
aa525d06
MR
133atomic_fetch_add_acquire(int i, atomic_t *v)
134{
e75a6795 135 __atomic_check_write(v, sizeof(*v));
aa525d06
MR
136 return arch_atomic_fetch_add_acquire(i, v);
137}
138#define atomic_fetch_add_acquire atomic_fetch_add_acquire
139#endif
140
141#if defined(arch_atomic_fetch_add_release)
c020395b 142static __always_inline int
aa525d06
MR
143atomic_fetch_add_release(int i, atomic_t *v)
144{
e75a6795 145 __atomic_check_write(v, sizeof(*v));
aa525d06
MR
146 return arch_atomic_fetch_add_release(i, v);
147}
148#define atomic_fetch_add_release atomic_fetch_add_release
149#endif
150
151#if defined(arch_atomic_fetch_add_relaxed)
c020395b 152static __always_inline int
aa525d06
MR
153atomic_fetch_add_relaxed(int i, atomic_t *v)
154{
e75a6795 155 __atomic_check_write(v, sizeof(*v));
aa525d06
MR
156 return arch_atomic_fetch_add_relaxed(i, v);
157}
158#define atomic_fetch_add_relaxed atomic_fetch_add_relaxed
159#endif
160
c020395b 161static __always_inline void
aa525d06
MR
162atomic_sub(int i, atomic_t *v)
163{
e75a6795 164 __atomic_check_write(v, sizeof(*v));
aa525d06
MR
165 arch_atomic_sub(i, v);
166}
167#define atomic_sub atomic_sub
168
169#if !defined(arch_atomic_sub_return_relaxed) || defined(arch_atomic_sub_return)
c020395b 170static __always_inline int
aa525d06
MR
171atomic_sub_return(int i, atomic_t *v)
172{
e75a6795 173 __atomic_check_write(v, sizeof(*v));
aa525d06
MR
174 return arch_atomic_sub_return(i, v);
175}
176#define atomic_sub_return atomic_sub_return
177#endif
178
179#if defined(arch_atomic_sub_return_acquire)
c020395b 180static __always_inline int
aa525d06
MR
181atomic_sub_return_acquire(int i, atomic_t *v)
182{
e75a6795 183 __atomic_check_write(v, sizeof(*v));
aa525d06
MR
184 return arch_atomic_sub_return_acquire(i, v);
185}
186#define atomic_sub_return_acquire atomic_sub_return_acquire
187#endif
188
189#if defined(arch_atomic_sub_return_release)
c020395b 190static __always_inline int
aa525d06
MR
191atomic_sub_return_release(int i, atomic_t *v)
192{
e75a6795 193 __atomic_check_write(v, sizeof(*v));
aa525d06
MR
194 return arch_atomic_sub_return_release(i, v);
195}
196#define atomic_sub_return_release atomic_sub_return_release
197#endif
198
199#if defined(arch_atomic_sub_return_relaxed)
c020395b 200static __always_inline int
aa525d06
MR
201atomic_sub_return_relaxed(int i, atomic_t *v)
202{
e75a6795 203 __atomic_check_write(v, sizeof(*v));
aa525d06
MR
204 return arch_atomic_sub_return_relaxed(i, v);
205}
206#define atomic_sub_return_relaxed atomic_sub_return_relaxed
207#endif
208
209#if !defined(arch_atomic_fetch_sub_relaxed) || defined(arch_atomic_fetch_sub)
c020395b 210static __always_inline int
aa525d06
MR
211atomic_fetch_sub(int i, atomic_t *v)
212{
e75a6795 213 __atomic_check_write(v, sizeof(*v));
aa525d06
MR
214 return arch_atomic_fetch_sub(i, v);
215}
216#define atomic_fetch_sub atomic_fetch_sub
217#endif
218
219#if defined(arch_atomic_fetch_sub_acquire)
c020395b 220static __always_inline int
aa525d06
MR
221atomic_fetch_sub_acquire(int i, atomic_t *v)
222{
e75a6795 223 __atomic_check_write(v, sizeof(*v));
aa525d06
MR
224 return arch_atomic_fetch_sub_acquire(i, v);
225}
226#define atomic_fetch_sub_acquire atomic_fetch_sub_acquire
227#endif
228
229#if defined(arch_atomic_fetch_sub_release)
c020395b 230static __always_inline int
aa525d06
MR
231atomic_fetch_sub_release(int i, atomic_t *v)
232{
e75a6795 233 __atomic_check_write(v, sizeof(*v));
aa525d06
MR
234 return arch_atomic_fetch_sub_release(i, v);
235}
236#define atomic_fetch_sub_release atomic_fetch_sub_release
237#endif
238
239#if defined(arch_atomic_fetch_sub_relaxed)
c020395b 240static __always_inline int
aa525d06
MR
241atomic_fetch_sub_relaxed(int i, atomic_t *v)
242{
e75a6795 243 __atomic_check_write(v, sizeof(*v));
aa525d06
MR
244 return arch_atomic_fetch_sub_relaxed(i, v);
245}
246#define atomic_fetch_sub_relaxed atomic_fetch_sub_relaxed
247#endif
248
/* atomic_inc() and atomic_inc_return() ordering variants. */
#if defined(arch_atomic_inc)
static __always_inline void
atomic_inc(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	arch_atomic_inc(v);
}
#define atomic_inc atomic_inc
#endif

#if defined(arch_atomic_inc_return)
static __always_inline int
atomic_inc_return(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_inc_return(v);
}
#define atomic_inc_return atomic_inc_return
#endif

#if defined(arch_atomic_inc_return_acquire)
static __always_inline int
atomic_inc_return_acquire(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_inc_return_acquire(v);
}
#define atomic_inc_return_acquire atomic_inc_return_acquire
#endif

#if defined(arch_atomic_inc_return_release)
static __always_inline int
atomic_inc_return_release(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_inc_return_release(v);
}
#define atomic_inc_return_release atomic_inc_return_release
#endif

#if defined(arch_atomic_inc_return_relaxed)
static __always_inline int
atomic_inc_return_relaxed(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_inc_return_relaxed(v);
}
#define atomic_inc_return_relaxed atomic_inc_return_relaxed
#endif

/* atomic_fetch_inc() ordering variants. */
#if defined(arch_atomic_fetch_inc)
static __always_inline int
atomic_fetch_inc(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_inc(v);
}
#define atomic_fetch_inc atomic_fetch_inc
#endif

#if defined(arch_atomic_fetch_inc_acquire)
static __always_inline int
atomic_fetch_inc_acquire(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_inc_acquire(v);
}
#define atomic_fetch_inc_acquire atomic_fetch_inc_acquire
#endif

#if defined(arch_atomic_fetch_inc_release)
static __always_inline int
atomic_fetch_inc_release(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_inc_release(v);
}
#define atomic_fetch_inc_release atomic_fetch_inc_release
#endif

#if defined(arch_atomic_fetch_inc_relaxed)
static __always_inline int
atomic_fetch_inc_relaxed(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_inc_relaxed(v);
}
#define atomic_fetch_inc_relaxed atomic_fetch_inc_relaxed
#endif

/* atomic_dec() and atomic_dec_return() ordering variants. */
#if defined(arch_atomic_dec)
static __always_inline void
atomic_dec(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	arch_atomic_dec(v);
}
#define atomic_dec atomic_dec
#endif

#if defined(arch_atomic_dec_return)
static __always_inline int
atomic_dec_return(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_dec_return(v);
}
#define atomic_dec_return atomic_dec_return
#endif

#if defined(arch_atomic_dec_return_acquire)
static __always_inline int
atomic_dec_return_acquire(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_dec_return_acquire(v);
}
#define atomic_dec_return_acquire atomic_dec_return_acquire
#endif

#if defined(arch_atomic_dec_return_release)
static __always_inline int
atomic_dec_return_release(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_dec_return_release(v);
}
#define atomic_dec_return_release atomic_dec_return_release
#endif

#if defined(arch_atomic_dec_return_relaxed)
static __always_inline int
atomic_dec_return_relaxed(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_dec_return_relaxed(v);
}
#define atomic_dec_return_relaxed atomic_dec_return_relaxed
#endif

/* atomic_fetch_dec() ordering variants. */
#if defined(arch_atomic_fetch_dec)
static __always_inline int
atomic_fetch_dec(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_dec(v);
}
#define atomic_fetch_dec atomic_fetch_dec
#endif

#if defined(arch_atomic_fetch_dec_acquire)
static __always_inline int
atomic_fetch_dec_acquire(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_dec_acquire(v);
}
#define atomic_fetch_dec_acquire atomic_fetch_dec_acquire
#endif

#if defined(arch_atomic_fetch_dec_release)
static __always_inline int
atomic_fetch_dec_release(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_dec_release(v);
}
#define atomic_fetch_dec_release atomic_fetch_dec_release
#endif

#if defined(arch_atomic_fetch_dec_relaxed)
static __always_inline int
atomic_fetch_dec_relaxed(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_dec_relaxed(v);
}
#define atomic_fetch_dec_relaxed atomic_fetch_dec_relaxed
#endif

c020395b 429static __always_inline void
aa525d06
MR
430atomic_and(int i, atomic_t *v)
431{
e75a6795 432 __atomic_check_write(v, sizeof(*v));
aa525d06
MR
433 arch_atomic_and(i, v);
434}
435#define atomic_and atomic_and
436
437#if !defined(arch_atomic_fetch_and_relaxed) || defined(arch_atomic_fetch_and)
c020395b 438static __always_inline int
aa525d06
MR
439atomic_fetch_and(int i, atomic_t *v)
440{
e75a6795 441 __atomic_check_write(v, sizeof(*v));
aa525d06
MR
442 return arch_atomic_fetch_and(i, v);
443}
444#define atomic_fetch_and atomic_fetch_and
445#endif
446
447#if defined(arch_atomic_fetch_and_acquire)
c020395b 448static __always_inline int
aa525d06
MR
449atomic_fetch_and_acquire(int i, atomic_t *v)
450{
e75a6795 451 __atomic_check_write(v, sizeof(*v));
aa525d06
MR
452 return arch_atomic_fetch_and_acquire(i, v);
453}
454#define atomic_fetch_and_acquire atomic_fetch_and_acquire
455#endif
456
457#if defined(arch_atomic_fetch_and_release)
c020395b 458static __always_inline int
aa525d06
MR
459atomic_fetch_and_release(int i, atomic_t *v)
460{
e75a6795 461 __atomic_check_write(v, sizeof(*v));
aa525d06
MR
462 return arch_atomic_fetch_and_release(i, v);
463}
464#define atomic_fetch_and_release atomic_fetch_and_release
465#endif
466
467#if defined(arch_atomic_fetch_and_relaxed)
c020395b 468static __always_inline int
aa525d06
MR
469atomic_fetch_and_relaxed(int i, atomic_t *v)
470{
e75a6795 471 __atomic_check_write(v, sizeof(*v));
aa525d06
MR
472 return arch_atomic_fetch_and_relaxed(i, v);
473}
474#define atomic_fetch_and_relaxed atomic_fetch_and_relaxed
475#endif
476
/* atomic_andnot() and atomic_fetch_andnot() ordering variants. */
#if defined(arch_atomic_andnot)
static __always_inline void
atomic_andnot(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	arch_atomic_andnot(i, v);
}
#define atomic_andnot atomic_andnot
#endif

#if defined(arch_atomic_fetch_andnot)
static __always_inline int
atomic_fetch_andnot(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_andnot(i, v);
}
#define atomic_fetch_andnot atomic_fetch_andnot
#endif

#if defined(arch_atomic_fetch_andnot_acquire)
static __always_inline int
atomic_fetch_andnot_acquire(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_andnot_acquire(i, v);
}
#define atomic_fetch_andnot_acquire atomic_fetch_andnot_acquire
#endif

#if defined(arch_atomic_fetch_andnot_release)
static __always_inline int
atomic_fetch_andnot_release(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_andnot_release(i, v);
}
#define atomic_fetch_andnot_release atomic_fetch_andnot_release
#endif

#if defined(arch_atomic_fetch_andnot_relaxed)
static __always_inline int
atomic_fetch_andnot_relaxed(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_andnot_relaxed(i, v);
}
#define atomic_fetch_andnot_relaxed atomic_fetch_andnot_relaxed
#endif

c020395b 527static __always_inline void
aa525d06
MR
528atomic_or(int i, atomic_t *v)
529{
e75a6795 530 __atomic_check_write(v, sizeof(*v));
aa525d06
MR
531 arch_atomic_or(i, v);
532}
533#define atomic_or atomic_or
534
535#if !defined(arch_atomic_fetch_or_relaxed) || defined(arch_atomic_fetch_or)
c020395b 536static __always_inline int
aa525d06
MR
537atomic_fetch_or(int i, atomic_t *v)
538{
e75a6795 539 __atomic_check_write(v, sizeof(*v));
aa525d06
MR
540 return arch_atomic_fetch_or(i, v);
541}
542#define atomic_fetch_or atomic_fetch_or
543#endif
544
545#if defined(arch_atomic_fetch_or_acquire)
c020395b 546static __always_inline int
aa525d06
MR
547atomic_fetch_or_acquire(int i, atomic_t *v)
548{
e75a6795 549 __atomic_check_write(v, sizeof(*v));
aa525d06
MR
550 return arch_atomic_fetch_or_acquire(i, v);
551}
552#define atomic_fetch_or_acquire atomic_fetch_or_acquire
553#endif
554
555#if defined(arch_atomic_fetch_or_release)
c020395b 556static __always_inline int
aa525d06
MR
557atomic_fetch_or_release(int i, atomic_t *v)
558{
e75a6795 559 __atomic_check_write(v, sizeof(*v));
aa525d06
MR
560 return arch_atomic_fetch_or_release(i, v);
561}
562#define atomic_fetch_or_release atomic_fetch_or_release
563#endif
564
565#if defined(arch_atomic_fetch_or_relaxed)
c020395b 566static __always_inline int
aa525d06
MR
567atomic_fetch_or_relaxed(int i, atomic_t *v)
568{
e75a6795 569 __atomic_check_write(v, sizeof(*v));
aa525d06
MR
570 return arch_atomic_fetch_or_relaxed(i, v);
571}
572#define atomic_fetch_or_relaxed atomic_fetch_or_relaxed
573#endif
574
c020395b 575static __always_inline void
aa525d06
MR
576atomic_xor(int i, atomic_t *v)
577{
e75a6795 578 __atomic_check_write(v, sizeof(*v));
aa525d06
MR
579 arch_atomic_xor(i, v);
580}
581#define atomic_xor atomic_xor
582
583#if !defined(arch_atomic_fetch_xor_relaxed) || defined(arch_atomic_fetch_xor)
c020395b 584static __always_inline int
aa525d06
MR
585atomic_fetch_xor(int i, atomic_t *v)
586{
e75a6795 587 __atomic_check_write(v, sizeof(*v));
aa525d06
MR
588 return arch_atomic_fetch_xor(i, v);
589}
590#define atomic_fetch_xor atomic_fetch_xor
591#endif
592
593#if defined(arch_atomic_fetch_xor_acquire)
c020395b 594static __always_inline int
aa525d06
MR
595atomic_fetch_xor_acquire(int i, atomic_t *v)
596{
e75a6795 597 __atomic_check_write(v, sizeof(*v));
aa525d06
MR
598 return arch_atomic_fetch_xor_acquire(i, v);
599}
600#define atomic_fetch_xor_acquire atomic_fetch_xor_acquire
601#endif
602
603#if defined(arch_atomic_fetch_xor_release)
c020395b 604static __always_inline int
aa525d06
MR
605atomic_fetch_xor_release(int i, atomic_t *v)
606{
e75a6795 607 __atomic_check_write(v, sizeof(*v));
aa525d06
MR
608 return arch_atomic_fetch_xor_release(i, v);
609}
610#define atomic_fetch_xor_release atomic_fetch_xor_release
611#endif
612
613#if defined(arch_atomic_fetch_xor_relaxed)
c020395b 614static __always_inline int
aa525d06
MR
615atomic_fetch_xor_relaxed(int i, atomic_t *v)
616{
e75a6795 617 __atomic_check_write(v, sizeof(*v));
aa525d06
MR
618 return arch_atomic_fetch_xor_relaxed(i, v);
619}
620#define atomic_fetch_xor_relaxed atomic_fetch_xor_relaxed
621#endif
622
623#if !defined(arch_atomic_xchg_relaxed) || defined(arch_atomic_xchg)
c020395b 624static __always_inline int
aa525d06
MR
625atomic_xchg(atomic_t *v, int i)
626{
e75a6795 627 __atomic_check_write(v, sizeof(*v));
aa525d06
MR
628 return arch_atomic_xchg(v, i);
629}
630#define atomic_xchg atomic_xchg
631#endif
632
633#if defined(arch_atomic_xchg_acquire)
c020395b 634static __always_inline int
aa525d06
MR
635atomic_xchg_acquire(atomic_t *v, int i)
636{
e75a6795 637 __atomic_check_write(v, sizeof(*v));
aa525d06
MR
638 return arch_atomic_xchg_acquire(v, i);
639}
640#define atomic_xchg_acquire atomic_xchg_acquire
641#endif
642
643#if defined(arch_atomic_xchg_release)
c020395b 644static __always_inline int
aa525d06
MR
645atomic_xchg_release(atomic_t *v, int i)
646{
e75a6795 647 __atomic_check_write(v, sizeof(*v));
aa525d06
MR
648 return arch_atomic_xchg_release(v, i);
649}
650#define atomic_xchg_release atomic_xchg_release
651#endif
652
653#if defined(arch_atomic_xchg_relaxed)
c020395b 654static __always_inline int
aa525d06
MR
655atomic_xchg_relaxed(atomic_t *v, int i)
656{
e75a6795 657 __atomic_check_write(v, sizeof(*v));
aa525d06
MR
658 return arch_atomic_xchg_relaxed(v, i);
659}
660#define atomic_xchg_relaxed atomic_xchg_relaxed
661#endif
662
663#if !defined(arch_atomic_cmpxchg_relaxed) || defined(arch_atomic_cmpxchg)
c020395b 664static __always_inline int
aa525d06
MR
665atomic_cmpxchg(atomic_t *v, int old, int new)
666{
e75a6795 667 __atomic_check_write(v, sizeof(*v));
aa525d06
MR
668 return arch_atomic_cmpxchg(v, old, new);
669}
670#define atomic_cmpxchg atomic_cmpxchg
671#endif
672
673#if defined(arch_atomic_cmpxchg_acquire)
c020395b 674static __always_inline int
aa525d06
MR
675atomic_cmpxchg_acquire(atomic_t *v, int old, int new)
676{
e75a6795 677 __atomic_check_write(v, sizeof(*v));
aa525d06
MR
678 return arch_atomic_cmpxchg_acquire(v, old, new);
679}
680#define atomic_cmpxchg_acquire atomic_cmpxchg_acquire
681#endif
682
683#if defined(arch_atomic_cmpxchg_release)
c020395b 684static __always_inline int
aa525d06
MR
685atomic_cmpxchg_release(atomic_t *v, int old, int new)
686{
e75a6795 687 __atomic_check_write(v, sizeof(*v));
aa525d06
MR
688 return arch_atomic_cmpxchg_release(v, old, new);
689}
690#define atomic_cmpxchg_release atomic_cmpxchg_release
691#endif
692
693#if defined(arch_atomic_cmpxchg_relaxed)
c020395b 694static __always_inline int
aa525d06
MR
695atomic_cmpxchg_relaxed(atomic_t *v, int old, int new)
696{
e75a6795 697 __atomic_check_write(v, sizeof(*v));
aa525d06
MR
698 return arch_atomic_cmpxchg_relaxed(v, old, new);
699}
700#define atomic_cmpxchg_relaxed atomic_cmpxchg_relaxed
701#endif
702
/*
 * atomic_try_cmpxchg() ordering variants.
 * Note both @v and @old are checked as writes: on failure the old value is
 * written back through @old.
 */
#if defined(arch_atomic_try_cmpxchg)
static __always_inline bool
atomic_try_cmpxchg(atomic_t *v, int *old, int new)
{
	__atomic_check_write(v, sizeof(*v));
	__atomic_check_write(old, sizeof(*old));
	return arch_atomic_try_cmpxchg(v, old, new);
}
#define atomic_try_cmpxchg atomic_try_cmpxchg
#endif

#if defined(arch_atomic_try_cmpxchg_acquire)
static __always_inline bool
atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
{
	__atomic_check_write(v, sizeof(*v));
	__atomic_check_write(old, sizeof(*old));
	return arch_atomic_try_cmpxchg_acquire(v, old, new);
}
#define atomic_try_cmpxchg_acquire atomic_try_cmpxchg_acquire
#endif

#if defined(arch_atomic_try_cmpxchg_release)
static __always_inline bool
atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
{
	__atomic_check_write(v, sizeof(*v));
	__atomic_check_write(old, sizeof(*old));
	return arch_atomic_try_cmpxchg_release(v, old, new);
}
#define atomic_try_cmpxchg_release atomic_try_cmpxchg_release
#endif

#if defined(arch_atomic_try_cmpxchg_relaxed)
static __always_inline bool
atomic_try_cmpxchg_relaxed(atomic_t *v, int *old, int new)
{
	__atomic_check_write(v, sizeof(*v));
	__atomic_check_write(old, sizeof(*old));
	return arch_atomic_try_cmpxchg_relaxed(v, old, new);
}
#define atomic_try_cmpxchg_relaxed atomic_try_cmpxchg_relaxed
#endif

/* Test-result operations: op then report whether the result hit zero/sign. */
#if defined(arch_atomic_sub_and_test)
static __always_inline bool
atomic_sub_and_test(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_sub_and_test(i, v);
}
#define atomic_sub_and_test atomic_sub_and_test
#endif

#if defined(arch_atomic_dec_and_test)
static __always_inline bool
atomic_dec_and_test(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_dec_and_test(v);
}
#define atomic_dec_and_test atomic_dec_and_test
#endif

#if defined(arch_atomic_inc_and_test)
static __always_inline bool
atomic_inc_and_test(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_inc_and_test(v);
}
#define atomic_inc_and_test atomic_inc_and_test
#endif

#if defined(arch_atomic_add_negative)
static __always_inline bool
atomic_add_negative(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_add_negative(i, v);
}
#define atomic_add_negative atomic_add_negative
#endif

/* Conditional operations: modify *v only when a predicate on it holds. */
#if defined(arch_atomic_fetch_add_unless)
static __always_inline int
atomic_fetch_add_unless(atomic_t *v, int a, int u)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_add_unless(v, a, u);
}
#define atomic_fetch_add_unless atomic_fetch_add_unless
#endif

#if defined(arch_atomic_add_unless)
static __always_inline bool
atomic_add_unless(atomic_t *v, int a, int u)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_add_unless(v, a, u);
}
#define atomic_add_unless atomic_add_unless
#endif

#if defined(arch_atomic_inc_not_zero)
static __always_inline bool
atomic_inc_not_zero(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_inc_not_zero(v);
}
#define atomic_inc_not_zero atomic_inc_not_zero
#endif

#if defined(arch_atomic_inc_unless_negative)
static __always_inline bool
atomic_inc_unless_negative(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_inc_unless_negative(v);
}
#define atomic_inc_unless_negative atomic_inc_unless_negative
#endif

#if defined(arch_atomic_dec_unless_positive)
static __always_inline bool
atomic_dec_unless_positive(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_dec_unless_positive(v);
}
#define atomic_dec_unless_positive atomic_dec_unless_positive
#endif

#if defined(arch_atomic_dec_if_positive)
static __always_inline int
atomic_dec_if_positive(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_dec_if_positive(v);
}
#define atomic_dec_if_positive atomic_dec_if_positive
#endif

c020395b 847static __always_inline s64
aa525d06
MR
848atomic64_read(const atomic64_t *v)
849{
e75a6795 850 __atomic_check_read(v, sizeof(*v));
aa525d06
MR
851 return arch_atomic64_read(v);
852}
853#define atomic64_read atomic64_read
854
855#if defined(arch_atomic64_read_acquire)
c020395b 856static __always_inline s64
aa525d06
MR
857atomic64_read_acquire(const atomic64_t *v)
858{
e75a6795 859 __atomic_check_read(v, sizeof(*v));
aa525d06
MR
860 return arch_atomic64_read_acquire(v);
861}
862#define atomic64_read_acquire atomic64_read_acquire
863#endif
864
c020395b 865static __always_inline void
aa525d06
MR
866atomic64_set(atomic64_t *v, s64 i)
867{
e75a6795 868 __atomic_check_write(v, sizeof(*v));
aa525d06
MR
869 arch_atomic64_set(v, i);
870}
871#define atomic64_set atomic64_set
872
873#if defined(arch_atomic64_set_release)
c020395b 874static __always_inline void
aa525d06
MR
875atomic64_set_release(atomic64_t *v, s64 i)
876{
e75a6795 877 __atomic_check_write(v, sizeof(*v));
aa525d06
MR
878 arch_atomic64_set_release(v, i);
879}
880#define atomic64_set_release atomic64_set_release
881#endif
882
c020395b 883static __always_inline void
aa525d06
MR
884atomic64_add(s64 i, atomic64_t *v)
885{
e75a6795 886 __atomic_check_write(v, sizeof(*v));
aa525d06
MR
887 arch_atomic64_add(i, v);
888}
889#define atomic64_add atomic64_add
890
891#if !defined(arch_atomic64_add_return_relaxed) || defined(arch_atomic64_add_return)
c020395b 892static __always_inline s64
aa525d06
MR
893atomic64_add_return(s64 i, atomic64_t *v)
894{
e75a6795 895 __atomic_check_write(v, sizeof(*v));
aa525d06
MR
896 return arch_atomic64_add_return(i, v);
897}
898#define atomic64_add_return atomic64_add_return
899#endif
900
901#if defined(arch_atomic64_add_return_acquire)
c020395b 902static __always_inline s64
aa525d06
MR
903atomic64_add_return_acquire(s64 i, atomic64_t *v)
904{
e75a6795 905 __atomic_check_write(v, sizeof(*v));
aa525d06
MR
906 return arch_atomic64_add_return_acquire(i, v);
907}
908#define atomic64_add_return_acquire atomic64_add_return_acquire
909#endif
910
911#if defined(arch_atomic64_add_return_release)
c020395b 912static __always_inline s64
aa525d06
MR
913atomic64_add_return_release(s64 i, atomic64_t *v)
914{
e75a6795 915 __atomic_check_write(v, sizeof(*v));
aa525d06
MR
916 return arch_atomic64_add_return_release(i, v);
917}
918#define atomic64_add_return_release atomic64_add_return_release
919#endif
920
921#if defined(arch_atomic64_add_return_relaxed)
c020395b 922static __always_inline s64
aa525d06
MR
923atomic64_add_return_relaxed(s64 i, atomic64_t *v)
924{
e75a6795 925 __atomic_check_write(v, sizeof(*v));
aa525d06
MR
926 return arch_atomic64_add_return_relaxed(i, v);
927}
928#define atomic64_add_return_relaxed atomic64_add_return_relaxed
929#endif
930
/*
 * Instrumented atomic64_fetch_add() family: check @v, then forward to
 * the matching arch_ operation and return its result.
 */
#if !defined(arch_atomic64_fetch_add_relaxed) || defined(arch_atomic64_fetch_add)
static __always_inline s64
atomic64_fetch_add(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_add(i, v);
}
#define atomic64_fetch_add atomic64_fetch_add
#endif

#if defined(arch_atomic64_fetch_add_acquire)
static __always_inline s64
atomic64_fetch_add_acquire(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_add_acquire(i, v);
}
#define atomic64_fetch_add_acquire atomic64_fetch_add_acquire
#endif

#if defined(arch_atomic64_fetch_add_release)
static __always_inline s64
atomic64_fetch_add_release(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_add_release(i, v);
}
#define atomic64_fetch_add_release atomic64_fetch_add_release
#endif

#if defined(arch_atomic64_fetch_add_relaxed)
static __always_inline s64
atomic64_fetch_add_relaxed(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_add_relaxed(i, v);
}
#define atomic64_fetch_add_relaxed atomic64_fetch_add_relaxed
#endif
970
/* Instrumented atomic64_sub(): check @v, then defer to arch_atomic64_sub(). */
static __always_inline void
atomic64_sub(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	arch_atomic64_sub(i, v);
}
#define atomic64_sub atomic64_sub
ac605bee 978
/*
 * Instrumented atomic64_sub_return() family: check @v, then forward to
 * the matching arch_ operation and return its result.
 */
#if !defined(arch_atomic64_sub_return_relaxed) || defined(arch_atomic64_sub_return)
static __always_inline s64
atomic64_sub_return(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_sub_return(i, v);
}
#define atomic64_sub_return atomic64_sub_return
#endif

#if defined(arch_atomic64_sub_return_acquire)
static __always_inline s64
atomic64_sub_return_acquire(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_sub_return_acquire(i, v);
}
#define atomic64_sub_return_acquire atomic64_sub_return_acquire
#endif

#if defined(arch_atomic64_sub_return_release)
static __always_inline s64
atomic64_sub_return_release(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_sub_return_release(i, v);
}
#define atomic64_sub_return_release atomic64_sub_return_release
#endif

#if defined(arch_atomic64_sub_return_relaxed)
static __always_inline s64
atomic64_sub_return_relaxed(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_sub_return_relaxed(i, v);
}
#define atomic64_sub_return_relaxed atomic64_sub_return_relaxed
#endif
b06ed71a 1018
/*
 * Instrumented atomic64_fetch_sub() family: check @v, then forward to
 * the matching arch_ operation and return its result.
 */
#if !defined(arch_atomic64_fetch_sub_relaxed) || defined(arch_atomic64_fetch_sub)
static __always_inline s64
atomic64_fetch_sub(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_sub(i, v);
}
#define atomic64_fetch_sub atomic64_fetch_sub
#endif

#if defined(arch_atomic64_fetch_sub_acquire)
static __always_inline s64
atomic64_fetch_sub_acquire(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_sub_acquire(i, v);
}
#define atomic64_fetch_sub_acquire atomic64_fetch_sub_acquire
#endif

#if defined(arch_atomic64_fetch_sub_release)
static __always_inline s64
atomic64_fetch_sub_release(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_sub_release(i, v);
}
#define atomic64_fetch_sub_release atomic64_fetch_sub_release
#endif

#if defined(arch_atomic64_fetch_sub_relaxed)
static __always_inline s64
atomic64_fetch_sub_relaxed(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_sub_relaxed(i, v);
}
#define atomic64_fetch_sub_relaxed atomic64_fetch_sub_relaxed
#endif
b06ed71a 1058
/* Instrumented atomic64_inc(): check @v, then defer to arch_atomic64_inc(). */
#if defined(arch_atomic64_inc)
static __always_inline void
atomic64_inc(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	arch_atomic64_inc(v);
}
#define atomic64_inc atomic64_inc
#endif
b06ed71a 1068
/*
 * Instrumented atomic64_inc_return() family: check @v, then forward to
 * the matching arch_ operation and return its result.
 */
#if defined(arch_atomic64_inc_return)
static __always_inline s64
atomic64_inc_return(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_inc_return(v);
}
#define atomic64_inc_return atomic64_inc_return
#endif

#if defined(arch_atomic64_inc_return_acquire)
static __always_inline s64
atomic64_inc_return_acquire(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_inc_return_acquire(v);
}
#define atomic64_inc_return_acquire atomic64_inc_return_acquire
#endif

#if defined(arch_atomic64_inc_return_release)
static __always_inline s64
atomic64_inc_return_release(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_inc_return_release(v);
}
#define atomic64_inc_return_release atomic64_inc_return_release
#endif

#if defined(arch_atomic64_inc_return_relaxed)
static __always_inline s64
atomic64_inc_return_relaxed(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_inc_return_relaxed(v);
}
#define atomic64_inc_return_relaxed atomic64_inc_return_relaxed
#endif
b06ed71a 1108
/*
 * Instrumented atomic64_fetch_inc() family: check @v, then forward to
 * the matching arch_ operation and return its result.
 */
#if defined(arch_atomic64_fetch_inc)
static __always_inline s64
atomic64_fetch_inc(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_inc(v);
}
#define atomic64_fetch_inc atomic64_fetch_inc
#endif

#if defined(arch_atomic64_fetch_inc_acquire)
static __always_inline s64
atomic64_fetch_inc_acquire(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_inc_acquire(v);
}
#define atomic64_fetch_inc_acquire atomic64_fetch_inc_acquire
#endif

#if defined(arch_atomic64_fetch_inc_release)
static __always_inline s64
atomic64_fetch_inc_release(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_inc_release(v);
}
#define atomic64_fetch_inc_release atomic64_fetch_inc_release
#endif

#if defined(arch_atomic64_fetch_inc_relaxed)
static __always_inline s64
atomic64_fetch_inc_relaxed(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_inc_relaxed(v);
}
#define atomic64_fetch_inc_relaxed atomic64_fetch_inc_relaxed
#endif
b06ed71a 1148
/* Instrumented atomic64_dec(): check @v, then defer to arch_atomic64_dec(). */
#if defined(arch_atomic64_dec)
static __always_inline void
atomic64_dec(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	arch_atomic64_dec(v);
}
#define atomic64_dec atomic64_dec
#endif
b06ed71a 1158
/*
 * Instrumented atomic64_dec_return() family: check @v, then forward to
 * the matching arch_ operation and return its result.
 */
#if defined(arch_atomic64_dec_return)
static __always_inline s64
atomic64_dec_return(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_dec_return(v);
}
#define atomic64_dec_return atomic64_dec_return
#endif

#if defined(arch_atomic64_dec_return_acquire)
static __always_inline s64
atomic64_dec_return_acquire(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_dec_return_acquire(v);
}
#define atomic64_dec_return_acquire atomic64_dec_return_acquire
#endif

#if defined(arch_atomic64_dec_return_release)
static __always_inline s64
atomic64_dec_return_release(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_dec_return_release(v);
}
#define atomic64_dec_return_release atomic64_dec_return_release
#endif

#if defined(arch_atomic64_dec_return_relaxed)
static __always_inline s64
atomic64_dec_return_relaxed(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_dec_return_relaxed(v);
}
#define atomic64_dec_return_relaxed atomic64_dec_return_relaxed
#endif
b06ed71a 1198
/*
 * Instrumented atomic64_fetch_dec() family: check @v, then forward to
 * the matching arch_ operation and return its result.
 */
#if defined(arch_atomic64_fetch_dec)
static __always_inline s64
atomic64_fetch_dec(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_dec(v);
}
#define atomic64_fetch_dec atomic64_fetch_dec
#endif

#if defined(arch_atomic64_fetch_dec_acquire)
static __always_inline s64
atomic64_fetch_dec_acquire(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_dec_acquire(v);
}
#define atomic64_fetch_dec_acquire atomic64_fetch_dec_acquire
#endif

#if defined(arch_atomic64_fetch_dec_release)
static __always_inline s64
atomic64_fetch_dec_release(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_dec_release(v);
}
#define atomic64_fetch_dec_release atomic64_fetch_dec_release
#endif

#if defined(arch_atomic64_fetch_dec_relaxed)
static __always_inline s64
atomic64_fetch_dec_relaxed(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_dec_relaxed(v);
}
#define atomic64_fetch_dec_relaxed atomic64_fetch_dec_relaxed
#endif
b06ed71a 1238
/* Instrumented atomic64_and(): check @v, then defer to arch_atomic64_and(). */
static __always_inline void
atomic64_and(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	arch_atomic64_and(i, v);
}
#define atomic64_and atomic64_and
b06ed71a 1246
/*
 * Instrumented atomic64_fetch_and() family: check @v, then forward to
 * the matching arch_ operation and return its result.
 */
#if !defined(arch_atomic64_fetch_and_relaxed) || defined(arch_atomic64_fetch_and)
static __always_inline s64
atomic64_fetch_and(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_and(i, v);
}
#define atomic64_fetch_and atomic64_fetch_and
#endif

#if defined(arch_atomic64_fetch_and_acquire)
static __always_inline s64
atomic64_fetch_and_acquire(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_and_acquire(i, v);
}
#define atomic64_fetch_and_acquire atomic64_fetch_and_acquire
#endif

#if defined(arch_atomic64_fetch_and_release)
static __always_inline s64
atomic64_fetch_and_release(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_and_release(i, v);
}
#define atomic64_fetch_and_release atomic64_fetch_and_release
#endif

#if defined(arch_atomic64_fetch_and_relaxed)
static __always_inline s64
atomic64_fetch_and_relaxed(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_and_relaxed(i, v);
}
#define atomic64_fetch_and_relaxed atomic64_fetch_and_relaxed
#endif
b06ed71a 1286
/* Instrumented atomic64_andnot(): check @v, then defer to the arch_ op. */
#if defined(arch_atomic64_andnot)
static __always_inline void
atomic64_andnot(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	arch_atomic64_andnot(i, v);
}
#define atomic64_andnot atomic64_andnot
#endif
b06ed71a 1296
/*
 * Instrumented atomic64_fetch_andnot() family: check @v, then forward to
 * the matching arch_ operation and return its result.
 */
#if defined(arch_atomic64_fetch_andnot)
static __always_inline s64
atomic64_fetch_andnot(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_andnot(i, v);
}
#define atomic64_fetch_andnot atomic64_fetch_andnot
#endif

#if defined(arch_atomic64_fetch_andnot_acquire)
static __always_inline s64
atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_andnot_acquire(i, v);
}
#define atomic64_fetch_andnot_acquire atomic64_fetch_andnot_acquire
#endif

#if defined(arch_atomic64_fetch_andnot_release)
static __always_inline s64
atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_andnot_release(i, v);
}
#define atomic64_fetch_andnot_release atomic64_fetch_andnot_release
#endif

#if defined(arch_atomic64_fetch_andnot_relaxed)
static __always_inline s64
atomic64_fetch_andnot_relaxed(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_andnot_relaxed(i, v);
}
#define atomic64_fetch_andnot_relaxed atomic64_fetch_andnot_relaxed
#endif
b06ed71a 1336
/* Instrumented atomic64_or(): check @v, then defer to arch_atomic64_or(). */
static __always_inline void
atomic64_or(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	arch_atomic64_or(i, v);
}
#define atomic64_or atomic64_or
1344
/*
 * Instrumented atomic64_fetch_or() family: check @v, then forward to
 * the matching arch_ operation and return its result.
 */
#if !defined(arch_atomic64_fetch_or_relaxed) || defined(arch_atomic64_fetch_or)
static __always_inline s64
atomic64_fetch_or(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_or(i, v);
}
#define atomic64_fetch_or atomic64_fetch_or
#endif

#if defined(arch_atomic64_fetch_or_acquire)
static __always_inline s64
atomic64_fetch_or_acquire(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_or_acquire(i, v);
}
#define atomic64_fetch_or_acquire atomic64_fetch_or_acquire
#endif

#if defined(arch_atomic64_fetch_or_release)
static __always_inline s64
atomic64_fetch_or_release(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_or_release(i, v);
}
#define atomic64_fetch_or_release atomic64_fetch_or_release
#endif

#if defined(arch_atomic64_fetch_or_relaxed)
static __always_inline s64
atomic64_fetch_or_relaxed(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_or_relaxed(i, v);
}
#define atomic64_fetch_or_relaxed atomic64_fetch_or_relaxed
#endif
b06ed71a 1384
/* Instrumented atomic64_xor(): check @v, then defer to arch_atomic64_xor(). */
static __always_inline void
atomic64_xor(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	arch_atomic64_xor(i, v);
}
#define atomic64_xor atomic64_xor
1392
/*
 * Instrumented atomic64_fetch_xor() family: check @v, then forward to
 * the matching arch_ operation and return its result.
 */
#if !defined(arch_atomic64_fetch_xor_relaxed) || defined(arch_atomic64_fetch_xor)
static __always_inline s64
atomic64_fetch_xor(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_xor(i, v);
}
#define atomic64_fetch_xor atomic64_fetch_xor
#endif

#if defined(arch_atomic64_fetch_xor_acquire)
static __always_inline s64
atomic64_fetch_xor_acquire(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_xor_acquire(i, v);
}
#define atomic64_fetch_xor_acquire atomic64_fetch_xor_acquire
#endif

#if defined(arch_atomic64_fetch_xor_release)
static __always_inline s64
atomic64_fetch_xor_release(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_xor_release(i, v);
}
#define atomic64_fetch_xor_release atomic64_fetch_xor_release
#endif

#if defined(arch_atomic64_fetch_xor_relaxed)
static __always_inline s64
atomic64_fetch_xor_relaxed(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_xor_relaxed(i, v);
}
#define atomic64_fetch_xor_relaxed atomic64_fetch_xor_relaxed
#endif
b06ed71a 1432
/*
 * Instrumented atomic64_xchg() family: check @v, then forward to the
 * matching arch_ operation and return its result.
 */
#if !defined(arch_atomic64_xchg_relaxed) || defined(arch_atomic64_xchg)
static __always_inline s64
atomic64_xchg(atomic64_t *v, s64 i)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_xchg(v, i);
}
#define atomic64_xchg atomic64_xchg
#endif

#if defined(arch_atomic64_xchg_acquire)
static __always_inline s64
atomic64_xchg_acquire(atomic64_t *v, s64 i)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_xchg_acquire(v, i);
}
#define atomic64_xchg_acquire atomic64_xchg_acquire
#endif

#if defined(arch_atomic64_xchg_release)
static __always_inline s64
atomic64_xchg_release(atomic64_t *v, s64 i)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_xchg_release(v, i);
}
#define atomic64_xchg_release atomic64_xchg_release
#endif

#if defined(arch_atomic64_xchg_relaxed)
static __always_inline s64
atomic64_xchg_relaxed(atomic64_t *v, s64 i)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_xchg_relaxed(v, i);
}
#define atomic64_xchg_relaxed atomic64_xchg_relaxed
#endif
b06ed71a 1472
/*
 * Instrumented atomic64_cmpxchg() family: check @v, then forward to the
 * matching arch_ operation and return its result.
 */
#if !defined(arch_atomic64_cmpxchg_relaxed) || defined(arch_atomic64_cmpxchg)
static __always_inline s64
atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_cmpxchg(v, old, new);
}
#define atomic64_cmpxchg atomic64_cmpxchg
#endif

#if defined(arch_atomic64_cmpxchg_acquire)
static __always_inline s64
atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_cmpxchg_acquire(v, old, new);
}
#define atomic64_cmpxchg_acquire atomic64_cmpxchg_acquire
#endif

#if defined(arch_atomic64_cmpxchg_release)
static __always_inline s64
atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_cmpxchg_release(v, old, new);
}
#define atomic64_cmpxchg_release atomic64_cmpxchg_release
#endif

#if defined(arch_atomic64_cmpxchg_relaxed)
static __always_inline s64
atomic64_cmpxchg_relaxed(atomic64_t *v, s64 old, s64 new)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_cmpxchg_relaxed(v, old, new);
}
#define atomic64_cmpxchg_relaxed atomic64_cmpxchg_relaxed
#endif
b06ed71a 1512
/*
 * Instrumented atomic64_try_cmpxchg() family.  Both @v and @old are
 * checked as writes: the arch op may update *@old as well as *@v.
 */
#if defined(arch_atomic64_try_cmpxchg)
static __always_inline bool
atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{
	__atomic_check_write(v, sizeof(*v));
	__atomic_check_write(old, sizeof(*old));
	return arch_atomic64_try_cmpxchg(v, old, new);
}
#define atomic64_try_cmpxchg atomic64_try_cmpxchg
#endif

#if defined(arch_atomic64_try_cmpxchg_acquire)
static __always_inline bool
atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
{
	__atomic_check_write(v, sizeof(*v));
	__atomic_check_write(old, sizeof(*old));
	return arch_atomic64_try_cmpxchg_acquire(v, old, new);
}
#define atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg_acquire
#endif

#if defined(arch_atomic64_try_cmpxchg_release)
static __always_inline bool
atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
{
	__atomic_check_write(v, sizeof(*v));
	__atomic_check_write(old, sizeof(*old));
	return arch_atomic64_try_cmpxchg_release(v, old, new);
}
#define atomic64_try_cmpxchg_release atomic64_try_cmpxchg_release
#endif

#if defined(arch_atomic64_try_cmpxchg_relaxed)
static __always_inline bool
atomic64_try_cmpxchg_relaxed(atomic64_t *v, s64 *old, s64 new)
{
	__atomic_check_write(v, sizeof(*v));
	__atomic_check_write(old, sizeof(*old));
	return arch_atomic64_try_cmpxchg_relaxed(v, old, new);
}
#define atomic64_try_cmpxchg_relaxed atomic64_try_cmpxchg_relaxed
#endif
b06ed71a 1556
/*
 * Instrumented boolean-result wrappers (sub_and_test, dec_and_test,
 * inc_and_test, add_negative): check @v, then forward to the arch_ op
 * and return its bool result.
 */
#if defined(arch_atomic64_sub_and_test)
static __always_inline bool
atomic64_sub_and_test(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_sub_and_test(i, v);
}
#define atomic64_sub_and_test atomic64_sub_and_test
#endif

#if defined(arch_atomic64_dec_and_test)
static __always_inline bool
atomic64_dec_and_test(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_dec_and_test(v);
}
#define atomic64_dec_and_test atomic64_dec_and_test
#endif

#if defined(arch_atomic64_inc_and_test)
static __always_inline bool
atomic64_inc_and_test(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_inc_and_test(v);
}
#define atomic64_inc_and_test atomic64_inc_and_test
#endif

#if defined(arch_atomic64_add_negative)
static __always_inline bool
atomic64_add_negative(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_add_negative(i, v);
}
#define atomic64_add_negative atomic64_add_negative
#endif
1596
/*
 * Instrumented conditional-update wrappers (fetch_add_unless, add_unless,
 * inc_not_zero, inc_unless_negative, dec_unless_positive,
 * dec_if_positive): check @v, then forward to the arch_ op and return
 * its result.
 */
#if defined(arch_atomic64_fetch_add_unless)
static __always_inline s64
atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_add_unless(v, a, u);
}
#define atomic64_fetch_add_unless atomic64_fetch_add_unless
#endif

#if defined(arch_atomic64_add_unless)
static __always_inline bool
atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_add_unless(v, a, u);
}
#define atomic64_add_unless atomic64_add_unless
#endif

#if defined(arch_atomic64_inc_not_zero)
static __always_inline bool
atomic64_inc_not_zero(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_inc_not_zero(v);
}
#define atomic64_inc_not_zero atomic64_inc_not_zero
#endif

#if defined(arch_atomic64_inc_unless_negative)
static __always_inline bool
atomic64_inc_unless_negative(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_inc_unless_negative(v);
}
#define atomic64_inc_unless_negative atomic64_inc_unless_negative
#endif

#if defined(arch_atomic64_dec_unless_positive)
static __always_inline bool
atomic64_dec_unless_positive(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_dec_unless_positive(v);
}
#define atomic64_dec_unless_positive atomic64_dec_unless_positive
#endif

#if defined(arch_atomic64_dec_if_positive)
static __always_inline s64
atomic64_dec_if_positive(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_dec_if_positive(v);
}
#define atomic64_dec_if_positive atomic64_dec_if_positive
#endif
1656
/*
 * Instrumented xchg() macros.  @ptr is evaluated exactly once into
 * __ai_ptr so the instrumentation and the arch_ op see the same pointer
 * without double-evaluating the macro argument.
 */
#if !defined(arch_xchg_relaxed) || defined(arch_xchg)
#define xchg(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	__atomic_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_xchg(__ai_ptr, __VA_ARGS__);				\
})
#endif

#if defined(arch_xchg_acquire)
#define xchg_acquire(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	__atomic_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_xchg_acquire(__ai_ptr, __VA_ARGS__);			\
})
#endif

#if defined(arch_xchg_release)
#define xchg_release(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	__atomic_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_xchg_release(__ai_ptr, __VA_ARGS__);			\
})
#endif

#if defined(arch_xchg_relaxed)
#define xchg_relaxed(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	__atomic_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_xchg_relaxed(__ai_ptr, __VA_ARGS__);			\
})
#endif
1692
/*
 * Instrumented cmpxchg() macros: single evaluation of @ptr into
 * __ai_ptr, instrumentation check, then the arch_ op.
 */
#if !defined(arch_cmpxchg_relaxed) || defined(arch_cmpxchg)
#define cmpxchg(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	__atomic_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg(__ai_ptr, __VA_ARGS__);				\
})
#endif

#if defined(arch_cmpxchg_acquire)
#define cmpxchg_acquire(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	__atomic_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg_acquire(__ai_ptr, __VA_ARGS__);			\
})
#endif

#if defined(arch_cmpxchg_release)
#define cmpxchg_release(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	__atomic_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg_release(__ai_ptr, __VA_ARGS__);			\
})
#endif

#if defined(arch_cmpxchg_relaxed)
#define cmpxchg_relaxed(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	__atomic_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg_relaxed(__ai_ptr, __VA_ARGS__);			\
})
#endif
f9881cc4 1728
aa525d06
MR
/*
 * Instrumented cmpxchg64() macros: single evaluation of @ptr into
 * __ai_ptr, instrumentation check, then the arch_ op.
 */
#if !defined(arch_cmpxchg64_relaxed) || defined(arch_cmpxchg64)
#define cmpxchg64(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	__atomic_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg64(__ai_ptr, __VA_ARGS__);				\
})
#endif

#if defined(arch_cmpxchg64_acquire)
#define cmpxchg64_acquire(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	__atomic_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg64_acquire(__ai_ptr, __VA_ARGS__);			\
})
#endif

#if defined(arch_cmpxchg64_release)
#define cmpxchg64_release(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	__atomic_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg64_release(__ai_ptr, __VA_ARGS__);			\
})
#endif

#if defined(arch_cmpxchg64_relaxed)
#define cmpxchg64_relaxed(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	__atomic_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg64_relaxed(__ai_ptr, __VA_ARGS__);			\
})
#endif
b06ed71a 1764
/*
 * Instrumented local/sync cmpxchg macros (always defined — no #if
 * guard): single evaluation of @ptr, instrumentation check, arch_ op.
 */
#define cmpxchg_local(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	__atomic_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg_local(__ai_ptr, __VA_ARGS__);			\
})

#define cmpxchg64_local(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	__atomic_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg64_local(__ai_ptr, __VA_ARGS__);			\
})

#define sync_cmpxchg(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	__atomic_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_sync_cmpxchg(__ai_ptr, __VA_ARGS__);			\
})
1785
/*
 * Instrumented cmpxchg_double() macros.  The instrumentation check
 * covers 2 * sizeof(*ptr), matching the double-width operand the
 * arch_cmpxchg_double*() ops act on.
 */
#define cmpxchg_double(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	__atomic_check_write(__ai_ptr, 2 * sizeof(*__ai_ptr));		\
	arch_cmpxchg_double(__ai_ptr, __VA_ARGS__);			\
})


#define cmpxchg_double_local(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	__atomic_check_write(__ai_ptr, 2 * sizeof(*__ai_ptr));		\
	arch_cmpxchg_double_local(__ai_ptr, __VA_ARGS__);		\
})
1800
aa525d06 1801#endif /* _ASM_GENERIC_ATOMIC_INSTRUMENTED_H */
c020395b 1802// 7b7e2af0e75c8ecb6f02298a7075f503f30d244c