/*
 * NOTE(review): the following lines were git web-viewer chrome captured by
 * the scraper, not part of the source file:
 *   include/asm-generic/atomic-instrumented.h (linux-block.git)
 */
// SPDX-License-Identifier: GPL-2.0

// Generated by scripts/atomic/gen-atomic-instrumented.sh
// DO NOT MODIFY THIS FILE DIRECTLY
/*
 * This file provides wrappers with KASAN instrumentation for atomic operations.
 * To use this functionality an arch's atomic.h file needs to define all
 * atomic operations with arch_ prefix (e.g. arch_atomic_read()) and include
 * this file at the end. This file provides atomic_read() that forwards to
 * arch_atomic_read() for actual atomic operation.
 * Note: if an arch atomic operation is implemented by means of other atomic
 * operations (e.g. atomic_read()/atomic_cmpxchg() loop), then it needs to use
 * arch_ variants (i.e. arch_atomic_read()/arch_atomic_cmpxchg()) to avoid
 * double instrumentation.
 */
#ifndef _ASM_GENERIC_ATOMIC_INSTRUMENTED_H
#define _ASM_GENERIC_ATOMIC_INSTRUMENTED_H

#include <linux/build_bug.h>
#include <linux/kasan-checks.h>

/*
 * atomic_read()/atomic_set() and their acquire/release forms.
 * Each wrapper runs a KASAN check on *v (read check for reads, write check
 * for sets), then forwards to the arch_ implementation.  The trailing
 * "#define name name" marks the op as defined — presumably consumed by the
 * atomic fallback headers to avoid redefinition; TODO(review) confirm.
 */
static inline int
atomic_read(const atomic_t *v)
{
	kasan_check_read(v, sizeof(*v));
	return arch_atomic_read(v);
}
#define atomic_read atomic_read

/* Only emitted when the arch provides an acquire-ordered read. */
#if defined(arch_atomic_read_acquire)
static inline int
atomic_read_acquire(const atomic_t *v)
{
	kasan_check_read(v, sizeof(*v));
	return arch_atomic_read_acquire(v);
}
#define atomic_read_acquire atomic_read_acquire
#endif

static inline void
atomic_set(atomic_t *v, int i)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_set(v, i);
}
#define atomic_set atomic_set

#if defined(arch_atomic_set_release)
static inline void
atomic_set_release(atomic_t *v, int i)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_set_release(v, i);
}
#define atomic_set_release atomic_set_release
#endif
58
/*
 * atomic_add() plus its add_return/fetch_add ordering variants.
 * All RMW wrappers KASAN-check the target as a write before forwarding.
 * _acquire/_release/_relaxed variants are emitted only when the arch defines
 * the corresponding arch_ symbol; the unsuffixed return/fetch form is
 * skipped only when the arch supplies solely a _relaxed implementation.
 */
static inline void
atomic_add(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_add(i, v);
}
#define atomic_add atomic_add

#if !defined(arch_atomic_add_return_relaxed) || defined(arch_atomic_add_return)
static inline int
atomic_add_return(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_add_return(i, v);
}
#define atomic_add_return atomic_add_return
#endif

#if defined(arch_atomic_add_return_acquire)
static inline int
atomic_add_return_acquire(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_add_return_acquire(i, v);
}
#define atomic_add_return_acquire atomic_add_return_acquire
#endif

#if defined(arch_atomic_add_return_release)
static inline int
atomic_add_return_release(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_add_return_release(i, v);
}
#define atomic_add_return_release atomic_add_return_release
#endif

#if defined(arch_atomic_add_return_relaxed)
static inline int
atomic_add_return_relaxed(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_add_return_relaxed(i, v);
}
#define atomic_add_return_relaxed atomic_add_return_relaxed
#endif

#if !defined(arch_atomic_fetch_add_relaxed) || defined(arch_atomic_fetch_add)
static inline int
atomic_fetch_add(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_add(i, v);
}
#define atomic_fetch_add atomic_fetch_add
#endif

#if defined(arch_atomic_fetch_add_acquire)
static inline int
atomic_fetch_add_acquire(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_add_acquire(i, v);
}
#define atomic_fetch_add_acquire atomic_fetch_add_acquire
#endif

#if defined(arch_atomic_fetch_add_release)
static inline int
atomic_fetch_add_release(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_add_release(i, v);
}
#define atomic_fetch_add_release atomic_fetch_add_release
#endif

#if defined(arch_atomic_fetch_add_relaxed)
static inline int
atomic_fetch_add_relaxed(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_add_relaxed(i, v);
}
#define atomic_fetch_add_relaxed atomic_fetch_add_relaxed
#endif
146
/*
 * atomic_sub() plus its sub_return/fetch_sub ordering variants — same
 * structure as the add family: KASAN write-check then forward to arch_ op.
 */
static inline void
atomic_sub(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_sub(i, v);
}
#define atomic_sub atomic_sub

#if !defined(arch_atomic_sub_return_relaxed) || defined(arch_atomic_sub_return)
static inline int
atomic_sub_return(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_sub_return(i, v);
}
#define atomic_sub_return atomic_sub_return
#endif

#if defined(arch_atomic_sub_return_acquire)
static inline int
atomic_sub_return_acquire(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_sub_return_acquire(i, v);
}
#define atomic_sub_return_acquire atomic_sub_return_acquire
#endif

#if defined(arch_atomic_sub_return_release)
static inline int
atomic_sub_return_release(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_sub_return_release(i, v);
}
#define atomic_sub_return_release atomic_sub_return_release
#endif

#if defined(arch_atomic_sub_return_relaxed)
static inline int
atomic_sub_return_relaxed(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_sub_return_relaxed(i, v);
}
#define atomic_sub_return_relaxed atomic_sub_return_relaxed
#endif

#if !defined(arch_atomic_fetch_sub_relaxed) || defined(arch_atomic_fetch_sub)
static inline int
atomic_fetch_sub(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_sub(i, v);
}
#define atomic_fetch_sub atomic_fetch_sub
#endif

#if defined(arch_atomic_fetch_sub_acquire)
static inline int
atomic_fetch_sub_acquire(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_sub_acquire(i, v);
}
#define atomic_fetch_sub_acquire atomic_fetch_sub_acquire
#endif

#if defined(arch_atomic_fetch_sub_release)
static inline int
atomic_fetch_sub_release(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_sub_release(i, v);
}
#define atomic_fetch_sub_release atomic_fetch_sub_release
#endif

#if defined(arch_atomic_fetch_sub_relaxed)
static inline int
atomic_fetch_sub_relaxed(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_sub_relaxed(i, v);
}
#define atomic_fetch_sub_relaxed atomic_fetch_sub_relaxed
#endif
234
/*
 * atomic_inc() plus inc_return/fetch_inc ordering variants.  Unlike
 * add/sub, even the base op is conditional: it is only emitted when the
 * arch defines arch_atomic_inc() itself.
 */
#if defined(arch_atomic_inc)
static inline void
atomic_inc(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_inc(v);
}
#define atomic_inc atomic_inc
#endif

#if defined(arch_atomic_inc_return)
static inline int
atomic_inc_return(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_inc_return(v);
}
#define atomic_inc_return atomic_inc_return
#endif

#if defined(arch_atomic_inc_return_acquire)
static inline int
atomic_inc_return_acquire(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_inc_return_acquire(v);
}
#define atomic_inc_return_acquire atomic_inc_return_acquire
#endif

#if defined(arch_atomic_inc_return_release)
static inline int
atomic_inc_return_release(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_inc_return_release(v);
}
#define atomic_inc_return_release atomic_inc_return_release
#endif

#if defined(arch_atomic_inc_return_relaxed)
static inline int
atomic_inc_return_relaxed(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_inc_return_relaxed(v);
}
#define atomic_inc_return_relaxed atomic_inc_return_relaxed
#endif

#if defined(arch_atomic_fetch_inc)
static inline int
atomic_fetch_inc(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_inc(v);
}
#define atomic_fetch_inc atomic_fetch_inc
#endif

#if defined(arch_atomic_fetch_inc_acquire)
static inline int
atomic_fetch_inc_acquire(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_inc_acquire(v);
}
#define atomic_fetch_inc_acquire atomic_fetch_inc_acquire
#endif

#if defined(arch_atomic_fetch_inc_release)
static inline int
atomic_fetch_inc_release(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_inc_release(v);
}
#define atomic_fetch_inc_release atomic_fetch_inc_release
#endif

#if defined(arch_atomic_fetch_inc_relaxed)
static inline int
atomic_fetch_inc_relaxed(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_inc_relaxed(v);
}
#define atomic_fetch_inc_relaxed atomic_fetch_inc_relaxed
#endif
324
/*
 * atomic_dec() plus dec_return/fetch_dec ordering variants — mirror of the
 * inc family: each wrapper is emitted only when the arch_ op exists.
 */
#if defined(arch_atomic_dec)
static inline void
atomic_dec(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_dec(v);
}
#define atomic_dec atomic_dec
#endif

#if defined(arch_atomic_dec_return)
static inline int
atomic_dec_return(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_dec_return(v);
}
#define atomic_dec_return atomic_dec_return
#endif

#if defined(arch_atomic_dec_return_acquire)
static inline int
atomic_dec_return_acquire(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_dec_return_acquire(v);
}
#define atomic_dec_return_acquire atomic_dec_return_acquire
#endif

#if defined(arch_atomic_dec_return_release)
static inline int
atomic_dec_return_release(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_dec_return_release(v);
}
#define atomic_dec_return_release atomic_dec_return_release
#endif

#if defined(arch_atomic_dec_return_relaxed)
static inline int
atomic_dec_return_relaxed(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_dec_return_relaxed(v);
}
#define atomic_dec_return_relaxed atomic_dec_return_relaxed
#endif

#if defined(arch_atomic_fetch_dec)
static inline int
atomic_fetch_dec(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_dec(v);
}
#define atomic_fetch_dec atomic_fetch_dec
#endif

#if defined(arch_atomic_fetch_dec_acquire)
static inline int
atomic_fetch_dec_acquire(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_dec_acquire(v);
}
#define atomic_fetch_dec_acquire atomic_fetch_dec_acquire
#endif

#if defined(arch_atomic_fetch_dec_release)
static inline int
atomic_fetch_dec_release(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_dec_release(v);
}
#define atomic_fetch_dec_release atomic_fetch_dec_release
#endif

#if defined(arch_atomic_fetch_dec_relaxed)
static inline int
atomic_fetch_dec_relaxed(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_dec_relaxed(v);
}
#define atomic_fetch_dec_relaxed atomic_fetch_dec_relaxed
#endif
414
/*
 * Bitwise AND / AND-NOT families.  atomic_and() is unconditional;
 * atomic_andnot() and all fetch_* variants depend on the arch providing
 * the corresponding arch_ op.
 */
static inline void
atomic_and(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_and(i, v);
}
#define atomic_and atomic_and

#if !defined(arch_atomic_fetch_and_relaxed) || defined(arch_atomic_fetch_and)
static inline int
atomic_fetch_and(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_and(i, v);
}
#define atomic_fetch_and atomic_fetch_and
#endif

#if defined(arch_atomic_fetch_and_acquire)
static inline int
atomic_fetch_and_acquire(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_and_acquire(i, v);
}
#define atomic_fetch_and_acquire atomic_fetch_and_acquire
#endif

#if defined(arch_atomic_fetch_and_release)
static inline int
atomic_fetch_and_release(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_and_release(i, v);
}
#define atomic_fetch_and_release atomic_fetch_and_release
#endif

#if defined(arch_atomic_fetch_and_relaxed)
static inline int
atomic_fetch_and_relaxed(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_and_relaxed(i, v);
}
#define atomic_fetch_and_relaxed atomic_fetch_and_relaxed
#endif

#if defined(arch_atomic_andnot)
static inline void
atomic_andnot(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_andnot(i, v);
}
#define atomic_andnot atomic_andnot
#endif

#if defined(arch_atomic_fetch_andnot)
static inline int
atomic_fetch_andnot(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_andnot(i, v);
}
#define atomic_fetch_andnot atomic_fetch_andnot
#endif

#if defined(arch_atomic_fetch_andnot_acquire)
static inline int
atomic_fetch_andnot_acquire(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_andnot_acquire(i, v);
}
#define atomic_fetch_andnot_acquire atomic_fetch_andnot_acquire
#endif

#if defined(arch_atomic_fetch_andnot_release)
static inline int
atomic_fetch_andnot_release(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_andnot_release(i, v);
}
#define atomic_fetch_andnot_release atomic_fetch_andnot_release
#endif

#if defined(arch_atomic_fetch_andnot_relaxed)
static inline int
atomic_fetch_andnot_relaxed(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_andnot_relaxed(i, v);
}
#define atomic_fetch_andnot_relaxed atomic_fetch_andnot_relaxed
#endif
512
/* Bitwise OR family: unconditional atomic_or() plus guarded fetch_or variants. */
static inline void
atomic_or(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_or(i, v);
}
#define atomic_or atomic_or

#if !defined(arch_atomic_fetch_or_relaxed) || defined(arch_atomic_fetch_or)
static inline int
atomic_fetch_or(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_or(i, v);
}
#define atomic_fetch_or atomic_fetch_or
#endif

#if defined(arch_atomic_fetch_or_acquire)
static inline int
atomic_fetch_or_acquire(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_or_acquire(i, v);
}
#define atomic_fetch_or_acquire atomic_fetch_or_acquire
#endif

#if defined(arch_atomic_fetch_or_release)
static inline int
atomic_fetch_or_release(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_or_release(i, v);
}
#define atomic_fetch_or_release atomic_fetch_or_release
#endif

#if defined(arch_atomic_fetch_or_relaxed)
static inline int
atomic_fetch_or_relaxed(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_or_relaxed(i, v);
}
#define atomic_fetch_or_relaxed atomic_fetch_or_relaxed
#endif
560
/* Bitwise XOR family: unconditional atomic_xor() plus guarded fetch_xor variants. */
static inline void
atomic_xor(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_xor(i, v);
}
#define atomic_xor atomic_xor

#if !defined(arch_atomic_fetch_xor_relaxed) || defined(arch_atomic_fetch_xor)
static inline int
atomic_fetch_xor(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_xor(i, v);
}
#define atomic_fetch_xor atomic_fetch_xor
#endif

#if defined(arch_atomic_fetch_xor_acquire)
static inline int
atomic_fetch_xor_acquire(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_xor_acquire(i, v);
}
#define atomic_fetch_xor_acquire atomic_fetch_xor_acquire
#endif

#if defined(arch_atomic_fetch_xor_release)
static inline int
atomic_fetch_xor_release(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_xor_release(i, v);
}
#define atomic_fetch_xor_release atomic_fetch_xor_release
#endif

#if defined(arch_atomic_fetch_xor_relaxed)
static inline int
atomic_fetch_xor_relaxed(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_xor_relaxed(i, v);
}
#define atomic_fetch_xor_relaxed atomic_fetch_xor_relaxed
#endif
608
/*
 * Exchange family.  Note the argument order differs from the arithmetic
 * ops: (v, i) rather than (i, v).
 */
#if !defined(arch_atomic_xchg_relaxed) || defined(arch_atomic_xchg)
static inline int
atomic_xchg(atomic_t *v, int i)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_xchg(v, i);
}
#define atomic_xchg atomic_xchg
#endif

#if defined(arch_atomic_xchg_acquire)
static inline int
atomic_xchg_acquire(atomic_t *v, int i)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_xchg_acquire(v, i);
}
#define atomic_xchg_acquire atomic_xchg_acquire
#endif

#if defined(arch_atomic_xchg_release)
static inline int
atomic_xchg_release(atomic_t *v, int i)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_xchg_release(v, i);
}
#define atomic_xchg_release atomic_xchg_release
#endif

#if defined(arch_atomic_xchg_relaxed)
static inline int
atomic_xchg_relaxed(atomic_t *v, int i)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_xchg_relaxed(v, i);
}
#define atomic_xchg_relaxed atomic_xchg_relaxed
#endif
648
/* Compare-and-exchange family: returns the value observed in *v. */
#if !defined(arch_atomic_cmpxchg_relaxed) || defined(arch_atomic_cmpxchg)
static inline int
atomic_cmpxchg(atomic_t *v, int old, int new)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_cmpxchg(v, old, new);
}
#define atomic_cmpxchg atomic_cmpxchg
#endif

#if defined(arch_atomic_cmpxchg_acquire)
static inline int
atomic_cmpxchg_acquire(atomic_t *v, int old, int new)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_cmpxchg_acquire(v, old, new);
}
#define atomic_cmpxchg_acquire atomic_cmpxchg_acquire
#endif

#if defined(arch_atomic_cmpxchg_release)
static inline int
atomic_cmpxchg_release(atomic_t *v, int old, int new)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_cmpxchg_release(v, old, new);
}
#define atomic_cmpxchg_release atomic_cmpxchg_release
#endif

#if defined(arch_atomic_cmpxchg_relaxed)
static inline int
atomic_cmpxchg_relaxed(atomic_t *v, int old, int new)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_cmpxchg_relaxed(v, old, new);
}
#define atomic_cmpxchg_relaxed atomic_cmpxchg_relaxed
#endif
688
/*
 * try_cmpxchg family.  *old is KASAN-checked as a write too — the arch op
 * presumably stores the observed value back through 'old' on failure
 * (standard try_cmpxchg contract); TODO(review) confirm against the arch
 * implementations.
 */
#if defined(arch_atomic_try_cmpxchg)
static inline bool
atomic_try_cmpxchg(atomic_t *v, int *old, int new)
{
	kasan_check_write(v, sizeof(*v));
	kasan_check_write(old, sizeof(*old));
	return arch_atomic_try_cmpxchg(v, old, new);
}
#define atomic_try_cmpxchg atomic_try_cmpxchg
#endif

#if defined(arch_atomic_try_cmpxchg_acquire)
static inline bool
atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
{
	kasan_check_write(v, sizeof(*v));
	kasan_check_write(old, sizeof(*old));
	return arch_atomic_try_cmpxchg_acquire(v, old, new);
}
#define atomic_try_cmpxchg_acquire atomic_try_cmpxchg_acquire
#endif

#if defined(arch_atomic_try_cmpxchg_release)
static inline bool
atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
{
	kasan_check_write(v, sizeof(*v));
	kasan_check_write(old, sizeof(*old));
	return arch_atomic_try_cmpxchg_release(v, old, new);
}
#define atomic_try_cmpxchg_release atomic_try_cmpxchg_release
#endif

#if defined(arch_atomic_try_cmpxchg_relaxed)
static inline bool
atomic_try_cmpxchg_relaxed(atomic_t *v, int *old, int new)
{
	kasan_check_write(v, sizeof(*v));
	kasan_check_write(old, sizeof(*old));
	return arch_atomic_try_cmpxchg_relaxed(v, old, new);
}
#define atomic_try_cmpxchg_relaxed atomic_try_cmpxchg_relaxed
#endif
732
/* RMW-and-test predicates (bool-returning); each guarded by its arch_ op. */
#if defined(arch_atomic_sub_and_test)
static inline bool
atomic_sub_and_test(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_sub_and_test(i, v);
}
#define atomic_sub_and_test atomic_sub_and_test
#endif

#if defined(arch_atomic_dec_and_test)
static inline bool
atomic_dec_and_test(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_dec_and_test(v);
}
#define atomic_dec_and_test atomic_dec_and_test
#endif

#if defined(arch_atomic_inc_and_test)
static inline bool
atomic_inc_and_test(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_inc_and_test(v);
}
#define atomic_inc_and_test atomic_inc_and_test
#endif

#if defined(arch_atomic_add_negative)
static inline bool
atomic_add_negative(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_add_negative(i, v);
}
#define atomic_add_negative atomic_add_negative
#endif
772
/* Conditional update ops (add_unless / inc_not_zero / etc.), each guarded. */
#if defined(arch_atomic_fetch_add_unless)
static inline int
atomic_fetch_add_unless(atomic_t *v, int a, int u)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_add_unless(v, a, u);
}
#define atomic_fetch_add_unless atomic_fetch_add_unless
#endif

#if defined(arch_atomic_add_unless)
static inline bool
atomic_add_unless(atomic_t *v, int a, int u)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_add_unless(v, a, u);
}
#define atomic_add_unless atomic_add_unless
#endif

#if defined(arch_atomic_inc_not_zero)
static inline bool
atomic_inc_not_zero(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_inc_not_zero(v);
}
#define atomic_inc_not_zero atomic_inc_not_zero
#endif

#if defined(arch_atomic_inc_unless_negative)
static inline bool
atomic_inc_unless_negative(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_inc_unless_negative(v);
}
#define atomic_inc_unless_negative atomic_inc_unless_negative
#endif

#if defined(arch_atomic_dec_unless_positive)
static inline bool
atomic_dec_unless_positive(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_dec_unless_positive(v);
}
#define atomic_dec_unless_positive atomic_dec_unless_positive
#endif

#if defined(arch_atomic_dec_if_positive)
static inline int
atomic_dec_if_positive(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_dec_if_positive(v);
}
#define atomic_dec_if_positive atomic_dec_if_positive
#endif
832
/*
 * 64-bit counterparts begin here: same wrapper pattern as atomic_t,
 * operating on atomic64_t with s64 values.
 */
static inline s64
atomic64_read(const atomic64_t *v)
{
	kasan_check_read(v, sizeof(*v));
	return arch_atomic64_read(v);
}
#define atomic64_read atomic64_read

#if defined(arch_atomic64_read_acquire)
static inline s64
atomic64_read_acquire(const atomic64_t *v)
{
	kasan_check_read(v, sizeof(*v));
	return arch_atomic64_read_acquire(v);
}
#define atomic64_read_acquire atomic64_read_acquire
#endif

static inline void
atomic64_set(atomic64_t *v, s64 i)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_set(v, i);
}
#define atomic64_set atomic64_set

#if defined(arch_atomic64_set_release)
static inline void
atomic64_set_release(atomic64_t *v, s64 i)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_set_release(v, i);
}
#define atomic64_set_release atomic64_set_release
#endif
868
/* atomic64_add() plus add_return/fetch_add ordering variants (64-bit). */
static inline void
atomic64_add(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_add(i, v);
}
#define atomic64_add atomic64_add

#if !defined(arch_atomic64_add_return_relaxed) || defined(arch_atomic64_add_return)
static inline s64
atomic64_add_return(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_add_return(i, v);
}
#define atomic64_add_return atomic64_add_return
#endif

#if defined(arch_atomic64_add_return_acquire)
static inline s64
atomic64_add_return_acquire(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_add_return_acquire(i, v);
}
#define atomic64_add_return_acquire atomic64_add_return_acquire
#endif

#if defined(arch_atomic64_add_return_release)
static inline s64
atomic64_add_return_release(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_add_return_release(i, v);
}
#define atomic64_add_return_release atomic64_add_return_release
#endif

#if defined(arch_atomic64_add_return_relaxed)
static inline s64
atomic64_add_return_relaxed(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_add_return_relaxed(i, v);
}
#define atomic64_add_return_relaxed atomic64_add_return_relaxed
#endif

#if !defined(arch_atomic64_fetch_add_relaxed) || defined(arch_atomic64_fetch_add)
static inline s64
atomic64_fetch_add(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_add(i, v);
}
#define atomic64_fetch_add atomic64_fetch_add
#endif

#if defined(arch_atomic64_fetch_add_acquire)
static inline s64
atomic64_fetch_add_acquire(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_add_acquire(i, v);
}
#define atomic64_fetch_add_acquire atomic64_fetch_add_acquire
#endif

#if defined(arch_atomic64_fetch_add_release)
static inline s64
atomic64_fetch_add_release(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_add_release(i, v);
}
#define atomic64_fetch_add_release atomic64_fetch_add_release
#endif

#if defined(arch_atomic64_fetch_add_relaxed)
static inline s64
atomic64_fetch_add_relaxed(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_add_relaxed(i, v);
}
#define atomic64_fetch_add_relaxed atomic64_fetch_add_relaxed
#endif
956
/* atomic64_sub() plus sub_return/fetch_sub ordering variants (64-bit). */
static inline void
atomic64_sub(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_sub(i, v);
}
#define atomic64_sub atomic64_sub

#if !defined(arch_atomic64_sub_return_relaxed) || defined(arch_atomic64_sub_return)
static inline s64
atomic64_sub_return(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_sub_return(i, v);
}
#define atomic64_sub_return atomic64_sub_return
#endif

#if defined(arch_atomic64_sub_return_acquire)
static inline s64
atomic64_sub_return_acquire(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_sub_return_acquire(i, v);
}
#define atomic64_sub_return_acquire atomic64_sub_return_acquire
#endif

#if defined(arch_atomic64_sub_return_release)
static inline s64
atomic64_sub_return_release(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_sub_return_release(i, v);
}
#define atomic64_sub_return_release atomic64_sub_return_release
#endif

#if defined(arch_atomic64_sub_return_relaxed)
static inline s64
atomic64_sub_return_relaxed(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_sub_return_relaxed(i, v);
}
#define atomic64_sub_return_relaxed atomic64_sub_return_relaxed
#endif

#if !defined(arch_atomic64_fetch_sub_relaxed) || defined(arch_atomic64_fetch_sub)
static inline s64
atomic64_fetch_sub(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_sub(i, v);
}
#define atomic64_fetch_sub atomic64_fetch_sub
#endif

#if defined(arch_atomic64_fetch_sub_acquire)
static inline s64
atomic64_fetch_sub_acquire(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_sub_acquire(i, v);
}
#define atomic64_fetch_sub_acquire atomic64_fetch_sub_acquire
#endif

#if defined(arch_atomic64_fetch_sub_release)
static inline s64
atomic64_fetch_sub_release(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_sub_release(i, v);
}
#define atomic64_fetch_sub_release atomic64_fetch_sub_release
#endif

#if defined(arch_atomic64_fetch_sub_relaxed)
static inline s64
atomic64_fetch_sub_relaxed(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_sub_relaxed(i, v);
}
#define atomic64_fetch_sub_relaxed atomic64_fetch_sub_relaxed
#endif
b06ed71a 1044
aa525d06
MR
/* atomic64_inc() plus inc_return/fetch_inc ordering variants (64-bit). */
#if defined(arch_atomic64_inc)
static inline void
atomic64_inc(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_inc(v);
}
#define atomic64_inc atomic64_inc
#endif

#if defined(arch_atomic64_inc_return)
static inline s64
atomic64_inc_return(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_inc_return(v);
}
#define atomic64_inc_return atomic64_inc_return
#endif

#if defined(arch_atomic64_inc_return_acquire)
static inline s64
atomic64_inc_return_acquire(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_inc_return_acquire(v);
}
#define atomic64_inc_return_acquire atomic64_inc_return_acquire
#endif

#if defined(arch_atomic64_inc_return_release)
static inline s64
atomic64_inc_return_release(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_inc_return_release(v);
}
#define atomic64_inc_return_release atomic64_inc_return_release
#endif

#if defined(arch_atomic64_inc_return_relaxed)
static inline s64
atomic64_inc_return_relaxed(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_inc_return_relaxed(v);
}
#define atomic64_inc_return_relaxed atomic64_inc_return_relaxed
#endif

#if defined(arch_atomic64_fetch_inc)
static inline s64
atomic64_fetch_inc(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_inc(v);
}
#define atomic64_fetch_inc atomic64_fetch_inc
#endif

#if defined(arch_atomic64_fetch_inc_acquire)
static inline s64
atomic64_fetch_inc_acquire(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_inc_acquire(v);
}
#define atomic64_fetch_inc_acquire atomic64_fetch_inc_acquire
#endif

#if defined(arch_atomic64_fetch_inc_release)
static inline s64
atomic64_fetch_inc_release(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_inc_release(v);
}
#define atomic64_fetch_inc_release atomic64_fetch_inc_release
#endif

#if defined(arch_atomic64_fetch_inc_relaxed)
static inline s64
atomic64_fetch_inc_relaxed(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_inc_relaxed(v);
}
#define atomic64_fetch_inc_relaxed atomic64_fetch_inc_relaxed
#endif
b06ed71a 1134
aa525d06
MR
/* atomic64_dec(): KASAN write check on @v, then the arch_ decrement. */
#if defined(arch_atomic64_dec)
static inline void
atomic64_dec(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_dec(v);
}
#define atomic64_dec atomic64_dec
#endif
b06ed71a 1144
aa525d06
MR
/*
 * atomic64_dec_return() and ordering variants: mirror images of the
 * inc_return wrappers above — KASAN write check, then arch_ forward.
 */
#if defined(arch_atomic64_dec_return)
static inline s64
atomic64_dec_return(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_dec_return(v);
}
#define atomic64_dec_return atomic64_dec_return
#endif

#if defined(arch_atomic64_dec_return_acquire)
static inline s64
atomic64_dec_return_acquire(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_dec_return_acquire(v);
}
#define atomic64_dec_return_acquire atomic64_dec_return_acquire
#endif

#if defined(arch_atomic64_dec_return_release)
static inline s64
atomic64_dec_return_release(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_dec_return_release(v);
}
#define atomic64_dec_return_release atomic64_dec_return_release
#endif

#if defined(arch_atomic64_dec_return_relaxed)
static inline s64
atomic64_dec_return_relaxed(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_dec_return_relaxed(v);
}
#define atomic64_dec_return_relaxed atomic64_dec_return_relaxed
#endif
b06ed71a 1184
aa525d06
MR
/*
 * atomic64_fetch_dec() and ordering variants — instrumentation-only
 * wrappers around the corresponding arch_ operations.
 */
#if defined(arch_atomic64_fetch_dec)
static inline s64
atomic64_fetch_dec(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_dec(v);
}
#define atomic64_fetch_dec atomic64_fetch_dec
#endif

#if defined(arch_atomic64_fetch_dec_acquire)
static inline s64
atomic64_fetch_dec_acquire(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_dec_acquire(v);
}
#define atomic64_fetch_dec_acquire atomic64_fetch_dec_acquire
#endif

#if defined(arch_atomic64_fetch_dec_release)
static inline s64
atomic64_fetch_dec_release(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_dec_release(v);
}
#define atomic64_fetch_dec_release atomic64_fetch_dec_release
#endif

#if defined(arch_atomic64_fetch_dec_relaxed)
static inline s64
atomic64_fetch_dec_relaxed(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_dec_relaxed(v);
}
#define atomic64_fetch_dec_relaxed atomic64_fetch_dec_relaxed
#endif
b06ed71a 1224
aa525d06
MR
/*
 * atomic64_and() is mandatory (no guard); atomic64_fetch_and() is
 * emitted when either the arch supplies it directly or no _relaxed
 * form exists (in which case the fallback machinery provides it).
 * All wrappers: KASAN write check on @v, then arch_ forward.
 */
static inline void
atomic64_and(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_and(i, v);
}
#define atomic64_and atomic64_and

#if !defined(arch_atomic64_fetch_and_relaxed) || defined(arch_atomic64_fetch_and)
static inline s64
atomic64_fetch_and(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_and(i, v);
}
#define atomic64_fetch_and atomic64_fetch_and
#endif

#if defined(arch_atomic64_fetch_and_acquire)
static inline s64
atomic64_fetch_and_acquire(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_and_acquire(i, v);
}
#define atomic64_fetch_and_acquire atomic64_fetch_and_acquire
#endif

#if defined(arch_atomic64_fetch_and_release)
static inline s64
atomic64_fetch_and_release(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_and_release(i, v);
}
#define atomic64_fetch_and_release atomic64_fetch_and_release
#endif

#if defined(arch_atomic64_fetch_and_relaxed)
static inline s64
atomic64_fetch_and_relaxed(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_and_relaxed(i, v);
}
#define atomic64_fetch_and_relaxed atomic64_fetch_and_relaxed
#endif
b06ed71a 1272
aa525d06
MR
/*
 * atomic64_andnot() family: all optional (guarded on the arch_ op
 * being present).  KASAN write check on @v, then arch_ forward.
 */
#if defined(arch_atomic64_andnot)
static inline void
atomic64_andnot(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_andnot(i, v);
}
#define atomic64_andnot atomic64_andnot
#endif

#if defined(arch_atomic64_fetch_andnot)
static inline s64
atomic64_fetch_andnot(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_andnot(i, v);
}
#define atomic64_fetch_andnot atomic64_fetch_andnot
#endif

#if defined(arch_atomic64_fetch_andnot_acquire)
static inline s64
atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_andnot_acquire(i, v);
}
#define atomic64_fetch_andnot_acquire atomic64_fetch_andnot_acquire
#endif

#if defined(arch_atomic64_fetch_andnot_release)
static inline s64
atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_andnot_release(i, v);
}
#define atomic64_fetch_andnot_release atomic64_fetch_andnot_release
#endif

#if defined(arch_atomic64_fetch_andnot_relaxed)
static inline s64
atomic64_fetch_andnot_relaxed(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_andnot_relaxed(i, v);
}
#define atomic64_fetch_andnot_relaxed atomic64_fetch_andnot_relaxed
#endif
b06ed71a 1322
aa525d06
MR
/*
 * atomic64_or() family: same structure as the and/fetch_and block —
 * the plain op is mandatory, fetch_or follows the relaxed/base guard,
 * ordering variants are guarded on their arch_ counterparts.
 */
static inline void
atomic64_or(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_or(i, v);
}
#define atomic64_or atomic64_or

#if !defined(arch_atomic64_fetch_or_relaxed) || defined(arch_atomic64_fetch_or)
static inline s64
atomic64_fetch_or(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_or(i, v);
}
#define atomic64_fetch_or atomic64_fetch_or
#endif

#if defined(arch_atomic64_fetch_or_acquire)
static inline s64
atomic64_fetch_or_acquire(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_or_acquire(i, v);
}
#define atomic64_fetch_or_acquire atomic64_fetch_or_acquire
#endif

#if defined(arch_atomic64_fetch_or_release)
static inline s64
atomic64_fetch_or_release(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_or_release(i, v);
}
#define atomic64_fetch_or_release atomic64_fetch_or_release
#endif

#if defined(arch_atomic64_fetch_or_relaxed)
static inline s64
atomic64_fetch_or_relaxed(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_or_relaxed(i, v);
}
#define atomic64_fetch_or_relaxed atomic64_fetch_or_relaxed
#endif
b06ed71a 1370
aa525d06
MR
/*
 * atomic64_xor() family: identical shape to the and/or blocks above.
 */
static inline void
atomic64_xor(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_xor(i, v);
}
#define atomic64_xor atomic64_xor

#if !defined(arch_atomic64_fetch_xor_relaxed) || defined(arch_atomic64_fetch_xor)
static inline s64
atomic64_fetch_xor(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_xor(i, v);
}
#define atomic64_fetch_xor atomic64_fetch_xor
#endif

#if defined(arch_atomic64_fetch_xor_acquire)
static inline s64
atomic64_fetch_xor_acquire(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_xor_acquire(i, v);
}
#define atomic64_fetch_xor_acquire atomic64_fetch_xor_acquire
#endif

#if defined(arch_atomic64_fetch_xor_release)
static inline s64
atomic64_fetch_xor_release(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_xor_release(i, v);
}
#define atomic64_fetch_xor_release atomic64_fetch_xor_release
#endif

#if defined(arch_atomic64_fetch_xor_relaxed)
static inline s64
atomic64_fetch_xor_relaxed(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_xor_relaxed(i, v);
}
#define atomic64_fetch_xor_relaxed atomic64_fetch_xor_relaxed
#endif
b06ed71a 1418
aa525d06
MR
/*
 * atomic64_xchg() and ordering variants.  Note the argument order
 * (@v first, new value @i second), unlike the arithmetic ops above.
 */
#if !defined(arch_atomic64_xchg_relaxed) || defined(arch_atomic64_xchg)
static inline s64
atomic64_xchg(atomic64_t *v, s64 i)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_xchg(v, i);
}
#define atomic64_xchg atomic64_xchg
#endif

#if defined(arch_atomic64_xchg_acquire)
static inline s64
atomic64_xchg_acquire(atomic64_t *v, s64 i)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_xchg_acquire(v, i);
}
#define atomic64_xchg_acquire atomic64_xchg_acquire
#endif

#if defined(arch_atomic64_xchg_release)
static inline s64
atomic64_xchg_release(atomic64_t *v, s64 i)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_xchg_release(v, i);
}
#define atomic64_xchg_release atomic64_xchg_release
#endif

#if defined(arch_atomic64_xchg_relaxed)
static inline s64
atomic64_xchg_relaxed(atomic64_t *v, s64 i)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_xchg_relaxed(v, i);
}
#define atomic64_xchg_relaxed atomic64_xchg_relaxed
#endif
b06ed71a 1458
aa525d06
MR
/*
 * atomic64_cmpxchg() and ordering variants.  @old and @new are passed
 * by value; only @v is checked (it is the only memory the wrapper can
 * see being accessed).
 */
#if !defined(arch_atomic64_cmpxchg_relaxed) || defined(arch_atomic64_cmpxchg)
static inline s64
atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_cmpxchg(v, old, new);
}
#define atomic64_cmpxchg atomic64_cmpxchg
#endif

#if defined(arch_atomic64_cmpxchg_acquire)
static inline s64
atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_cmpxchg_acquire(v, old, new);
}
#define atomic64_cmpxchg_acquire atomic64_cmpxchg_acquire
#endif

#if defined(arch_atomic64_cmpxchg_release)
static inline s64
atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_cmpxchg_release(v, old, new);
}
#define atomic64_cmpxchg_release atomic64_cmpxchg_release
#endif

#if defined(arch_atomic64_cmpxchg_relaxed)
static inline s64
atomic64_cmpxchg_relaxed(atomic64_t *v, s64 old, s64 new)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_cmpxchg_relaxed(v, old, new);
}
#define atomic64_cmpxchg_relaxed atomic64_cmpxchg_relaxed
#endif
b06ed71a 1498
aa525d06
MR
/*
 * atomic64_try_cmpxchg() and ordering variants.  Unlike plain cmpxchg,
 * @old is a pointer and is KASAN-checked as a write too — the wrapper
 * treats *old as writable memory, matching the try_cmpxchg contract of
 * the arch_ implementation it forwards to.
 */
#if defined(arch_atomic64_try_cmpxchg)
static inline bool
atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{
	kasan_check_write(v, sizeof(*v));
	kasan_check_write(old, sizeof(*old));
	return arch_atomic64_try_cmpxchg(v, old, new);
}
#define atomic64_try_cmpxchg atomic64_try_cmpxchg
#endif

#if defined(arch_atomic64_try_cmpxchg_acquire)
static inline bool
atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
{
	kasan_check_write(v, sizeof(*v));
	kasan_check_write(old, sizeof(*old));
	return arch_atomic64_try_cmpxchg_acquire(v, old, new);
}
#define atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg_acquire
#endif

#if defined(arch_atomic64_try_cmpxchg_release)
static inline bool
atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
{
	kasan_check_write(v, sizeof(*v));
	kasan_check_write(old, sizeof(*old));
	return arch_atomic64_try_cmpxchg_release(v, old, new);
}
#define atomic64_try_cmpxchg_release atomic64_try_cmpxchg_release
#endif

#if defined(arch_atomic64_try_cmpxchg_relaxed)
static inline bool
atomic64_try_cmpxchg_relaxed(atomic64_t *v, s64 *old, s64 new)
{
	kasan_check_write(v, sizeof(*v));
	kasan_check_write(old, sizeof(*old));
	return arch_atomic64_try_cmpxchg_relaxed(v, old, new);
}
#define atomic64_try_cmpxchg_relaxed atomic64_try_cmpxchg_relaxed
#endif
b06ed71a 1542
aa525d06
MR
/*
 * Boolean-result RMW predicates.  These modify *v and return a bool,
 * so they are checked as writes like the other RMW wrappers.
 */
#if defined(arch_atomic64_sub_and_test)
static inline bool
atomic64_sub_and_test(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_sub_and_test(i, v);
}
#define atomic64_sub_and_test atomic64_sub_and_test
#endif

#if defined(arch_atomic64_dec_and_test)
static inline bool
atomic64_dec_and_test(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_dec_and_test(v);
}
#define atomic64_dec_and_test atomic64_dec_and_test
#endif

#if defined(arch_atomic64_inc_and_test)
static inline bool
atomic64_inc_and_test(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_inc_and_test(v);
}
#define atomic64_inc_and_test atomic64_inc_and_test
#endif

#if defined(arch_atomic64_add_negative)
static inline bool
atomic64_add_negative(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_add_negative(i, v);
}
#define atomic64_add_negative atomic64_add_negative
#endif
1582
/*
 * Conditional RMW wrappers (fetch_add_unless, add_unless, inc_not_zero,
 * inc_unless_negative, dec_unless_positive, dec_if_positive).  All are
 * optional and all may write *v, hence the write check.
 */
#if defined(arch_atomic64_fetch_add_unless)
static inline s64
atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_add_unless(v, a, u);
}
#define atomic64_fetch_add_unless atomic64_fetch_add_unless
#endif

#if defined(arch_atomic64_add_unless)
static inline bool
atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_add_unless(v, a, u);
}
#define atomic64_add_unless atomic64_add_unless
#endif

#if defined(arch_atomic64_inc_not_zero)
static inline bool
atomic64_inc_not_zero(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_inc_not_zero(v);
}
#define atomic64_inc_not_zero atomic64_inc_not_zero
#endif

#if defined(arch_atomic64_inc_unless_negative)
static inline bool
atomic64_inc_unless_negative(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_inc_unless_negative(v);
}
#define atomic64_inc_unless_negative atomic64_inc_unless_negative
#endif

#if defined(arch_atomic64_dec_unless_positive)
static inline bool
atomic64_dec_unless_positive(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_dec_unless_positive(v);
}
#define atomic64_dec_unless_positive atomic64_dec_unless_positive
#endif

#if defined(arch_atomic64_dec_if_positive)
static inline s64
atomic64_dec_if_positive(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_dec_if_positive(v);
}
#define atomic64_dec_if_positive atomic64_dec_if_positive
#endif
1642
/*
 * Instrumented xchg() macro family.  These are macros (not functions)
 * because the pointee type is caller-determined.  __ai_ptr captures
 * the pointer expression exactly once, avoiding multiple evaluation
 * of a side-effecting argument; the full pointee is checked as a
 * write before the arch_ macro runs.
 */
#if !defined(arch_xchg_relaxed) || defined(arch_xchg)
#define xchg(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_xchg(__ai_ptr, __VA_ARGS__);				\
})
#endif

#if defined(arch_xchg_acquire)
#define xchg_acquire(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_xchg_acquire(__ai_ptr, __VA_ARGS__);			\
})
#endif

#if defined(arch_xchg_release)
#define xchg_release(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_xchg_release(__ai_ptr, __VA_ARGS__);			\
})
#endif

#if defined(arch_xchg_relaxed)
#define xchg_relaxed(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_xchg_relaxed(__ai_ptr, __VA_ARGS__);			\
})
#endif
1678
/*
 * Instrumented cmpxchg() macro family — same single-evaluation and
 * KASAN-check pattern as the xchg() macros above.
 */
#if !defined(arch_cmpxchg_relaxed) || defined(arch_cmpxchg)
#define cmpxchg(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg(__ai_ptr, __VA_ARGS__);				\
})
#endif

#if defined(arch_cmpxchg_acquire)
#define cmpxchg_acquire(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg_acquire(__ai_ptr, __VA_ARGS__);			\
})
#endif

#if defined(arch_cmpxchg_release)
#define cmpxchg_release(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg_release(__ai_ptr, __VA_ARGS__);			\
})
#endif

#if defined(arch_cmpxchg_relaxed)
#define cmpxchg_relaxed(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg_relaxed(__ai_ptr, __VA_ARGS__);			\
})
#endif
f9881cc4 1714
aa525d06
MR
/*
 * Instrumented cmpxchg64() macro family: 64-bit variant of the
 * cmpxchg() macros above, same structure and guards.
 */
#if !defined(arch_cmpxchg64_relaxed) || defined(arch_cmpxchg64)
#define cmpxchg64(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg64(__ai_ptr, __VA_ARGS__);				\
})
#endif

#if defined(arch_cmpxchg64_acquire)
#define cmpxchg64_acquire(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg64_acquire(__ai_ptr, __VA_ARGS__);			\
})
#endif

#if defined(arch_cmpxchg64_release)
#define cmpxchg64_release(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg64_release(__ai_ptr, __VA_ARGS__);			\
})
#endif

#if defined(arch_cmpxchg64_relaxed)
#define cmpxchg64_relaxed(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg64_relaxed(__ai_ptr, __VA_ARGS__);			\
})
#endif
b06ed71a 1750
/*
 * Unconditional instrumented wrappers: cmpxchg_local(),
 * cmpxchg64_local() and sync_cmpxchg().  These have no #if guard —
 * every arch is expected to provide the corresponding arch_ macro.
 */
#define cmpxchg_local(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg_local(__ai_ptr, __VA_ARGS__);			\
})

#define cmpxchg64_local(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg64_local(__ai_ptr, __VA_ARGS__);			\
})

#define sync_cmpxchg(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	kasan_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_sync_cmpxchg(__ai_ptr, __VA_ARGS__);			\
})
1771
/*
 * Double-word compare-and-exchange wrappers.  The KASAN check covers
 * 2 * sizeof(*__ai_ptr) because cmpxchg_double() operates on a pair
 * of adjacent words starting at @ptr, not just the single pointee.
 */
#define cmpxchg_double(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	kasan_check_write(__ai_ptr, 2 * sizeof(*__ai_ptr));		\
	arch_cmpxchg_double(__ai_ptr, __VA_ARGS__);			\
})


#define cmpxchg_double_local(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	kasan_check_write(__ai_ptr, 2 * sizeof(*__ai_ptr));		\
	arch_cmpxchg_double_local(__ai_ptr, __VA_ARGS__);		\
})
1786
aa525d06 1787#endif /* _ASM_GENERIC_ATOMIC_INSTRUMENTED_H */
0cf264b3 1788// b29b625d5de9280f680e42c7be859b55b15e5f6a