/*
 * This file provides wrappers with KASAN instrumentation for atomic operations.
 * To use this functionality, an arch's atomic.h file needs to define all
 * atomic operations with an arch_ prefix (e.g. arch_atomic_read()) and include
 * this file at the end. This file provides atomic_read(), which forwards to
 * arch_atomic_read() for the actual atomic operation.
 * Note: if an arch atomic operation is implemented by means of other atomic
 * operations (e.g. an atomic_read()/atomic_cmpxchg() loop), then it needs to
 * use the arch_ variants (i.e. arch_atomic_read()/arch_atomic_cmpxchg()) to
 * avoid double instrumentation.
 */
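
/*
 * A minimal sketch of the arch side (illustrative only: the READ_ONCE()
 * body and the placeholder below are assumptions, not code from any
 * particular arch):
 *
 *	static __always_inline int arch_atomic_read(const atomic_t *v)
 *	{
 *		return READ_ONCE(v->counter);
 *	}
 *
 *	...define all other arch_atomic*() and arch_cmpxchg*() operations...
 *
 *	#include <asm-generic/atomic-instrumented.h>
 */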

#ifndef _LINUX_ATOMIC_INSTRUMENTED_H
#define _LINUX_ATOMIC_INSTRUMENTED_H

#include <linux/build_bug.h>
#include <linux/kasan-checks.h>

static __always_inline int atomic_read(const atomic_t *v)
{
	kasan_check_read(v, sizeof(*v));
	return arch_atomic_read(v);
}

static __always_inline s64 atomic64_read(const atomic64_t *v)
{
	kasan_check_read(v, sizeof(*v));
	return arch_atomic64_read(v);
}

static __always_inline void atomic_set(atomic_t *v, int i)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_set(v, i);
}

static __always_inline void atomic64_set(atomic64_t *v, s64 i)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_set(v, i);
}

static __always_inline int atomic_xchg(atomic_t *v, int i)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_xchg(v, i);
}

static __always_inline s64 atomic64_xchg(atomic64_t *v, s64 i)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_xchg(v, i);
}

static __always_inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_cmpxchg(v, old, new);
}

static __always_inline s64 atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_cmpxchg(v, old, new);
}

#ifdef arch_atomic_try_cmpxchg
/* Self-define so generic code can probe for this optional op with #ifdef. */
#define atomic_try_cmpxchg atomic_try_cmpxchg
static __always_inline bool atomic_try_cmpxchg(atomic_t *v, int *old, int new)
{
	kasan_check_write(v, sizeof(*v));
	kasan_check_read(old, sizeof(*old));
	return arch_atomic_try_cmpxchg(v, old, new);
}
#endif

#ifdef arch_atomic64_try_cmpxchg
#define atomic64_try_cmpxchg atomic64_try_cmpxchg
static __always_inline bool atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{
	kasan_check_write(v, sizeof(*v));
	kasan_check_read(old, sizeof(*old));
	return arch_atomic64_try_cmpxchg(v, old, new);
}
#endif
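
/*
 * A sketch of the intended try_cmpxchg() usage (illustrative only, not
 * code from this file): on failure, *old is updated to the value
 * currently in *v, so the loop does not need to re-read it:
 *
 *	int old = atomic_read(v);
 *	do {
 *		new = old + 1;
 *	} while (!atomic_try_cmpxchg(v, &old, new));
 */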

static __always_inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
	kasan_check_write(v, sizeof(*v));
	return __arch_atomic_add_unless(v, a, u);
}

static __always_inline bool atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_add_unless(v, a, u);
}

static __always_inline void atomic_inc(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_inc(v);
}

static __always_inline void atomic64_inc(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_inc(v);
}

static __always_inline void atomic_dec(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_dec(v);
}

static __always_inline void atomic64_dec(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_dec(v);
}

static __always_inline void atomic_add(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_add(i, v);
}

static __always_inline void atomic64_add(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_add(i, v);
}

static __always_inline void atomic_sub(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_sub(i, v);
}

static __always_inline void atomic64_sub(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_sub(i, v);
}

static __always_inline void atomic_and(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_and(i, v);
}

static __always_inline void atomic64_and(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_and(i, v);
}

static __always_inline void atomic_or(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_or(i, v);
}

static __always_inline void atomic64_or(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_or(i, v);
}

static __always_inline void atomic_xor(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_xor(i, v);
}

static __always_inline void atomic64_xor(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_xor(i, v);
}

static __always_inline int atomic_inc_return(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_inc_return(v);
}

static __always_inline s64 atomic64_inc_return(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_inc_return(v);
}

static __always_inline int atomic_dec_return(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_dec_return(v);
}

static __always_inline s64 atomic64_dec_return(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_dec_return(v);
}

static __always_inline bool atomic64_inc_not_zero(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_inc_not_zero(v);
}

static __always_inline s64 atomic64_dec_if_positive(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_dec_if_positive(v);
}

static __always_inline bool atomic_dec_and_test(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_dec_and_test(v);
}

static __always_inline bool atomic64_dec_and_test(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_dec_and_test(v);
}

static __always_inline bool atomic_inc_and_test(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_inc_and_test(v);
}

static __always_inline bool atomic64_inc_and_test(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_inc_and_test(v);
}

static __always_inline int atomic_add_return(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_add_return(i, v);
}

static __always_inline s64 atomic64_add_return(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_add_return(i, v);
}

static __always_inline int atomic_sub_return(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_sub_return(i, v);
}

static __always_inline s64 atomic64_sub_return(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_sub_return(i, v);
}

static __always_inline int atomic_fetch_add(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_add(i, v);
}

static __always_inline s64 atomic64_fetch_add(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_add(i, v);
}

static __always_inline int atomic_fetch_sub(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_sub(i, v);
}

static __always_inline s64 atomic64_fetch_sub(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_sub(i, v);
}

static __always_inline int atomic_fetch_and(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_and(i, v);
}

static __always_inline s64 atomic64_fetch_and(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_and(i, v);
}

static __always_inline int atomic_fetch_or(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_or(i, v);
}

static __always_inline s64 atomic64_fetch_or(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_or(i, v);
}

static __always_inline int atomic_fetch_xor(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_xor(i, v);
}

static __always_inline s64 atomic64_fetch_xor(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_xor(i, v);
}

static __always_inline bool atomic_sub_and_test(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_sub_and_test(i, v);
}

static __always_inline bool atomic64_sub_and_test(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_sub_and_test(i, v);
}

static __always_inline bool atomic_add_negative(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_add_negative(i, v);
}

static __always_inline bool atomic64_add_negative(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_add_negative(i, v);
}

static __always_inline unsigned long
cmpxchg_size(volatile void *ptr, unsigned long old, unsigned long new, int size)
{
	kasan_check_write(ptr, size);
	switch (size) {
	case 1:
		return arch_cmpxchg((u8 *)ptr, (u8)old, (u8)new);
	case 2:
		return arch_cmpxchg((u16 *)ptr, (u16)old, (u16)new);
	case 4:
		return arch_cmpxchg((u32 *)ptr, (u32)old, (u32)new);
	case 8:
		BUILD_BUG_ON(sizeof(unsigned long) != 8);
		return arch_cmpxchg((u64 *)ptr, (u64)old, (u64)new);
	}
	BUILD_BUG();
	return 0;
}

#define cmpxchg(ptr, old, new)						\
({									\
	((__typeof__(*(ptr)))cmpxchg_size((ptr), (unsigned long)(old),	\
		(unsigned long)(new), sizeof(*(ptr))));			\
})
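
/*
 * Usage sketch (illustrative only): cmpxchg() returns the value that was
 * previously in *ptr, cast back to the pointee type:
 *
 *	u32 val = 1;
 *	u32 old = cmpxchg(&val, 1, 2);
 *
 * Afterwards old == 1 and val == 2, since the compare succeeded.
 */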

static __always_inline unsigned long
sync_cmpxchg_size(volatile void *ptr, unsigned long old, unsigned long new,
		  int size)
{
	kasan_check_write(ptr, size);
	switch (size) {
	case 1:
		return arch_sync_cmpxchg((u8 *)ptr, (u8)old, (u8)new);
	case 2:
		return arch_sync_cmpxchg((u16 *)ptr, (u16)old, (u16)new);
	case 4:
		return arch_sync_cmpxchg((u32 *)ptr, (u32)old, (u32)new);
	case 8:
		BUILD_BUG_ON(sizeof(unsigned long) != 8);
		return arch_sync_cmpxchg((u64 *)ptr, (u64)old, (u64)new);
	}
	BUILD_BUG();
	return 0;
}

#define sync_cmpxchg(ptr, old, new)					\
({									\
	((__typeof__(*(ptr)))sync_cmpxchg_size((ptr),			\
		(unsigned long)(old), (unsigned long)(new),		\
		sizeof(*(ptr))));					\
})

static __always_inline unsigned long
cmpxchg_local_size(volatile void *ptr, unsigned long old, unsigned long new,
		   int size)
{
	kasan_check_write(ptr, size);
	switch (size) {
	case 1:
		return arch_cmpxchg_local((u8 *)ptr, (u8)old, (u8)new);
	case 2:
		return arch_cmpxchg_local((u16 *)ptr, (u16)old, (u16)new);
	case 4:
		return arch_cmpxchg_local((u32 *)ptr, (u32)old, (u32)new);
	case 8:
		BUILD_BUG_ON(sizeof(unsigned long) != 8);
		return arch_cmpxchg_local((u64 *)ptr, (u64)old, (u64)new);
	}
	BUILD_BUG();
	return 0;
}

#define cmpxchg_local(ptr, old, new)					\
({									\
	((__typeof__(*(ptr)))cmpxchg_local_size((ptr),			\
		(unsigned long)(old), (unsigned long)(new),		\
		sizeof(*(ptr))));					\
})

static __always_inline u64
cmpxchg64_size(volatile u64 *ptr, u64 old, u64 new)
{
	kasan_check_write(ptr, sizeof(*ptr));
	return arch_cmpxchg64(ptr, old, new);
}

#define cmpxchg64(ptr, old, new)					\
({									\
	((__typeof__(*(ptr)))cmpxchg64_size((ptr), (u64)(old),		\
		(u64)(new)));						\
})

static __always_inline u64
cmpxchg64_local_size(volatile u64 *ptr, u64 old, u64 new)
{
	kasan_check_write(ptr, sizeof(*ptr));
	return arch_cmpxchg64_local(ptr, old, new);
}

#define cmpxchg64_local(ptr, old, new)					\
({									\
	((__typeof__(*(ptr)))cmpxchg64_local_size((ptr), (u64)(old),	\
		(u64)(new)));						\
})

/*
 * Originally we had the following code here:
 *	__typeof__(p1) ____p1 = (p1);
 *	kasan_check_write(____p1, 2 * sizeof(*____p1));
 *	arch_cmpxchg_double(____p1, (p2), (o1), (o2), (n1), (n2));
 * But it leads to compilation failures (see gcc issue 72873).
 * So for now it's left non-instrumented.
 * There are few callers of cmpxchg_double(), so it's not critical.
 */
#define cmpxchg_double(p1, p2, o1, o2, n1, n2)				\
({									\
	arch_cmpxchg_double((p1), (p2), (o1), (o2), (n1), (n2));	\
})

#define cmpxchg_double_local(p1, p2, o1, o2, n1, n2)			\
({									\
	arch_cmpxchg_double_local((p1), (p2), (o1), (o2), (n1), (n2));	\
})

#endif /* _LINUX_ATOMIC_INSTRUMENTED_H */