Commit | Line | Data |
---|---|---|
ac605bee DV |
1 | /* |
2 | * This file provides wrappers with KASAN instrumentation for atomic operations. | |
3 | * To use this functionality an arch's atomic.h file needs to define all | |
4 | * atomic operations with arch_ prefix (e.g. arch_atomic_read()) and include | |
5 | * this file at the end. This file provides atomic_read() that forwards to | |
6 | * arch_atomic_read() for actual atomic operation. | |
7 | * Note: if an arch atomic operation is implemented by means of other atomic | |
8 | * operations (e.g. atomic_read()/atomic_cmpxchg() loop), then it needs to use | |
9 | * arch_ variants (i.e. arch_atomic_read()/arch_atomic_cmpxchg()) to avoid | |
10 | * double instrumentation. | |
11 | */ | |
12 | ||
b06ed71a DV |
13 | #ifndef _LINUX_ATOMIC_INSTRUMENTED_H |
14 | #define _LINUX_ATOMIC_INSTRUMENTED_H | |
15 | ||
16 | #include <linux/build_bug.h> | |
a35353bb | 17 | #include <linux/kasan-checks.h> |
b06ed71a DV |
18 | |
/*
 * Instrumented atomic_read(): report the read of *v to KASAN, then
 * forward to the arch implementation for the actual atomic access.
 */
static __always_inline int atomic_read(const atomic_t *v)
{
	kasan_check_read(v, sizeof(*v));
	return arch_atomic_read(v);
}

/* 64-bit counterpart of atomic_read(). */
static __always_inline s64 atomic64_read(const atomic64_t *v)
{
	kasan_check_read(v, sizeof(*v));
	return arch_atomic64_read(v);
}
30 | ||
/*
 * Instrumented atomic_set(): report the write to *v to KASAN before
 * performing the arch-level store.
 */
static __always_inline void atomic_set(atomic_t *v, int i)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_set(v, i);
}

/* 64-bit counterpart of atomic_set(). */
static __always_inline void atomic64_set(atomic64_t *v, s64 i)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_set(v, i);
}
42 | ||
/*
 * Instrumented atomic_xchg(): *v is both read and written; a write
 * check covers validity of the location for both accesses.
 * Returns the previous value of *v.
 */
static __always_inline int atomic_xchg(atomic_t *v, int i)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_xchg(v, i);
}

/* 64-bit counterpart of atomic_xchg(). */
static __always_inline s64 atomic64_xchg(atomic64_t *v, s64 i)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_xchg(v, i);
}
54 | ||
/*
 * Instrumented atomic_cmpxchg(): *v may be written (on successful
 * comparison), so check it as a write. Returns the value of *v
 * observed at the time of the operation.
 */
static __always_inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_cmpxchg(v, old, new);
}

/* 64-bit counterpart of atomic_cmpxchg(). */
static __always_inline s64 atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_cmpxchg(v, old, new);
}
66 | ||
#ifdef arch_atomic_try_cmpxchg
#define atomic_try_cmpxchg atomic_try_cmpxchg
/*
 * Instrumented atomic_try_cmpxchg().
 *
 * Note that @old is not read-only: on comparison failure it is
 * updated with the current value of *v. It must therefore be
 * instrumented as a write (which also implies the location is
 * valid to read); a bare read check would let KASAN miss a store
 * through an invalid or read-only @old pointer.
 */
static __always_inline bool atomic_try_cmpxchg(atomic_t *v, int *old, int new)
{
	kasan_check_write(v, sizeof(*v));
	kasan_check_write(old, sizeof(*old));
	return arch_atomic_try_cmpxchg(v, old, new);
}
#endif

#ifdef arch_atomic64_try_cmpxchg
#define atomic64_try_cmpxchg atomic64_try_cmpxchg
/* 64-bit counterpart; same rationale for the write check on @old. */
static __always_inline bool atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{
	kasan_check_write(v, sizeof(*v));
	kasan_check_write(old, sizeof(*old));
	return arch_atomic64_try_cmpxchg(v, old, new);
}
#endif
86 | ||
/*
 * Instrumented __atomic_add_unless(): add @a to *v unless *v == @u.
 * Returns the old value of *v (historical int-returning API; the
 * 64-bit variant below returns bool instead — kept asymmetric to
 * match the arch-level interface).
 */
static __always_inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
	kasan_check_write(v, sizeof(*v));
	return __arch_atomic_add_unless(v, a, u);
}


/* 64-bit variant: returns true if the addition was performed. */
static __always_inline bool atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_add_unless(v, a, u);
}
99 | ||
/* Instrumented increment/decrement wrappers (no return value). */

static __always_inline void atomic_inc(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_inc(v);
}

static __always_inline void atomic64_inc(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_inc(v);
}

static __always_inline void atomic_dec(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_dec(v);
}

static __always_inline void atomic64_dec(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_dec(v);
}
123 | ||
/*
 * Instrumented arithmetic/bitwise RMW wrappers (no return value).
 * Each checks *v as a write, then defers to the arch operation.
 */

static __always_inline void atomic_add(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_add(i, v);
}

static __always_inline void atomic64_add(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_add(i, v);
}

static __always_inline void atomic_sub(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_sub(i, v);
}

static __always_inline void atomic64_sub(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_sub(i, v);
}

static __always_inline void atomic_and(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_and(i, v);
}

static __always_inline void atomic64_and(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_and(i, v);
}

static __always_inline void atomic_or(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_or(i, v);
}

static __always_inline void atomic64_or(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_or(i, v);
}

static __always_inline void atomic_xor(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic_xor(i, v);
}

static __always_inline void atomic64_xor(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	arch_atomic64_xor(i, v);
}
183 | ||
/* Instrumented inc/dec wrappers that return the new value. */

static __always_inline int atomic_inc_return(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_inc_return(v);
}

static __always_inline s64 atomic64_inc_return(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_inc_return(v);
}

static __always_inline int atomic_dec_return(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_dec_return(v);
}

static __always_inline s64 atomic64_dec_return(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_dec_return(v);
}

/* Increment *v unless it is zero; return value per the arch contract. */
static __always_inline s64 atomic64_inc_not_zero(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_inc_not_zero(v);
}

/* Decrement *v if the result would be >= 0; returns the new value. */
static __always_inline s64 atomic64_dec_if_positive(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_dec_if_positive(v);
}
219 | ||
/* Instrumented inc/dec-and-test wrappers: true if the result is zero. */

static __always_inline bool atomic_dec_and_test(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_dec_and_test(v);
}

static __always_inline bool atomic64_dec_and_test(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_dec_and_test(v);
}

static __always_inline bool atomic_inc_and_test(atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_inc_and_test(v);
}

static __always_inline bool atomic64_inc_and_test(atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_inc_and_test(v);
}
243 | ||
/* Instrumented add/sub wrappers that return the new value. */

static __always_inline int atomic_add_return(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_add_return(i, v);
}

static __always_inline s64 atomic64_add_return(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_add_return(i, v);
}

static __always_inline int atomic_sub_return(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_sub_return(i, v);
}

static __always_inline s64 atomic64_sub_return(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_sub_return(i, v);
}
267 | ||
/* Instrumented fetch_* wrappers: RMW that returns the OLD value. */

static __always_inline int atomic_fetch_add(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_add(i, v);
}

static __always_inline s64 atomic64_fetch_add(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_add(i, v);
}

static __always_inline int atomic_fetch_sub(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_sub(i, v);
}

static __always_inline s64 atomic64_fetch_sub(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_sub(i, v);
}

static __always_inline int atomic_fetch_and(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_and(i, v);
}

static __always_inline s64 atomic64_fetch_and(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_and(i, v);
}

static __always_inline int atomic_fetch_or(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_or(i, v);
}

static __always_inline s64 atomic64_fetch_or(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_or(i, v);
}

static __always_inline int atomic_fetch_xor(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_fetch_xor(i, v);
}

static __always_inline s64 atomic64_fetch_xor(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_xor(i, v);
}
327 | ||
/*
 * Instrumented predicate wrappers:
 * sub_and_test — true if the result of the subtraction is zero;
 * add_negative — true if the result of the addition is negative.
 */

static __always_inline bool atomic_sub_and_test(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_sub_and_test(i, v);
}

static __always_inline bool atomic64_sub_and_test(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_sub_and_test(i, v);
}

static __always_inline bool atomic_add_negative(int i, atomic_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic_add_negative(i, v);
}

static __always_inline bool atomic64_add_negative(s64 i, atomic64_t *v)
{
	kasan_check_write(v, sizeof(*v));
	return arch_atomic64_add_negative(i, v);
}
351 | ||
/*
 * Size-dispatched cmpxchg helper: checks @ptr for the full access
 * size, then forwards to arch_cmpxchg() at the matching width.
 * Unsupported sizes are rejected at compile time via BUILD_BUG().
 */
static __always_inline unsigned long
cmpxchg_size(volatile void *ptr, unsigned long old, unsigned long new, int size)
{
	kasan_check_write(ptr, size);
	switch (size) {
	case 1:
		return arch_cmpxchg((u8 *)ptr, (u8)old, (u8)new);
	case 2:
		return arch_cmpxchg((u16 *)ptr, (u16)old, (u16)new);
	case 4:
		return arch_cmpxchg((u32 *)ptr, (u32)old, (u32)new);
	case 8:
		/* the 8-byte case only makes sense on 64-bit longs */
		BUILD_BUG_ON(sizeof(unsigned long) != 8);
		return arch_cmpxchg((u64 *)ptr, (u64)old, (u64)new);
	}
	BUILD_BUG();
	return 0;
}

/* Type-preserving front end: sizeof(*(ptr)) selects the width. */
#define cmpxchg(ptr, old, new)						\
({									\
	((__typeof__(*(ptr)))cmpxchg_size((ptr), (unsigned long)(old),	\
		(unsigned long)(new), sizeof(*(ptr))));			\
})
376 | ||
/*
 * Size-dispatched sync_cmpxchg helper; same structure as
 * cmpxchg_size() but forwards to the arch's "sync" variant.
 */
static __always_inline unsigned long
sync_cmpxchg_size(volatile void *ptr, unsigned long old, unsigned long new,
		  int size)
{
	kasan_check_write(ptr, size);
	switch (size) {
	case 1:
		return arch_sync_cmpxchg((u8 *)ptr, (u8)old, (u8)new);
	case 2:
		return arch_sync_cmpxchg((u16 *)ptr, (u16)old, (u16)new);
	case 4:
		return arch_sync_cmpxchg((u32 *)ptr, (u32)old, (u32)new);
	case 8:
		/* the 8-byte case only makes sense on 64-bit longs */
		BUILD_BUG_ON(sizeof(unsigned long) != 8);
		return arch_sync_cmpxchg((u64 *)ptr, (u64)old, (u64)new);
	}
	BUILD_BUG();
	return 0;
}

/* Type-preserving front end: sizeof(*(ptr)) selects the width. */
#define sync_cmpxchg(ptr, old, new)					\
({									\
	((__typeof__(*(ptr)))sync_cmpxchg_size((ptr),			\
		(unsigned long)(old), (unsigned long)(new),		\
		sizeof(*(ptr))));					\
})
403 | ||
/*
 * Size-dispatched cmpxchg_local helper; same structure as
 * cmpxchg_size() but forwards to the CPU-local arch variant.
 */
static __always_inline unsigned long
cmpxchg_local_size(volatile void *ptr, unsigned long old, unsigned long new,
		   int size)
{
	kasan_check_write(ptr, size);
	switch (size) {
	case 1:
		return arch_cmpxchg_local((u8 *)ptr, (u8)old, (u8)new);
	case 2:
		return arch_cmpxchg_local((u16 *)ptr, (u16)old, (u16)new);
	case 4:
		return arch_cmpxchg_local((u32 *)ptr, (u32)old, (u32)new);
	case 8:
		/* the 8-byte case only makes sense on 64-bit longs */
		BUILD_BUG_ON(sizeof(unsigned long) != 8);
		return arch_cmpxchg_local((u64 *)ptr, (u64)old, (u64)new);
	}
	BUILD_BUG();
	return 0;
}

/* Type-preserving front end: sizeof(*(ptr)) selects the width. */
#define cmpxchg_local(ptr, old, new)					\
({									\
	((__typeof__(*(ptr)))cmpxchg_local_size((ptr),			\
		(unsigned long)(old), (unsigned long)(new),		\
		sizeof(*(ptr))));					\
})
430 | ||
/* Instrumented 64-bit cmpxchg: fixed u64 width, no size dispatch. */
static __always_inline u64
cmpxchg64_size(volatile u64 *ptr, u64 old, u64 new)
{
	kasan_check_write(ptr, sizeof(*ptr));
	return arch_cmpxchg64(ptr, old, new);
}

/* Type-preserving front end for cmpxchg64_size(). */
#define cmpxchg64(ptr, old, new)					\
({									\
	((__typeof__(*(ptr)))cmpxchg64_size((ptr), (u64)(old),		\
		(u64)(new)));						\
})

/* CPU-local variant of the 64-bit cmpxchg wrapper. */
static __always_inline u64
cmpxchg64_local_size(volatile u64 *ptr, u64 old, u64 new)
{
	kasan_check_write(ptr, sizeof(*ptr));
	return arch_cmpxchg64_local(ptr, old, new);
}

/* Type-preserving front end for cmpxchg64_local_size(). */
#define cmpxchg64_local(ptr, old, new)					\
({									\
	((__typeof__(*(ptr)))cmpxchg64_local_size((ptr), (u64)(old),	\
		(u64)(new)));						\
})
456 | ||
ac605bee DV |
457 | /* |
458 | * Originally we had the following code here: | |
459 | * __typeof__(p1) ____p1 = (p1); | |
460 | * kasan_check_write(____p1, 2 * sizeof(*____p1)); | |
461 | * arch_cmpxchg_double(____p1, (p2), (o1), (o2), (n1), (n2)); | |
462 | * But it leads to compilation failures (see gcc issue 72873). | |
463 | * So for now it's left non-instrumented. | |
464 | * There are few callers of cmpxchg_double(), so it's not critical. | |
465 | */ | |
b06ed71a DV |
466 | #define cmpxchg_double(p1, p2, o1, o2, n1, n2) \ |
467 | ({ \ | |
468 | arch_cmpxchg_double((p1), (p2), (o1), (o2), (n1), (n2)); \ | |
469 | }) | |
470 | ||
471 | #define cmpxchg_double_local(p1, p2, o1, o2, n1, n2) \ | |
472 | ({ \ | |
473 | arch_cmpxchg_double_local((p1), (p2), (o1), (o2), (n1), (n2)); \ | |
474 | }) | |
475 | ||
476 | #endif /* _LINUX_ATOMIC_INSTRUMENTED_H */ |