Commit | Line | Data |
---|---|---|
aa525d06 MR |
1 | // SPDX-License-Identifier: GPL-2.0 |
2 | ||
3 | // Generated by scripts/atomic/gen-atomic-instrumented.sh | |
4 | // DO NOT MODIFY THIS FILE DIRECTLY | |
5 | ||
ac605bee DV |
6 | /* |
7 | * This file provides wrappers with KASAN instrumentation for atomic operations. | |
8 | * To use this functionality an arch's atomic.h file needs to define all | |
9 | * atomic operations with arch_ prefix (e.g. arch_atomic_read()) and include | |
10 | * this file at the end. This file provides atomic_read() that forwards to | |
11 | * arch_atomic_read() for actual atomic operation. | |
12 | * Note: if an arch atomic operation is implemented by means of other atomic | |
13 | * operations (e.g. atomic_read()/atomic_cmpxchg() loop), then it needs to use | |
14 | * arch_ variants (i.e. arch_atomic_read()/arch_atomic_cmpxchg()) to avoid | |
15 | * double instrumentation. | |
16 | */ | |
aa525d06 MR |
17 | #ifndef _ASM_GENERIC_ATOMIC_INSTRUMENTED_H |
18 | #define _ASM_GENERIC_ATOMIC_INSTRUMENTED_H | |
19 | ||
20 | #include <linux/build_bug.h> | |
21 | #include <linux/kasan-checks.h> | |
e75a6795 ME |
22 | #include <linux/kcsan-checks.h> |
23 | ||
/*
 * Instrumentation hooks shared by every wrapper below: before an atomic
 * operation is forwarded to its arch_ implementation, the accessed
 * address/size is reported to KASAN (memory-safety) and KCSAN
 * (data-race) checkers.
 */
static inline void __atomic_check_read(const volatile void *v, size_t size)
{
	kasan_check_read(v, size);
	kcsan_check_atomic_read(v, size);
}

static inline void __atomic_check_write(const volatile void *v, size_t size)
{
	kasan_check_write(v, size);
	kcsan_check_atomic_write(v, size);
}
aa525d06 MR |
35 | |
/*
 * atomic_t read/set wrappers: instrument the access, then forward to the
 * arch_ implementation.  The _acquire/_release variants are only defined
 * when the arch provides them.
 */
static inline int
atomic_read(const atomic_t *v)
{
	/* A plain load: reported as a read. */
	__atomic_check_read(v, sizeof(*v));
	return arch_atomic_read(v);
}
#define atomic_read atomic_read

#if defined(arch_atomic_read_acquire)
static inline int
atomic_read_acquire(const atomic_t *v)
{
	__atomic_check_read(v, sizeof(*v));
	return arch_atomic_read_acquire(v);
}
#define atomic_read_acquire atomic_read_acquire
#endif

static inline void
atomic_set(atomic_t *v, int i)
{
	/* A plain store: reported as a write. */
	__atomic_check_write(v, sizeof(*v));
	arch_atomic_set(v, i);
}
#define atomic_set atomic_set

#if defined(arch_atomic_set_release)
static inline void
atomic_set_release(atomic_t *v, int i)
{
	__atomic_check_write(v, sizeof(*v));
	arch_atomic_set_release(v, i);
}
#define atomic_set_release atomic_set_release
#endif
71 | ||
/*
 * atomic_add()/atomic_add_return()/atomic_fetch_add() wrappers.
 * Read-modify-write ops are reported to the checkers as writes.  The
 * un-suffixed *_return/fetch_* form is emitted unless the arch provides
 * only the _relaxed variant; the ordered variants are conditional on
 * the arch defining them.
 */
static inline void
atomic_add(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	arch_atomic_add(i, v);
}
#define atomic_add atomic_add

#if !defined(arch_atomic_add_return_relaxed) || defined(arch_atomic_add_return)
static inline int
atomic_add_return(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_add_return(i, v);
}
#define atomic_add_return atomic_add_return
#endif

#if defined(arch_atomic_add_return_acquire)
static inline int
atomic_add_return_acquire(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_add_return_acquire(i, v);
}
#define atomic_add_return_acquire atomic_add_return_acquire
#endif

#if defined(arch_atomic_add_return_release)
static inline int
atomic_add_return_release(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_add_return_release(i, v);
}
#define atomic_add_return_release atomic_add_return_release
#endif

#if defined(arch_atomic_add_return_relaxed)
static inline int
atomic_add_return_relaxed(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_add_return_relaxed(i, v);
}
#define atomic_add_return_relaxed atomic_add_return_relaxed
#endif

#if !defined(arch_atomic_fetch_add_relaxed) || defined(arch_atomic_fetch_add)
static inline int
atomic_fetch_add(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_add(i, v);
}
#define atomic_fetch_add atomic_fetch_add
#endif

#if defined(arch_atomic_fetch_add_acquire)
static inline int
atomic_fetch_add_acquire(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_add_acquire(i, v);
}
#define atomic_fetch_add_acquire atomic_fetch_add_acquire
#endif

#if defined(arch_atomic_fetch_add_release)
static inline int
atomic_fetch_add_release(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_add_release(i, v);
}
#define atomic_fetch_add_release atomic_fetch_add_release
#endif

#if defined(arch_atomic_fetch_add_relaxed)
static inline int
atomic_fetch_add_relaxed(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_add_relaxed(i, v);
}
#define atomic_fetch_add_relaxed atomic_fetch_add_relaxed
#endif
159 | ||
/*
 * atomic_sub()/atomic_sub_return()/atomic_fetch_sub() wrappers; same
 * structure and guard pattern as the add family above.
 */
static inline void
atomic_sub(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	arch_atomic_sub(i, v);
}
#define atomic_sub atomic_sub

#if !defined(arch_atomic_sub_return_relaxed) || defined(arch_atomic_sub_return)
static inline int
atomic_sub_return(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_sub_return(i, v);
}
#define atomic_sub_return atomic_sub_return
#endif

#if defined(arch_atomic_sub_return_acquire)
static inline int
atomic_sub_return_acquire(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_sub_return_acquire(i, v);
}
#define atomic_sub_return_acquire atomic_sub_return_acquire
#endif

#if defined(arch_atomic_sub_return_release)
static inline int
atomic_sub_return_release(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_sub_return_release(i, v);
}
#define atomic_sub_return_release atomic_sub_return_release
#endif

#if defined(arch_atomic_sub_return_relaxed)
static inline int
atomic_sub_return_relaxed(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_sub_return_relaxed(i, v);
}
#define atomic_sub_return_relaxed atomic_sub_return_relaxed
#endif

#if !defined(arch_atomic_fetch_sub_relaxed) || defined(arch_atomic_fetch_sub)
static inline int
atomic_fetch_sub(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_sub(i, v);
}
#define atomic_fetch_sub atomic_fetch_sub
#endif

#if defined(arch_atomic_fetch_sub_acquire)
static inline int
atomic_fetch_sub_acquire(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_sub_acquire(i, v);
}
#define atomic_fetch_sub_acquire atomic_fetch_sub_acquire
#endif

#if defined(arch_atomic_fetch_sub_release)
static inline int
atomic_fetch_sub_release(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_sub_release(i, v);
}
#define atomic_fetch_sub_release atomic_fetch_sub_release
#endif

#if defined(arch_atomic_fetch_sub_relaxed)
static inline int
atomic_fetch_sub_relaxed(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_sub_relaxed(i, v);
}
#define atomic_fetch_sub_relaxed atomic_fetch_sub_relaxed
#endif
247 | ||
/*
 * atomic_inc()/atomic_inc_return()/atomic_fetch_inc() wrappers.  Every
 * member of this family is optional: it is only emitted when the arch
 * defines the corresponding arch_atomic_inc*() operation.
 */
#if defined(arch_atomic_inc)
static inline void
atomic_inc(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	arch_atomic_inc(v);
}
#define atomic_inc atomic_inc
#endif

#if defined(arch_atomic_inc_return)
static inline int
atomic_inc_return(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_inc_return(v);
}
#define atomic_inc_return atomic_inc_return
#endif

#if defined(arch_atomic_inc_return_acquire)
static inline int
atomic_inc_return_acquire(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_inc_return_acquire(v);
}
#define atomic_inc_return_acquire atomic_inc_return_acquire
#endif

#if defined(arch_atomic_inc_return_release)
static inline int
atomic_inc_return_release(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_inc_return_release(v);
}
#define atomic_inc_return_release atomic_inc_return_release
#endif

#if defined(arch_atomic_inc_return_relaxed)
static inline int
atomic_inc_return_relaxed(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_inc_return_relaxed(v);
}
#define atomic_inc_return_relaxed atomic_inc_return_relaxed
#endif

#if defined(arch_atomic_fetch_inc)
static inline int
atomic_fetch_inc(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_inc(v);
}
#define atomic_fetch_inc atomic_fetch_inc
#endif

#if defined(arch_atomic_fetch_inc_acquire)
static inline int
atomic_fetch_inc_acquire(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_inc_acquire(v);
}
#define atomic_fetch_inc_acquire atomic_fetch_inc_acquire
#endif

#if defined(arch_atomic_fetch_inc_release)
static inline int
atomic_fetch_inc_release(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_inc_release(v);
}
#define atomic_fetch_inc_release atomic_fetch_inc_release
#endif

#if defined(arch_atomic_fetch_inc_relaxed)
static inline int
atomic_fetch_inc_relaxed(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_inc_relaxed(v);
}
#define atomic_fetch_inc_relaxed atomic_fetch_inc_relaxed
#endif
337 | ||
/*
 * atomic_dec()/atomic_dec_return()/atomic_fetch_dec() wrappers; mirror
 * image of the inc family above, each variant gated on the arch
 * providing it.
 */
#if defined(arch_atomic_dec)
static inline void
atomic_dec(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	arch_atomic_dec(v);
}
#define atomic_dec atomic_dec
#endif

#if defined(arch_atomic_dec_return)
static inline int
atomic_dec_return(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_dec_return(v);
}
#define atomic_dec_return atomic_dec_return
#endif

#if defined(arch_atomic_dec_return_acquire)
static inline int
atomic_dec_return_acquire(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_dec_return_acquire(v);
}
#define atomic_dec_return_acquire atomic_dec_return_acquire
#endif

#if defined(arch_atomic_dec_return_release)
static inline int
atomic_dec_return_release(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_dec_return_release(v);
}
#define atomic_dec_return_release atomic_dec_return_release
#endif

#if defined(arch_atomic_dec_return_relaxed)
static inline int
atomic_dec_return_relaxed(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_dec_return_relaxed(v);
}
#define atomic_dec_return_relaxed atomic_dec_return_relaxed
#endif

#if defined(arch_atomic_fetch_dec)
static inline int
atomic_fetch_dec(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_dec(v);
}
#define atomic_fetch_dec atomic_fetch_dec
#endif

#if defined(arch_atomic_fetch_dec_acquire)
static inline int
atomic_fetch_dec_acquire(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_dec_acquire(v);
}
#define atomic_fetch_dec_acquire atomic_fetch_dec_acquire
#endif

#if defined(arch_atomic_fetch_dec_release)
static inline int
atomic_fetch_dec_release(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_dec_release(v);
}
#define atomic_fetch_dec_release atomic_fetch_dec_release
#endif

#if defined(arch_atomic_fetch_dec_relaxed)
static inline int
atomic_fetch_dec_relaxed(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_dec_relaxed(v);
}
#define atomic_fetch_dec_relaxed atomic_fetch_dec_relaxed
#endif
427 | ||
/*
 * atomic_and()/atomic_fetch_and() wrappers: bitwise-AND RMW, reported
 * as writes; fetch variants follow the usual relaxed/ordered guard
 * pattern.
 */
static inline void
atomic_and(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	arch_atomic_and(i, v);
}
#define atomic_and atomic_and

#if !defined(arch_atomic_fetch_and_relaxed) || defined(arch_atomic_fetch_and)
static inline int
atomic_fetch_and(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_and(i, v);
}
#define atomic_fetch_and atomic_fetch_and
#endif

#if defined(arch_atomic_fetch_and_acquire)
static inline int
atomic_fetch_and_acquire(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_and_acquire(i, v);
}
#define atomic_fetch_and_acquire atomic_fetch_and_acquire
#endif

#if defined(arch_atomic_fetch_and_release)
static inline int
atomic_fetch_and_release(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_and_release(i, v);
}
#define atomic_fetch_and_release atomic_fetch_and_release
#endif

#if defined(arch_atomic_fetch_and_relaxed)
static inline int
atomic_fetch_and_relaxed(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_and_relaxed(i, v);
}
#define atomic_fetch_and_relaxed atomic_fetch_and_relaxed
#endif
475 | ||
/*
 * atomic_andnot()/atomic_fetch_andnot() wrappers.  Note the guard
 * pattern differs from the other fetch families: every variant here,
 * including the base one, is emitted only if the arch defines it.
 */
#if defined(arch_atomic_andnot)
static inline void
atomic_andnot(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	arch_atomic_andnot(i, v);
}
#define atomic_andnot atomic_andnot
#endif

#if defined(arch_atomic_fetch_andnot)
static inline int
atomic_fetch_andnot(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_andnot(i, v);
}
#define atomic_fetch_andnot atomic_fetch_andnot
#endif

#if defined(arch_atomic_fetch_andnot_acquire)
static inline int
atomic_fetch_andnot_acquire(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_andnot_acquire(i, v);
}
#define atomic_fetch_andnot_acquire atomic_fetch_andnot_acquire
#endif

#if defined(arch_atomic_fetch_andnot_release)
static inline int
atomic_fetch_andnot_release(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_andnot_release(i, v);
}
#define atomic_fetch_andnot_release atomic_fetch_andnot_release
#endif

#if defined(arch_atomic_fetch_andnot_relaxed)
static inline int
atomic_fetch_andnot_relaxed(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_andnot_relaxed(i, v);
}
#define atomic_fetch_andnot_relaxed atomic_fetch_andnot_relaxed
#endif
525 | ||
/*
 * atomic_or()/atomic_fetch_or() wrappers: bitwise-OR RMW, same
 * structure as the and family.
 */
static inline void
atomic_or(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	arch_atomic_or(i, v);
}
#define atomic_or atomic_or

#if !defined(arch_atomic_fetch_or_relaxed) || defined(arch_atomic_fetch_or)
static inline int
atomic_fetch_or(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_or(i, v);
}
#define atomic_fetch_or atomic_fetch_or
#endif

#if defined(arch_atomic_fetch_or_acquire)
static inline int
atomic_fetch_or_acquire(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_or_acquire(i, v);
}
#define atomic_fetch_or_acquire atomic_fetch_or_acquire
#endif

#if defined(arch_atomic_fetch_or_release)
static inline int
atomic_fetch_or_release(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_or_release(i, v);
}
#define atomic_fetch_or_release atomic_fetch_or_release
#endif

#if defined(arch_atomic_fetch_or_relaxed)
static inline int
atomic_fetch_or_relaxed(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_or_relaxed(i, v);
}
#define atomic_fetch_or_relaxed atomic_fetch_or_relaxed
#endif
573 | ||
/*
 * atomic_xor()/atomic_fetch_xor() wrappers: bitwise-XOR RMW, same
 * structure as the and/or families.
 */
static inline void
atomic_xor(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	arch_atomic_xor(i, v);
}
#define atomic_xor atomic_xor

#if !defined(arch_atomic_fetch_xor_relaxed) || defined(arch_atomic_fetch_xor)
static inline int
atomic_fetch_xor(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_xor(i, v);
}
#define atomic_fetch_xor atomic_fetch_xor
#endif

#if defined(arch_atomic_fetch_xor_acquire)
static inline int
atomic_fetch_xor_acquire(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_xor_acquire(i, v);
}
#define atomic_fetch_xor_acquire atomic_fetch_xor_acquire
#endif

#if defined(arch_atomic_fetch_xor_release)
static inline int
atomic_fetch_xor_release(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_xor_release(i, v);
}
#define atomic_fetch_xor_release atomic_fetch_xor_release
#endif

#if defined(arch_atomic_fetch_xor_relaxed)
static inline int
atomic_fetch_xor_relaxed(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_xor_relaxed(i, v);
}
#define atomic_fetch_xor_relaxed atomic_fetch_xor_relaxed
#endif
621 | ||
/*
 * atomic_xchg() wrappers: unconditional exchange, reported as a write
 * of @v; returns the previous value.
 */
#if !defined(arch_atomic_xchg_relaxed) || defined(arch_atomic_xchg)
static inline int
atomic_xchg(atomic_t *v, int i)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_xchg(v, i);
}
#define atomic_xchg atomic_xchg
#endif

#if defined(arch_atomic_xchg_acquire)
static inline int
atomic_xchg_acquire(atomic_t *v, int i)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_xchg_acquire(v, i);
}
#define atomic_xchg_acquire atomic_xchg_acquire
#endif

#if defined(arch_atomic_xchg_release)
static inline int
atomic_xchg_release(atomic_t *v, int i)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_xchg_release(v, i);
}
#define atomic_xchg_release atomic_xchg_release
#endif

#if defined(arch_atomic_xchg_relaxed)
static inline int
atomic_xchg_relaxed(atomic_t *v, int i)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_xchg_relaxed(v, i);
}
#define atomic_xchg_relaxed atomic_xchg_relaxed
#endif
661 | ||
/*
 * atomic_cmpxchg()/atomic_try_cmpxchg() wrappers.  cmpxchg returns the
 * previous value; try_cmpxchg returns a bool and additionally
 * instruments @old as a write, since the arch op may store through
 * that pointer.
 */
#if !defined(arch_atomic_cmpxchg_relaxed) || defined(arch_atomic_cmpxchg)
static inline int
atomic_cmpxchg(atomic_t *v, int old, int new)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_cmpxchg(v, old, new);
}
#define atomic_cmpxchg atomic_cmpxchg
#endif

#if defined(arch_atomic_cmpxchg_acquire)
static inline int
atomic_cmpxchg_acquire(atomic_t *v, int old, int new)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_cmpxchg_acquire(v, old, new);
}
#define atomic_cmpxchg_acquire atomic_cmpxchg_acquire
#endif

#if defined(arch_atomic_cmpxchg_release)
static inline int
atomic_cmpxchg_release(atomic_t *v, int old, int new)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_cmpxchg_release(v, old, new);
}
#define atomic_cmpxchg_release atomic_cmpxchg_release
#endif

#if defined(arch_atomic_cmpxchg_relaxed)
static inline int
atomic_cmpxchg_relaxed(atomic_t *v, int old, int new)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_cmpxchg_relaxed(v, old, new);
}
#define atomic_cmpxchg_relaxed atomic_cmpxchg_relaxed
#endif

#if defined(arch_atomic_try_cmpxchg)
static inline bool
atomic_try_cmpxchg(atomic_t *v, int *old, int new)
{
	/* Both the atomic and the caller's @old slot are instrumented. */
	__atomic_check_write(v, sizeof(*v));
	__atomic_check_write(old, sizeof(*old));
	return arch_atomic_try_cmpxchg(v, old, new);
}
#define atomic_try_cmpxchg atomic_try_cmpxchg
#endif

#if defined(arch_atomic_try_cmpxchg_acquire)
static inline bool
atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
{
	__atomic_check_write(v, sizeof(*v));
	__atomic_check_write(old, sizeof(*old));
	return arch_atomic_try_cmpxchg_acquire(v, old, new);
}
#define atomic_try_cmpxchg_acquire atomic_try_cmpxchg_acquire
#endif

#if defined(arch_atomic_try_cmpxchg_release)
static inline bool
atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
{
	__atomic_check_write(v, sizeof(*v));
	__atomic_check_write(old, sizeof(*old));
	return arch_atomic_try_cmpxchg_release(v, old, new);
}
#define atomic_try_cmpxchg_release atomic_try_cmpxchg_release
#endif

#if defined(arch_atomic_try_cmpxchg_relaxed)
static inline bool
atomic_try_cmpxchg_relaxed(atomic_t *v, int *old, int new)
{
	__atomic_check_write(v, sizeof(*v));
	__atomic_check_write(old, sizeof(*old));
	return arch_atomic_try_cmpxchg_relaxed(v, old, new);
}
#define atomic_try_cmpxchg_relaxed atomic_try_cmpxchg_relaxed
#endif
745 | ||
/*
 * Boolean test wrappers: RMW the value and report the result of the
 * arch-defined test.  All are optional per-arch.
 */
#if defined(arch_atomic_sub_and_test)
static inline bool
atomic_sub_and_test(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_sub_and_test(i, v);
}
#define atomic_sub_and_test atomic_sub_and_test
#endif

#if defined(arch_atomic_dec_and_test)
static inline bool
atomic_dec_and_test(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_dec_and_test(v);
}
#define atomic_dec_and_test atomic_dec_and_test
#endif

#if defined(arch_atomic_inc_and_test)
static inline bool
atomic_inc_and_test(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_inc_and_test(v);
}
#define atomic_inc_and_test atomic_inc_and_test
#endif

#if defined(arch_atomic_add_negative)
static inline bool
atomic_add_negative(int i, atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_add_negative(i, v);
}
#define atomic_add_negative atomic_add_negative
#endif
785 | ||
/*
 * Conditional-update wrappers (add_unless, inc_not_zero, etc.): the
 * arch op decides whether the value changes, but the access is still
 * instrumented as a write up front.  All are optional per-arch.
 */
#if defined(arch_atomic_fetch_add_unless)
static inline int
atomic_fetch_add_unless(atomic_t *v, int a, int u)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_fetch_add_unless(v, a, u);
}
#define atomic_fetch_add_unless atomic_fetch_add_unless
#endif

#if defined(arch_atomic_add_unless)
static inline bool
atomic_add_unless(atomic_t *v, int a, int u)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_add_unless(v, a, u);
}
#define atomic_add_unless atomic_add_unless
#endif

#if defined(arch_atomic_inc_not_zero)
static inline bool
atomic_inc_not_zero(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_inc_not_zero(v);
}
#define atomic_inc_not_zero atomic_inc_not_zero
#endif

#if defined(arch_atomic_inc_unless_negative)
static inline bool
atomic_inc_unless_negative(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_inc_unless_negative(v);
}
#define atomic_inc_unless_negative atomic_inc_unless_negative
#endif

#if defined(arch_atomic_dec_unless_positive)
static inline bool
atomic_dec_unless_positive(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_dec_unless_positive(v);
}
#define atomic_dec_unless_positive atomic_dec_unless_positive
#endif

#if defined(arch_atomic_dec_if_positive)
static inline int
atomic_dec_if_positive(atomic_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic_dec_if_positive(v);
}
#define atomic_dec_if_positive atomic_dec_if_positive
#endif
845 | ||
/*
 * 64-bit (atomic64_t, s64) counterparts of the wrappers above; same
 * instrument-then-forward shape and the same guard pattern.
 */
static inline s64
atomic64_read(const atomic64_t *v)
{
	__atomic_check_read(v, sizeof(*v));
	return arch_atomic64_read(v);
}
#define atomic64_read atomic64_read

#if defined(arch_atomic64_read_acquire)
static inline s64
atomic64_read_acquire(const atomic64_t *v)
{
	__atomic_check_read(v, sizeof(*v));
	return arch_atomic64_read_acquire(v);
}
#define atomic64_read_acquire atomic64_read_acquire
#endif

static inline void
atomic64_set(atomic64_t *v, s64 i)
{
	__atomic_check_write(v, sizeof(*v));
	arch_atomic64_set(v, i);
}
#define atomic64_set atomic64_set

#if defined(arch_atomic64_set_release)
static inline void
atomic64_set_release(atomic64_t *v, s64 i)
{
	__atomic_check_write(v, sizeof(*v));
	arch_atomic64_set_release(v, i);
}
#define atomic64_set_release atomic64_set_release
#endif

static inline void
atomic64_add(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	arch_atomic64_add(i, v);
}
#define atomic64_add atomic64_add

#if !defined(arch_atomic64_add_return_relaxed) || defined(arch_atomic64_add_return)
static inline s64
atomic64_add_return(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_add_return(i, v);
}
#define atomic64_add_return atomic64_add_return
#endif

#if defined(arch_atomic64_add_return_acquire)
static inline s64
atomic64_add_return_acquire(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_add_return_acquire(i, v);
}
#define atomic64_add_return_acquire atomic64_add_return_acquire
#endif

#if defined(arch_atomic64_add_return_release)
static inline s64
atomic64_add_return_release(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_add_return_release(i, v);
}
#define atomic64_add_return_release atomic64_add_return_release
#endif

#if defined(arch_atomic64_add_return_relaxed)
static inline s64
atomic64_add_return_relaxed(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_add_return_relaxed(i, v);
}
#define atomic64_add_return_relaxed atomic64_add_return_relaxed
#endif
929 | ||
930 | #if !defined(arch_atomic64_fetch_add_relaxed) || defined(arch_atomic64_fetch_add) | |
931 | static inline s64 | |
932 | atomic64_fetch_add(s64 i, atomic64_t *v) | |
933 | { | |
e75a6795 | 934 | __atomic_check_write(v, sizeof(*v)); |
aa525d06 MR |
935 | return arch_atomic64_fetch_add(i, v); |
936 | } | |
937 | #define atomic64_fetch_add atomic64_fetch_add | |
938 | #endif | |
939 | ||
940 | #if defined(arch_atomic64_fetch_add_acquire) | |
941 | static inline s64 | |
942 | atomic64_fetch_add_acquire(s64 i, atomic64_t *v) | |
943 | { | |
e75a6795 | 944 | __atomic_check_write(v, sizeof(*v)); |
aa525d06 MR |
945 | return arch_atomic64_fetch_add_acquire(i, v); |
946 | } | |
947 | #define atomic64_fetch_add_acquire atomic64_fetch_add_acquire | |
948 | #endif | |
949 | ||
950 | #if defined(arch_atomic64_fetch_add_release) | |
951 | static inline s64 | |
952 | atomic64_fetch_add_release(s64 i, atomic64_t *v) | |
953 | { | |
e75a6795 | 954 | __atomic_check_write(v, sizeof(*v)); |
aa525d06 MR |
955 | return arch_atomic64_fetch_add_release(i, v); |
956 | } | |
957 | #define atomic64_fetch_add_release atomic64_fetch_add_release | |
958 | #endif | |
959 | ||
960 | #if defined(arch_atomic64_fetch_add_relaxed) | |
961 | static inline s64 | |
962 | atomic64_fetch_add_relaxed(s64 i, atomic64_t *v) | |
963 | { | |
e75a6795 | 964 | __atomic_check_write(v, sizeof(*v)); |
aa525d06 MR |
965 | return arch_atomic64_fetch_add_relaxed(i, v); |
966 | } | |
967 | #define atomic64_fetch_add_relaxed atomic64_fetch_add_relaxed | |
968 | #endif | |
969 | ||
970 | static inline void | |
971 | atomic64_sub(s64 i, atomic64_t *v) | |
972 | { | |
e75a6795 | 973 | __atomic_check_write(v, sizeof(*v)); |
aa525d06 MR |
974 | arch_atomic64_sub(i, v); |
975 | } | |
976 | #define atomic64_sub atomic64_sub | |
ac605bee | 977 | |
aa525d06 MR |
978 | #if !defined(arch_atomic64_sub_return_relaxed) || defined(arch_atomic64_sub_return) |
979 | static inline s64 | |
980 | atomic64_sub_return(s64 i, atomic64_t *v) | |
981 | { | |
e75a6795 | 982 | __atomic_check_write(v, sizeof(*v)); |
aa525d06 MR |
983 | return arch_atomic64_sub_return(i, v); |
984 | } | |
985 | #define atomic64_sub_return atomic64_sub_return | |
986 | #endif | |
b06ed71a | 987 | |
aa525d06 MR |
988 | #if defined(arch_atomic64_sub_return_acquire) |
989 | static inline s64 | |
990 | atomic64_sub_return_acquire(s64 i, atomic64_t *v) | |
991 | { | |
e75a6795 | 992 | __atomic_check_write(v, sizeof(*v)); |
aa525d06 MR |
993 | return arch_atomic64_sub_return_acquire(i, v); |
994 | } | |
995 | #define atomic64_sub_return_acquire atomic64_sub_return_acquire | |
996 | #endif | |
b06ed71a | 997 | |
aa525d06 MR |
998 | #if defined(arch_atomic64_sub_return_release) |
999 | static inline s64 | |
1000 | atomic64_sub_return_release(s64 i, atomic64_t *v) | |
b06ed71a | 1001 | { |
e75a6795 | 1002 | __atomic_check_write(v, sizeof(*v)); |
aa525d06 | 1003 | return arch_atomic64_sub_return_release(i, v); |
b06ed71a | 1004 | } |
aa525d06 MR |
1005 | #define atomic64_sub_return_release atomic64_sub_return_release |
1006 | #endif | |
b06ed71a | 1007 | |
aa525d06 MR |
1008 | #if defined(arch_atomic64_sub_return_relaxed) |
1009 | static inline s64 | |
1010 | atomic64_sub_return_relaxed(s64 i, atomic64_t *v) | |
b06ed71a | 1011 | { |
e75a6795 | 1012 | __atomic_check_write(v, sizeof(*v)); |
aa525d06 | 1013 | return arch_atomic64_sub_return_relaxed(i, v); |
b06ed71a | 1014 | } |
aa525d06 MR |
1015 | #define atomic64_sub_return_relaxed atomic64_sub_return_relaxed |
1016 | #endif | |
b06ed71a | 1017 | |
aa525d06 MR |
1018 | #if !defined(arch_atomic64_fetch_sub_relaxed) || defined(arch_atomic64_fetch_sub) |
1019 | static inline s64 | |
1020 | atomic64_fetch_sub(s64 i, atomic64_t *v) | |
b06ed71a | 1021 | { |
e75a6795 | 1022 | __atomic_check_write(v, sizeof(*v)); |
aa525d06 | 1023 | return arch_atomic64_fetch_sub(i, v); |
b06ed71a | 1024 | } |
aa525d06 MR |
1025 | #define atomic64_fetch_sub atomic64_fetch_sub |
1026 | #endif | |
b06ed71a | 1027 | |
aa525d06 MR |
1028 | #if defined(arch_atomic64_fetch_sub_acquire) |
1029 | static inline s64 | |
1030 | atomic64_fetch_sub_acquire(s64 i, atomic64_t *v) | |
b06ed71a | 1031 | { |
e75a6795 | 1032 | __atomic_check_write(v, sizeof(*v)); |
aa525d06 | 1033 | return arch_atomic64_fetch_sub_acquire(i, v); |
b06ed71a | 1034 | } |
aa525d06 MR |
1035 | #define atomic64_fetch_sub_acquire atomic64_fetch_sub_acquire |
1036 | #endif | |
b06ed71a | 1037 | |
aa525d06 MR |
1038 | #if defined(arch_atomic64_fetch_sub_release) |
1039 | static inline s64 | |
1040 | atomic64_fetch_sub_release(s64 i, atomic64_t *v) | |
b06ed71a | 1041 | { |
e75a6795 | 1042 | __atomic_check_write(v, sizeof(*v)); |
aa525d06 | 1043 | return arch_atomic64_fetch_sub_release(i, v); |
b06ed71a | 1044 | } |
aa525d06 MR |
1045 | #define atomic64_fetch_sub_release atomic64_fetch_sub_release |
1046 | #endif | |
b06ed71a | 1047 | |
aa525d06 MR |
1048 | #if defined(arch_atomic64_fetch_sub_relaxed) |
1049 | static inline s64 | |
1050 | atomic64_fetch_sub_relaxed(s64 i, atomic64_t *v) | |
b06ed71a | 1051 | { |
e75a6795 | 1052 | __atomic_check_write(v, sizeof(*v)); |
aa525d06 | 1053 | return arch_atomic64_fetch_sub_relaxed(i, v); |
b06ed71a | 1054 | } |
aa525d06 MR |
1055 | #define atomic64_fetch_sub_relaxed atomic64_fetch_sub_relaxed |
1056 | #endif | |
b06ed71a | 1057 | |
aa525d06 MR |
/*
 * atomic64 inc family: every wrapper performs a KASAN/KCSAN write check on
 * *v and then forwards to the corresponding arch_ operation.
 */
#if defined(arch_atomic64_inc)
static inline void
atomic64_inc(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	arch_atomic64_inc(v);
}
#define atomic64_inc atomic64_inc
#endif

#if defined(arch_atomic64_inc_return)
static inline s64
atomic64_inc_return(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_inc_return(v);
}
#define atomic64_inc_return atomic64_inc_return
#endif

#if defined(arch_atomic64_inc_return_acquire)
static inline s64
atomic64_inc_return_acquire(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_inc_return_acquire(v);
}
#define atomic64_inc_return_acquire atomic64_inc_return_acquire
#endif

#if defined(arch_atomic64_inc_return_release)
static inline s64
atomic64_inc_return_release(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_inc_return_release(v);
}
#define atomic64_inc_return_release atomic64_inc_return_release
#endif

#if defined(arch_atomic64_inc_return_relaxed)
static inline s64
atomic64_inc_return_relaxed(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_inc_return_relaxed(v);
}
#define atomic64_inc_return_relaxed atomic64_inc_return_relaxed
#endif

#if defined(arch_atomic64_fetch_inc)
static inline s64
atomic64_fetch_inc(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_inc(v);
}
#define atomic64_fetch_inc atomic64_fetch_inc
#endif

#if defined(arch_atomic64_fetch_inc_acquire)
static inline s64
atomic64_fetch_inc_acquire(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_inc_acquire(v);
}
#define atomic64_fetch_inc_acquire atomic64_fetch_inc_acquire
#endif

#if defined(arch_atomic64_fetch_inc_release)
static inline s64
atomic64_fetch_inc_release(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_inc_release(v);
}
#define atomic64_fetch_inc_release atomic64_fetch_inc_release
#endif

#if defined(arch_atomic64_fetch_inc_relaxed)
static inline s64
atomic64_fetch_inc_relaxed(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_inc_relaxed(v);
}
#define atomic64_fetch_inc_relaxed atomic64_fetch_inc_relaxed
#endif
b06ed71a | 1147 | |
aa525d06 MR |
/*
 * atomic64 dec family: every wrapper performs a KASAN/KCSAN write check on
 * *v and then forwards to the corresponding arch_ operation.
 */
#if defined(arch_atomic64_dec)
static inline void
atomic64_dec(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	arch_atomic64_dec(v);
}
#define atomic64_dec atomic64_dec
#endif

#if defined(arch_atomic64_dec_return)
static inline s64
atomic64_dec_return(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_dec_return(v);
}
#define atomic64_dec_return atomic64_dec_return
#endif

#if defined(arch_atomic64_dec_return_acquire)
static inline s64
atomic64_dec_return_acquire(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_dec_return_acquire(v);
}
#define atomic64_dec_return_acquire atomic64_dec_return_acquire
#endif

#if defined(arch_atomic64_dec_return_release)
static inline s64
atomic64_dec_return_release(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_dec_return_release(v);
}
#define atomic64_dec_return_release atomic64_dec_return_release
#endif

#if defined(arch_atomic64_dec_return_relaxed)
static inline s64
atomic64_dec_return_relaxed(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_dec_return_relaxed(v);
}
#define atomic64_dec_return_relaxed atomic64_dec_return_relaxed
#endif

#if defined(arch_atomic64_fetch_dec)
static inline s64
atomic64_fetch_dec(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_dec(v);
}
#define atomic64_fetch_dec atomic64_fetch_dec
#endif

#if defined(arch_atomic64_fetch_dec_acquire)
static inline s64
atomic64_fetch_dec_acquire(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_dec_acquire(v);
}
#define atomic64_fetch_dec_acquire atomic64_fetch_dec_acquire
#endif

#if defined(arch_atomic64_fetch_dec_release)
static inline s64
atomic64_fetch_dec_release(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_dec_release(v);
}
#define atomic64_fetch_dec_release atomic64_fetch_dec_release
#endif

#if defined(arch_atomic64_fetch_dec_relaxed)
static inline s64
atomic64_fetch_dec_relaxed(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_dec_relaxed(v);
}
#define atomic64_fetch_dec_relaxed atomic64_fetch_dec_relaxed
#endif
b06ed71a | 1237 | |
aa525d06 MR |
1238 | static inline void |
1239 | atomic64_and(s64 i, atomic64_t *v) | |
b06ed71a | 1240 | { |
e75a6795 | 1241 | __atomic_check_write(v, sizeof(*v)); |
b06ed71a DV |
1242 | arch_atomic64_and(i, v); |
1243 | } | |
aa525d06 | 1244 | #define atomic64_and atomic64_and |
b06ed71a | 1245 | |
aa525d06 MR |
1246 | #if !defined(arch_atomic64_fetch_and_relaxed) || defined(arch_atomic64_fetch_and) |
1247 | static inline s64 | |
1248 | atomic64_fetch_and(s64 i, atomic64_t *v) | |
b06ed71a | 1249 | { |
e75a6795 | 1250 | __atomic_check_write(v, sizeof(*v)); |
aa525d06 | 1251 | return arch_atomic64_fetch_and(i, v); |
b06ed71a | 1252 | } |
aa525d06 MR |
1253 | #define atomic64_fetch_and atomic64_fetch_and |
1254 | #endif | |
b06ed71a | 1255 | |
aa525d06 MR |
1256 | #if defined(arch_atomic64_fetch_and_acquire) |
1257 | static inline s64 | |
1258 | atomic64_fetch_and_acquire(s64 i, atomic64_t *v) | |
b06ed71a | 1259 | { |
e75a6795 | 1260 | __atomic_check_write(v, sizeof(*v)); |
aa525d06 | 1261 | return arch_atomic64_fetch_and_acquire(i, v); |
b06ed71a | 1262 | } |
aa525d06 MR |
1263 | #define atomic64_fetch_and_acquire atomic64_fetch_and_acquire |
1264 | #endif | |
b06ed71a | 1265 | |
aa525d06 MR |
1266 | #if defined(arch_atomic64_fetch_and_release) |
1267 | static inline s64 | |
1268 | atomic64_fetch_and_release(s64 i, atomic64_t *v) | |
b06ed71a | 1269 | { |
e75a6795 | 1270 | __atomic_check_write(v, sizeof(*v)); |
aa525d06 | 1271 | return arch_atomic64_fetch_and_release(i, v); |
b06ed71a | 1272 | } |
aa525d06 MR |
1273 | #define atomic64_fetch_and_release atomic64_fetch_and_release |
1274 | #endif | |
b06ed71a | 1275 | |
aa525d06 MR |
1276 | #if defined(arch_atomic64_fetch_and_relaxed) |
1277 | static inline s64 | |
1278 | atomic64_fetch_and_relaxed(s64 i, atomic64_t *v) | |
b06ed71a | 1279 | { |
e75a6795 | 1280 | __atomic_check_write(v, sizeof(*v)); |
aa525d06 | 1281 | return arch_atomic64_fetch_and_relaxed(i, v); |
b06ed71a | 1282 | } |
aa525d06 MR |
1283 | #define atomic64_fetch_and_relaxed atomic64_fetch_and_relaxed |
1284 | #endif | |
b06ed71a | 1285 | |
aa525d06 MR |
/*
 * atomic64 andnot family: every wrapper performs a KASAN/KCSAN write check
 * on *v and then forwards to the corresponding arch_ operation.
 */
#if defined(arch_atomic64_andnot)
static inline void
atomic64_andnot(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	arch_atomic64_andnot(i, v);
}
#define atomic64_andnot atomic64_andnot
#endif

#if defined(arch_atomic64_fetch_andnot)
static inline s64
atomic64_fetch_andnot(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_andnot(i, v);
}
#define atomic64_fetch_andnot atomic64_fetch_andnot
#endif

#if defined(arch_atomic64_fetch_andnot_acquire)
static inline s64
atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_andnot_acquire(i, v);
}
#define atomic64_fetch_andnot_acquire atomic64_fetch_andnot_acquire
#endif

#if defined(arch_atomic64_fetch_andnot_release)
static inline s64
atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_andnot_release(i, v);
}
#define atomic64_fetch_andnot_release atomic64_fetch_andnot_release
#endif

#if defined(arch_atomic64_fetch_andnot_relaxed)
static inline s64
atomic64_fetch_andnot_relaxed(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_andnot_relaxed(i, v);
}
#define atomic64_fetch_andnot_relaxed atomic64_fetch_andnot_relaxed
#endif
b06ed71a | 1335 | |
aa525d06 MR |
1336 | static inline void |
1337 | atomic64_or(s64 i, atomic64_t *v) | |
b06ed71a | 1338 | { |
e75a6795 | 1339 | __atomic_check_write(v, sizeof(*v)); |
aa525d06 MR |
1340 | arch_atomic64_or(i, v); |
1341 | } | |
1342 | #define atomic64_or atomic64_or | |
1343 | ||
1344 | #if !defined(arch_atomic64_fetch_or_relaxed) || defined(arch_atomic64_fetch_or) | |
1345 | static inline s64 | |
1346 | atomic64_fetch_or(s64 i, atomic64_t *v) | |
1347 | { | |
e75a6795 | 1348 | __atomic_check_write(v, sizeof(*v)); |
aa525d06 | 1349 | return arch_atomic64_fetch_or(i, v); |
b06ed71a | 1350 | } |
aa525d06 | 1351 | #define atomic64_fetch_or atomic64_fetch_or |
b3a2a05f | 1352 | #endif |
b06ed71a | 1353 | |
aa525d06 MR |
1354 | #if defined(arch_atomic64_fetch_or_acquire) |
1355 | static inline s64 | |
1356 | atomic64_fetch_or_acquire(s64 i, atomic64_t *v) | |
b06ed71a | 1357 | { |
e75a6795 | 1358 | __atomic_check_write(v, sizeof(*v)); |
aa525d06 | 1359 | return arch_atomic64_fetch_or_acquire(i, v); |
b06ed71a | 1360 | } |
aa525d06 | 1361 | #define atomic64_fetch_or_acquire atomic64_fetch_or_acquire |
18cc1814 | 1362 | #endif |
b06ed71a | 1363 | |
aa525d06 MR |
1364 | #if defined(arch_atomic64_fetch_or_release) |
1365 | static inline s64 | |
1366 | atomic64_fetch_or_release(s64 i, atomic64_t *v) | |
b06ed71a | 1367 | { |
e75a6795 | 1368 | __atomic_check_write(v, sizeof(*v)); |
aa525d06 | 1369 | return arch_atomic64_fetch_or_release(i, v); |
b06ed71a | 1370 | } |
aa525d06 | 1371 | #define atomic64_fetch_or_release atomic64_fetch_or_release |
18cc1814 | 1372 | #endif |
b06ed71a | 1373 | |
aa525d06 MR |
1374 | #if defined(arch_atomic64_fetch_or_relaxed) |
1375 | static inline s64 | |
1376 | atomic64_fetch_or_relaxed(s64 i, atomic64_t *v) | |
b06ed71a | 1377 | { |
e75a6795 | 1378 | __atomic_check_write(v, sizeof(*v)); |
aa525d06 | 1379 | return arch_atomic64_fetch_or_relaxed(i, v); |
b06ed71a | 1380 | } |
aa525d06 | 1381 | #define atomic64_fetch_or_relaxed atomic64_fetch_or_relaxed |
18cc1814 | 1382 | #endif |
b06ed71a | 1383 | |
aa525d06 MR |
1384 | static inline void |
1385 | atomic64_xor(s64 i, atomic64_t *v) | |
b06ed71a | 1386 | { |
e75a6795 | 1387 | __atomic_check_write(v, sizeof(*v)); |
aa525d06 MR |
1388 | arch_atomic64_xor(i, v); |
1389 | } | |
1390 | #define atomic64_xor atomic64_xor | |
1391 | ||
1392 | #if !defined(arch_atomic64_fetch_xor_relaxed) || defined(arch_atomic64_fetch_xor) | |
1393 | static inline s64 | |
1394 | atomic64_fetch_xor(s64 i, atomic64_t *v) | |
1395 | { | |
e75a6795 | 1396 | __atomic_check_write(v, sizeof(*v)); |
aa525d06 | 1397 | return arch_atomic64_fetch_xor(i, v); |
b06ed71a | 1398 | } |
aa525d06 | 1399 | #define atomic64_fetch_xor atomic64_fetch_xor |
18cc1814 | 1400 | #endif |
b06ed71a | 1401 | |
aa525d06 MR |
1402 | #if defined(arch_atomic64_fetch_xor_acquire) |
1403 | static inline s64 | |
1404 | atomic64_fetch_xor_acquire(s64 i, atomic64_t *v) | |
b06ed71a | 1405 | { |
e75a6795 | 1406 | __atomic_check_write(v, sizeof(*v)); |
aa525d06 | 1407 | return arch_atomic64_fetch_xor_acquire(i, v); |
b06ed71a | 1408 | } |
aa525d06 MR |
1409 | #define atomic64_fetch_xor_acquire atomic64_fetch_xor_acquire |
1410 | #endif | |
b06ed71a | 1411 | |
aa525d06 MR |
1412 | #if defined(arch_atomic64_fetch_xor_release) |
1413 | static inline s64 | |
1414 | atomic64_fetch_xor_release(s64 i, atomic64_t *v) | |
b06ed71a | 1415 | { |
e75a6795 | 1416 | __atomic_check_write(v, sizeof(*v)); |
aa525d06 | 1417 | return arch_atomic64_fetch_xor_release(i, v); |
b06ed71a | 1418 | } |
aa525d06 MR |
1419 | #define atomic64_fetch_xor_release atomic64_fetch_xor_release |
1420 | #endif | |
b06ed71a | 1421 | |
aa525d06 MR |
1422 | #if defined(arch_atomic64_fetch_xor_relaxed) |
1423 | static inline s64 | |
1424 | atomic64_fetch_xor_relaxed(s64 i, atomic64_t *v) | |
b06ed71a | 1425 | { |
e75a6795 | 1426 | __atomic_check_write(v, sizeof(*v)); |
aa525d06 | 1427 | return arch_atomic64_fetch_xor_relaxed(i, v); |
b06ed71a | 1428 | } |
aa525d06 MR |
1429 | #define atomic64_fetch_xor_relaxed atomic64_fetch_xor_relaxed |
1430 | #endif | |
b06ed71a | 1431 | |
aa525d06 MR |
1432 | #if !defined(arch_atomic64_xchg_relaxed) || defined(arch_atomic64_xchg) |
1433 | static inline s64 | |
1434 | atomic64_xchg(atomic64_t *v, s64 i) | |
b06ed71a | 1435 | { |
e75a6795 | 1436 | __atomic_check_write(v, sizeof(*v)); |
aa525d06 | 1437 | return arch_atomic64_xchg(v, i); |
b06ed71a | 1438 | } |
aa525d06 MR |
1439 | #define atomic64_xchg atomic64_xchg |
1440 | #endif | |
b06ed71a | 1441 | |
aa525d06 MR |
1442 | #if defined(arch_atomic64_xchg_acquire) |
1443 | static inline s64 | |
1444 | atomic64_xchg_acquire(atomic64_t *v, s64 i) | |
b06ed71a | 1445 | { |
e75a6795 | 1446 | __atomic_check_write(v, sizeof(*v)); |
aa525d06 | 1447 | return arch_atomic64_xchg_acquire(v, i); |
b06ed71a | 1448 | } |
aa525d06 MR |
1449 | #define atomic64_xchg_acquire atomic64_xchg_acquire |
1450 | #endif | |
b06ed71a | 1451 | |
aa525d06 MR |
1452 | #if defined(arch_atomic64_xchg_release) |
1453 | static inline s64 | |
1454 | atomic64_xchg_release(atomic64_t *v, s64 i) | |
b06ed71a | 1455 | { |
e75a6795 | 1456 | __atomic_check_write(v, sizeof(*v)); |
aa525d06 | 1457 | return arch_atomic64_xchg_release(v, i); |
b06ed71a | 1458 | } |
aa525d06 MR |
1459 | #define atomic64_xchg_release atomic64_xchg_release |
1460 | #endif | |
b06ed71a | 1461 | |
aa525d06 MR |
1462 | #if defined(arch_atomic64_xchg_relaxed) |
1463 | static inline s64 | |
1464 | atomic64_xchg_relaxed(atomic64_t *v, s64 i) | |
b06ed71a | 1465 | { |
e75a6795 | 1466 | __atomic_check_write(v, sizeof(*v)); |
aa525d06 | 1467 | return arch_atomic64_xchg_relaxed(v, i); |
b06ed71a | 1468 | } |
aa525d06 MR |
1469 | #define atomic64_xchg_relaxed atomic64_xchg_relaxed |
1470 | #endif | |
b06ed71a | 1471 | |
aa525d06 MR |
1472 | #if !defined(arch_atomic64_cmpxchg_relaxed) || defined(arch_atomic64_cmpxchg) |
1473 | static inline s64 | |
1474 | atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new) | |
b06ed71a | 1475 | { |
e75a6795 | 1476 | __atomic_check_write(v, sizeof(*v)); |
aa525d06 | 1477 | return arch_atomic64_cmpxchg(v, old, new); |
b06ed71a | 1478 | } |
aa525d06 MR |
1479 | #define atomic64_cmpxchg atomic64_cmpxchg |
1480 | #endif | |
b06ed71a | 1481 | |
aa525d06 MR |
1482 | #if defined(arch_atomic64_cmpxchg_acquire) |
1483 | static inline s64 | |
1484 | atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new) | |
b06ed71a | 1485 | { |
e75a6795 | 1486 | __atomic_check_write(v, sizeof(*v)); |
aa525d06 | 1487 | return arch_atomic64_cmpxchg_acquire(v, old, new); |
b06ed71a | 1488 | } |
aa525d06 MR |
1489 | #define atomic64_cmpxchg_acquire atomic64_cmpxchg_acquire |
1490 | #endif | |
b06ed71a | 1491 | |
aa525d06 MR |
1492 | #if defined(arch_atomic64_cmpxchg_release) |
1493 | static inline s64 | |
1494 | atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new) | |
b06ed71a | 1495 | { |
e75a6795 | 1496 | __atomic_check_write(v, sizeof(*v)); |
aa525d06 | 1497 | return arch_atomic64_cmpxchg_release(v, old, new); |
b06ed71a | 1498 | } |
aa525d06 MR |
1499 | #define atomic64_cmpxchg_release atomic64_cmpxchg_release |
1500 | #endif | |
b06ed71a | 1501 | |
aa525d06 MR |
1502 | #if defined(arch_atomic64_cmpxchg_relaxed) |
1503 | static inline s64 | |
1504 | atomic64_cmpxchg_relaxed(atomic64_t *v, s64 old, s64 new) | |
b06ed71a | 1505 | { |
e75a6795 | 1506 | __atomic_check_write(v, sizeof(*v)); |
aa525d06 | 1507 | return arch_atomic64_cmpxchg_relaxed(v, old, new); |
b06ed71a | 1508 | } |
aa525d06 MR |
1509 | #define atomic64_cmpxchg_relaxed atomic64_cmpxchg_relaxed |
1510 | #endif | |
b06ed71a | 1511 | |
aa525d06 MR |
/*
 * atomic64 try_cmpxchg family: both *v and *old are written (the observed
 * value is stored back through @old on failure), so each wrapper performs a
 * KASAN/KCSAN write check on both before forwarding to the arch_ operation.
 */
#if defined(arch_atomic64_try_cmpxchg)
static inline bool
atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{
	__atomic_check_write(v, sizeof(*v));
	__atomic_check_write(old, sizeof(*old));
	return arch_atomic64_try_cmpxchg(v, old, new);
}
#define atomic64_try_cmpxchg atomic64_try_cmpxchg
#endif

#if defined(arch_atomic64_try_cmpxchg_acquire)
static inline bool
atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
{
	__atomic_check_write(v, sizeof(*v));
	__atomic_check_write(old, sizeof(*old));
	return arch_atomic64_try_cmpxchg_acquire(v, old, new);
}
#define atomic64_try_cmpxchg_acquire atomic64_try_cmpxchg_acquire
#endif

#if defined(arch_atomic64_try_cmpxchg_release)
static inline bool
atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
{
	__atomic_check_write(v, sizeof(*v));
	__atomic_check_write(old, sizeof(*old));
	return arch_atomic64_try_cmpxchg_release(v, old, new);
}
#define atomic64_try_cmpxchg_release atomic64_try_cmpxchg_release
#endif

#if defined(arch_atomic64_try_cmpxchg_relaxed)
static inline bool
atomic64_try_cmpxchg_relaxed(atomic64_t *v, s64 *old, s64 new)
{
	__atomic_check_write(v, sizeof(*v));
	__atomic_check_write(old, sizeof(*old));
	return arch_atomic64_try_cmpxchg_relaxed(v, old, new);
}
#define atomic64_try_cmpxchg_relaxed atomic64_try_cmpxchg_relaxed
#endif
b06ed71a | 1555 | |
aa525d06 MR |
/*
 * atomic64 test/predicate wrappers: KASAN/KCSAN write-check *v, then
 * forward to the corresponding arch_ operation.
 */
#if defined(arch_atomic64_sub_and_test)
static inline bool
atomic64_sub_and_test(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_sub_and_test(i, v);
}
#define atomic64_sub_and_test atomic64_sub_and_test
#endif

#if defined(arch_atomic64_dec_and_test)
static inline bool
atomic64_dec_and_test(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_dec_and_test(v);
}
#define atomic64_dec_and_test atomic64_dec_and_test
#endif

#if defined(arch_atomic64_inc_and_test)
static inline bool
atomic64_inc_and_test(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_inc_and_test(v);
}
#define atomic64_inc_and_test atomic64_inc_and_test
#endif

#if defined(arch_atomic64_add_negative)
static inline bool
atomic64_add_negative(s64 i, atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_add_negative(i, v);
}
#define atomic64_add_negative atomic64_add_negative
#endif
1595 | ||
/*
 * atomic64 conditional-add wrappers: KASAN/KCSAN write-check *v, then
 * forward to the corresponding arch_ operation.
 */
#if defined(arch_atomic64_fetch_add_unless)
static inline s64
atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_fetch_add_unless(v, a, u);
}
#define atomic64_fetch_add_unless atomic64_fetch_add_unless
#endif

#if defined(arch_atomic64_add_unless)
static inline bool
atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_add_unless(v, a, u);
}
#define atomic64_add_unless atomic64_add_unless
#endif

#if defined(arch_atomic64_inc_not_zero)
static inline bool
atomic64_inc_not_zero(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_inc_not_zero(v);
}
#define atomic64_inc_not_zero atomic64_inc_not_zero
#endif

#if defined(arch_atomic64_inc_unless_negative)
static inline bool
atomic64_inc_unless_negative(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_inc_unless_negative(v);
}
#define atomic64_inc_unless_negative atomic64_inc_unless_negative
#endif
b06ed71a | 1635 | |
aa525d06 MR |
#if defined(arch_atomic64_dec_unless_positive)
/* Instrumented atomic64_dec_unless_positive(): sanitizer write check, then arch op. */
static inline bool
atomic64_dec_unless_positive(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_dec_unless_positive(v);
}
#define atomic64_dec_unless_positive atomic64_dec_unless_positive
#endif
1645 | ||
#if defined(arch_atomic64_dec_if_positive)
/* Instrumented atomic64_dec_if_positive(): sanitizer write check, then arch op. */
static inline s64
atomic64_dec_if_positive(atomic64_t *v)
{
	__atomic_check_write(v, sizeof(*v));
	return arch_atomic64_dec_if_positive(v);
}
#define atomic64_dec_if_positive atomic64_dec_if_positive
#endif
1655 | ||
#if !defined(arch_xchg_relaxed) || defined(arch_xchg)
/*
 * Instrumented xchg(): ptr is evaluated exactly once into __ai_ptr, the
 * access is reported to KASAN/KCSAN, then the arch primitive is invoked.
 */
#define xchg(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	__atomic_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_xchg(__ai_ptr, __VA_ARGS__);				\
})
#endif
1664 | ||
#if defined(arch_xchg_acquire)
/* Instrumented xchg_acquire(): single evaluation of ptr, sanitizer check, arch op. */
#define xchg_acquire(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	__atomic_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_xchg_acquire(__ai_ptr, __VA_ARGS__);			\
})
#endif
1673 | ||
#if defined(arch_xchg_release)
/* Instrumented xchg_release(): single evaluation of ptr, sanitizer check, arch op. */
#define xchg_release(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	__atomic_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_xchg_release(__ai_ptr, __VA_ARGS__);			\
})
#endif
1682 | ||
#if defined(arch_xchg_relaxed)
/* Instrumented xchg_relaxed(): single evaluation of ptr, sanitizer check, arch op. */
#define xchg_relaxed(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	__atomic_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_xchg_relaxed(__ai_ptr, __VA_ARGS__);			\
})
#endif
1691 | ||
#if !defined(arch_cmpxchg_relaxed) || defined(arch_cmpxchg)
/*
 * Instrumented cmpxchg(): ptr is evaluated exactly once into __ai_ptr, the
 * access is reported to KASAN/KCSAN, then the arch primitive is invoked.
 */
#define cmpxchg(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	__atomic_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg(__ai_ptr, __VA_ARGS__);				\
})
#endif
1700 | ||
#if defined(arch_cmpxchg_acquire)
/* Instrumented cmpxchg_acquire(): single evaluation of ptr, sanitizer check, arch op. */
#define cmpxchg_acquire(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	__atomic_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg_acquire(__ai_ptr, __VA_ARGS__);			\
})
#endif
1709 | ||
#if defined(arch_cmpxchg_release)
/* Instrumented cmpxchg_release(): single evaluation of ptr, sanitizer check, arch op. */
#define cmpxchg_release(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	__atomic_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg_release(__ai_ptr, __VA_ARGS__);			\
})
#endif
1718 | ||
#if defined(arch_cmpxchg_relaxed)
/* Instrumented cmpxchg_relaxed(): single evaluation of ptr, sanitizer check, arch op. */
#define cmpxchg_relaxed(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	__atomic_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg_relaxed(__ai_ptr, __VA_ARGS__);			\
})
#endif
f9881cc4 | 1727 | |
aa525d06 MR |
#if !defined(arch_cmpxchg64_relaxed) || defined(arch_cmpxchg64)
/* Instrumented cmpxchg64(): single evaluation of ptr, sanitizer check, arch op. */
#define cmpxchg64(ptr, ...)						\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	__atomic_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg64(__ai_ptr, __VA_ARGS__);				\
})
#endif
1736 | ||
#if defined(arch_cmpxchg64_acquire)
/* Instrumented cmpxchg64_acquire(): single evaluation of ptr, sanitizer check, arch op. */
#define cmpxchg64_acquire(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	__atomic_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg64_acquire(__ai_ptr, __VA_ARGS__);			\
})
#endif
1745 | ||
#if defined(arch_cmpxchg64_release)
/* Instrumented cmpxchg64_release(): single evaluation of ptr, sanitizer check, arch op. */
#define cmpxchg64_release(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	__atomic_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg64_release(__ai_ptr, __VA_ARGS__);			\
})
#endif
b06ed71a | 1754 | |
aa525d06 MR |
#if defined(arch_cmpxchg64_relaxed)
/* Instrumented cmpxchg64_relaxed(): single evaluation of ptr, sanitizer check, arch op. */
#define cmpxchg64_relaxed(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	__atomic_check_write(__ai_ptr, sizeof(*__ai_ptr));		\
	arch_cmpxchg64_relaxed(__ai_ptr, __VA_ARGS__);			\
})
#endif
b06ed71a | 1763 | |
aa525d06 | 1764 | #define cmpxchg_local(ptr, ...) \ |
b06ed71a | 1765 | ({ \ |
df79ed2c | 1766 | typeof(ptr) __ai_ptr = (ptr); \ |
e75a6795 | 1767 | __atomic_check_write(__ai_ptr, sizeof(*__ai_ptr)); \ |
aa525d06 | 1768 | arch_cmpxchg_local(__ai_ptr, __VA_ARGS__); \ |
b06ed71a DV |
1769 | }) |
1770 | ||
aa525d06 | 1771 | #define cmpxchg64_local(ptr, ...) \ |
b06ed71a | 1772 | ({ \ |
df79ed2c | 1773 | typeof(ptr) __ai_ptr = (ptr); \ |
e75a6795 | 1774 | __atomic_check_write(__ai_ptr, sizeof(*__ai_ptr)); \ |
aa525d06 | 1775 | arch_cmpxchg64_local(__ai_ptr, __VA_ARGS__); \ |
b06ed71a DV |
1776 | }) |
1777 | ||
aa525d06 | 1778 | #define sync_cmpxchg(ptr, ...) \ |
b06ed71a | 1779 | ({ \ |
df79ed2c | 1780 | typeof(ptr) __ai_ptr = (ptr); \ |
e75a6795 | 1781 | __atomic_check_write(__ai_ptr, sizeof(*__ai_ptr)); \ |
aa525d06 | 1782 | arch_sync_cmpxchg(__ai_ptr, __VA_ARGS__); \ |
b06ed71a DV |
1783 | }) |
1784 | ||
aa525d06 | 1785 | #define cmpxchg_double(ptr, ...) \ |
b06ed71a | 1786 | ({ \ |
aa525d06 | 1787 | typeof(ptr) __ai_ptr = (ptr); \ |
e75a6795 | 1788 | __atomic_check_write(__ai_ptr, 2 * sizeof(*__ai_ptr)); \ |
aa525d06 | 1789 | arch_cmpxchg_double(__ai_ptr, __VA_ARGS__); \ |
b06ed71a DV |
1790 | }) |
1791 | ||
aa525d06 MR |
1792 | |
/*
 * Instrumented cmpxchg_double_local(): as cmpxchg_double(), the check
 * spans 2 * sizeof(*ptr) for the two adjacent elements being accessed.
 */
#define cmpxchg_double_local(ptr, ...)					\
({									\
	typeof(ptr) __ai_ptr = (ptr);					\
	__atomic_check_write(__ai_ptr, 2 * sizeof(*__ai_ptr));		\
	arch_cmpxchg_double_local(__ai_ptr, __VA_ARGS__);		\
})
1799 | ||
aa525d06 | 1800 | #endif /* _ASM_GENERIC_ATOMIC_INSTRUMENTED_H */ |
e75a6795 | 1801 | // beea41c2a0f2c69e4958ed71bf26f59740fa4b12 |