Commit | Line | Data |
---|---|---|
b4d0d230 | 1 | /* SPDX-License-Identifier: GPL-2.0-or-later */ |
3f7e212d | 2 | /* |
2609a195 MR |
3 | * Generic C implementation of atomic counter operations. Do not include in |
4 | * machine independent code. | |
acac43e2 | 5 | * |
3f7e212d AB |
6 | * Copyright (C) 2007 Red Hat, Inc. All Rights Reserved. |
7 | * Written by David Howells (dhowells@redhat.com) | |
3f7e212d AB |
8 | */ |
9 | #ifndef __ASM_GENERIC_ATOMIC_H | |
10 | #define __ASM_GENERIC_ATOMIC_H | |
11 | ||
34484277 | 12 | #include <asm/cmpxchg.h> |
febdbfe8 | 13 | #include <asm/barrier.h> |
34484277 | 14 | |
3f7e212d | 15 | #ifdef CONFIG_SMP |
560cb12a PZ |
16 | |
17 | /* we can build all atomic primitives from cmpxchg */ | |
18 | ||
/*
 * Generate a void RMW op, generic_atomic_##op(), from a cmpxchg retry
 * loop: re-read and retry until no other CPU has modified the counter
 * between our read and our compare-and-exchange.
 */
#define ATOMIC_OP(op, c_op)						\
static inline void generic_atomic_##op(int i, atomic_t *v)		\
{									\
	int expected, observed;						\
									\
	expected = v->counter;						\
	while ((observed = arch_cmpxchg(&v->counter, expected,		\
					expected c_op i)) != expected)	\
		expected = observed;					\
}
28 | ||
/*
 * Generate generic_atomic_##op##_return(): same cmpxchg retry loop as
 * ATOMIC_OP, but hand back the new counter value (the value that was
 * successfully installed).
 */
#define ATOMIC_OP_RETURN(op, c_op)					\
static inline int generic_atomic_##op##_return(int i, atomic_t *v)	\
{									\
	int expected, observed;						\
									\
	expected = v->counter;						\
	while ((observed = arch_cmpxchg(&v->counter, expected,		\
					expected c_op i)) != expected)	\
		expected = observed;					\
									\
	return expected c_op i;						\
}
40 | ||
/*
 * Generate generic_atomic_fetch_##op(): cmpxchg retry loop returning
 * the counter value observed *before* the operation was applied.
 */
#define ATOMIC_FETCH_OP(op, c_op)					\
static inline int generic_atomic_fetch_##op(int i, atomic_t *v)		\
{									\
	int expected, observed;						\
									\
	expected = v->counter;						\
	while ((observed = arch_cmpxchg(&v->counter, expected,		\
					expected c_op i)) != expected)	\
		expected = observed;					\
									\
	return expected;						\
}
52 | ||
560cb12a PZ |
53 | #else |
54 | ||
55 | #include <linux/irqflags.h> | |
56 | ||
/*
 * UP flavour: with only one CPU, disabling local interrupts around the
 * read-modify-write is enough to make it atomic.
 */
#define ATOMIC_OP(op, c_op)						\
static inline void generic_atomic_##op(int i, atomic_t *v)		\
{									\
	unsigned long irq_state;					\
									\
	raw_local_irq_save(irq_state);					\
	v->counter = v->counter c_op i;					\
	raw_local_irq_restore(irq_state);				\
}
66 | ||
/*
 * UP flavour of the value-returning op: irq-protected RMW, returning
 * the freshly written counter value.
 */
#define ATOMIC_OP_RETURN(op, c_op)					\
static inline int generic_atomic_##op##_return(int i, atomic_t *v)	\
{									\
	unsigned long irq_state;					\
	int result;							\
									\
	raw_local_irq_save(irq_state);					\
	result = (v->counter = v->counter c_op i);			\
	raw_local_irq_restore(irq_state);				\
									\
	return result;							\
}
79 | ||
/*
 * UP flavour of fetch-op: snapshot the counter under irq protection,
 * apply the op, and return the pre-op value.
 */
#define ATOMIC_FETCH_OP(op, c_op)					\
static inline int generic_atomic_fetch_##op(int i, atomic_t *v)		\
{									\
	unsigned long irq_state;					\
	int before;							\
									\
	raw_local_irq_save(irq_state);					\
	before = v->counter;						\
	v->counter = before c_op i;					\
	raw_local_irq_restore(irq_state);				\
									\
	return before;							\
}
93 | ||
560cb12a PZ |
94 | #endif /* CONFIG_SMP */ |
95 | ||
/*
 * Instantiate the generic helpers from whichever macro flavour was
 * selected above (SMP cmpxchg loop or UP irq-disable).
 */

/* add/sub variants returning the new value. */
ATOMIC_OP_RETURN(add, +)
ATOMIC_OP_RETURN(sub, -)

/* Fetch variants: return the counter value prior to the operation. */
ATOMIC_FETCH_OP(add, +)
ATOMIC_FETCH_OP(sub, -)
ATOMIC_FETCH_OP(and, &)
ATOMIC_FETCH_OP(or, |)
ATOMIC_FETCH_OP(xor, ^)

/* void variants: apply the operation, discard the result. */
ATOMIC_OP(add, +)
ATOMIC_OP(sub, -)
ATOMIC_OP(and, &)
ATOMIC_OP(or, |)
ATOMIC_OP(xor, ^)

/* The generator macros are internal; drop them from the namespace. */
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP
114 | ||
/*
 * Publish the generic implementations under the arch_atomic_*() names
 * that the generic <linux/atomic.h> layering consumes.
 */
#define arch_atomic_add_return generic_atomic_add_return
#define arch_atomic_sub_return generic_atomic_sub_return

#define arch_atomic_fetch_add generic_atomic_fetch_add
#define arch_atomic_fetch_sub generic_atomic_fetch_sub
#define arch_atomic_fetch_and generic_atomic_fetch_and
#define arch_atomic_fetch_or generic_atomic_fetch_or
#define arch_atomic_fetch_xor generic_atomic_fetch_xor

#define arch_atomic_add generic_atomic_add
#define arch_atomic_sub generic_atomic_sub
#define arch_atomic_and generic_atomic_and
#define arch_atomic_or generic_atomic_or
#define arch_atomic_xor generic_atomic_xor

/* Plain (non-RMW) accessors, tear-free via READ_ONCE/WRITE_ONCE. */
#define arch_atomic_read(v) READ_ONCE((v)->counter)
#define arch_atomic_set(v, i) WRITE_ONCE(((v)->counter), (i))

/*
 * xchg/cmpxchg forwarded to the arch primitives on the raw counter.
 * NOTE(review): the (u32) casts presumably silence signed/unsigned
 * comparison warnings in the underlying arch_{xchg,cmpxchg} — confirm
 * against the asm/cmpxchg.h implementation.
 */
#define arch_atomic_xchg(ptr, v) (arch_xchg(&(ptr)->counter, (u32)(v)))
#define arch_atomic_cmpxchg(v, old, new) (arch_cmpxchg(&((v)->counter), (u32)(old), (u32)(new)))
f8b6455a | 135 | |
3f7e212d | 136 | #endif /* __ASM_GENERIC_ATOMIC_H */ |