Commit | Line | Data |
---|---|---|
b2441318 | 1 | // SPDX-License-Identifier: GPL-2.0 |
1da177e4 LT |
2 | /* |
3 | * atomic32.c: 32-bit atomic_t implementation | |
4 | * | |
5 | * Copyright (C) 2004 Keith M Wesolowski | |
6197fe4d | 6 | * Copyright (C) 2007 Kyle McMartin |
1da177e4 LT |
7 | * |
8 | * Based on asm-parisc/atomic.h Copyright (C) 2000 Philipp Rumpf | |
9 | */ | |
10 | ||
60063497 | 11 | #include <linux/atomic.h> |
1da177e4 LT |
12 | #include <linux/spinlock.h> |
13 | #include <linux/module.h> | |
14 | ||
#ifdef CONFIG_SMP
/*
 * sparc32 has no hardware atomic RMW instructions usable here, so atomics
 * are emulated by taking a spinlock.  To bound contention without a lock
 * per atomic_t, the atomic's address is hashed (bits 8-9) into a small
 * table of 4 locks.
 */
#define ATOMIC_HASH_SIZE	4
#define ATOMIC_HASH(a)	(&__atomic_hash[(((unsigned long)a)>>8) & (ATOMIC_HASH_SIZE-1)])

spinlock_t __atomic_hash[ATOMIC_HASH_SIZE] = {
	[0 ... (ATOMIC_HASH_SIZE-1)] = __SPIN_LOCK_UNLOCKED(__atomic_hash)
};

#else /* SMP */

/*
 * On UP, spin_lock_irqsave() degenerates to a local irq-disable, so a
 * single dummy lock is sufficient; it exists only so the code below
 * compiles unchanged in both configurations.
 */
static DEFINE_SPINLOCK(dummy);
#define ATOMIC_HASH_SIZE	1
#define ATOMIC_HASH(a)	(&dummy)

#endif /* SMP */
30 | ||
/*
 * Generate arch_atomic_fetch_<op>(): apply <op> (as the C compound
 * assignment c_op, e.g. +=) to v->counter under the hashed spinlock and
 * return the value the counter held *before* the operation
 * (fetch-and-op semantics).  Each instance is exported.
 */
#define ATOMIC_FETCH_OP(op, c_op)					\
int arch_atomic_fetch_##op(int i, atomic_t *v)				\
{									\
	int ret;							\
	unsigned long flags;						\
	spin_lock_irqsave(ATOMIC_HASH(v), flags);			\
									\
	ret = v->counter;						\
	v->counter c_op i;						\
									\
	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);			\
	return ret;							\
}									\
EXPORT_SYMBOL(arch_atomic_fetch_##op);
4f3316c2 | 45 | |
/*
 * Generate arch_atomic_<op>_return(): apply <op> to v->counter under the
 * hashed spinlock and return the *new* value of the counter (op-and-return
 * semantics), unlike ATOMIC_FETCH_OP which returns the old value.
 */
#define ATOMIC_OP_RETURN(op, c_op)					\
int arch_atomic_##op##_return(int i, atomic_t *v)			\
{									\
	int ret;							\
	unsigned long flags;						\
	spin_lock_irqsave(ATOMIC_HASH(v), flags);			\
									\
	ret = (v->counter c_op i);					\
									\
	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);			\
	return ret;							\
}									\
EXPORT_SYMBOL(arch_atomic_##op##_return);
304a0d69 PZ |
59 | |
/* Instantiate the exported atomic primitives and retire the generators. */
ATOMIC_OP_RETURN(add, +=)

ATOMIC_FETCH_OP(add, +=)
ATOMIC_FETCH_OP(and, &=)
ATOMIC_FETCH_OP(or, |=)
ATOMIC_FETCH_OP(xor, ^=)

#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
1da177e4 | 69 | |
ff5b4f1e | 70 | int arch_atomic_xchg(atomic_t *v, int new) |
1a17fdc4 AL |
71 | { |
72 | int ret; | |
73 | unsigned long flags; | |
74 | ||
75 | spin_lock_irqsave(ATOMIC_HASH(v), flags); | |
76 | ret = v->counter; | |
77 | v->counter = new; | |
78 | spin_unlock_irqrestore(ATOMIC_HASH(v), flags); | |
79 | return ret; | |
80 | } | |
ff5b4f1e | 81 | EXPORT_SYMBOL(arch_atomic_xchg); |
1a17fdc4 | 82 | |
ff5b4f1e | 83 | int arch_atomic_cmpxchg(atomic_t *v, int old, int new) |
1da177e4 | 84 | { |
4a6dae6d | 85 | int ret; |
1da177e4 | 86 | unsigned long flags; |
1da177e4 | 87 | |
4a6dae6d NP |
88 | spin_lock_irqsave(ATOMIC_HASH(v), flags); |
89 | ret = v->counter; | |
90 | if (likely(ret == old)) | |
91 | v->counter = new; | |
1da177e4 LT |
92 | |
93 | spin_unlock_irqrestore(ATOMIC_HASH(v), flags); | |
4a6dae6d | 94 | return ret; |
1da177e4 | 95 | } |
ff5b4f1e | 96 | EXPORT_SYMBOL(arch_atomic_cmpxchg); |
1da177e4 | 97 | |
ff5b4f1e | 98 | int arch_atomic_fetch_add_unless(atomic_t *v, int a, int u) |
8426e1f6 NP |
99 | { |
100 | int ret; | |
101 | unsigned long flags; | |
102 | ||
103 | spin_lock_irqsave(ATOMIC_HASH(v), flags); | |
104 | ret = v->counter; | |
105 | if (ret != u) | |
106 | v->counter += a; | |
107 | spin_unlock_irqrestore(ATOMIC_HASH(v), flags); | |
a61b5829 | 108 | return ret; |
8426e1f6 | 109 | } |
ff5b4f1e | 110 | EXPORT_SYMBOL(arch_atomic_fetch_add_unless); |
8426e1f6 | 111 | |
/* Atomic operations are already serializing */
/*
 * arch_atomic_set - store @i into @v.  The store itself is a plain word
 * write, but it takes the same hashed spinlock as the RMW operations above
 * so it cannot land in the middle of one of them.
 */
void arch_atomic_set(atomic_t *v, int i)
{
	unsigned long flags;

	spin_lock_irqsave(ATOMIC_HASH(v), flags);
	v->counter = i;
	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);
}
EXPORT_SYMBOL(arch_atomic_set);
8a8b836b | 122 | |
/*
 * sp32___set_bit - OR @mask into *@addr under the hashed spinlock and
 * return the previously-set bits of @mask (non-zero iff any were set).
 */
unsigned long sp32___set_bit(unsigned long *addr, unsigned long mask)
{
	unsigned long flags;
	unsigned long prev;

	spin_lock_irqsave(ATOMIC_HASH(addr), flags);
	prev = *addr;
	*addr = prev | mask;
	spin_unlock_irqrestore(ATOMIC_HASH(addr), flags);

	return prev & mask;
}
EXPORT_SYMBOL(sp32___set_bit);
8a8b836b | 135 | |
/*
 * sp32___clear_bit - clear the @mask bits in *@addr under the hashed
 * spinlock and return the previously-set bits of @mask.
 */
unsigned long sp32___clear_bit(unsigned long *addr, unsigned long mask)
{
	unsigned long flags;
	unsigned long prev;

	spin_lock_irqsave(ATOMIC_HASH(addr), flags);
	prev = *addr;
	*addr = prev & ~mask;
	spin_unlock_irqrestore(ATOMIC_HASH(addr), flags);

	return prev & mask;
}
EXPORT_SYMBOL(sp32___clear_bit);
8a8b836b | 148 | |
/*
 * sp32___change_bit - toggle the @mask bits in *@addr under the hashed
 * spinlock and return the previously-set bits of @mask.
 */
unsigned long sp32___change_bit(unsigned long *addr, unsigned long mask)
{
	unsigned long flags;
	unsigned long prev;

	spin_lock_irqsave(ATOMIC_HASH(addr), flags);
	prev = *addr;
	*addr = prev ^ mask;
	spin_unlock_irqrestore(ATOMIC_HASH(addr), flags);

	return prev & mask;
}
EXPORT_SYMBOL(sp32___change_bit);
6197fe4d KM |
161 | |
162 | unsigned long __cmpxchg_u32(volatile u32 *ptr, u32 old, u32 new) | |
163 | { | |
164 | unsigned long flags; | |
165 | u32 prev; | |
166 | ||
1fb8812b | 167 | spin_lock_irqsave(ATOMIC_HASH(ptr), flags); |
6197fe4d KM |
168 | if ((prev = *ptr) == old) |
169 | *ptr = new; | |
1fb8812b | 170 | spin_unlock_irqrestore(ATOMIC_HASH(ptr), flags); |
6197fe4d KM |
171 | |
172 | return (unsigned long)prev; | |
173 | } | |
174 | EXPORT_SYMBOL(__cmpxchg_u32); | |
1a17fdc4 | 175 | |
23198ddf DM |
176 | u64 __cmpxchg_u64(u64 *ptr, u64 old, u64 new) |
177 | { | |
178 | unsigned long flags; | |
179 | u64 prev; | |
180 | ||
181 | spin_lock_irqsave(ATOMIC_HASH(ptr), flags); | |
182 | if ((prev = *ptr) == old) | |
183 | *ptr = new; | |
184 | spin_unlock_irqrestore(ATOMIC_HASH(ptr), flags); | |
185 | ||
186 | return prev; | |
187 | } | |
188 | EXPORT_SYMBOL(__cmpxchg_u64); | |
189 | ||
1a17fdc4 AL |
190 | unsigned long __xchg_u32(volatile u32 *ptr, u32 new) |
191 | { | |
192 | unsigned long flags; | |
193 | u32 prev; | |
194 | ||
195 | spin_lock_irqsave(ATOMIC_HASH(ptr), flags); | |
196 | prev = *ptr; | |
197 | *ptr = new; | |
198 | spin_unlock_irqrestore(ATOMIC_HASH(ptr), flags); | |
199 | ||
200 | return (unsigned long)prev; | |
201 | } | |
202 | EXPORT_SYMBOL(__xchg_u32); |