#ifndef _ASM_POWERPC_ATOMIC_H_
#define _ASM_POWERPC_ATOMIC_H_

/*
 * PowerPC atomic operations
 */

typedef struct { volatile int counter; } atomic_t;

#ifdef __KERNEL__
#include <linux/compiler.h>
#include <asm/synch.h>
#include <asm/asm-compat.h>

#define ATOMIC_INIT(i)		{ (i) }

#define atomic_read(v)		((v)->counter)
#define atomic_set(v,i)		(((v)->counter) = (i))

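/*
 * Illustrative usage only, not part of this header; the function name below
 * is hypothetical.  ATOMIC_INIT() is for static initialisation of an
 * atomic_t, while atomic_set() and atomic_read() are plain, unordered
 * accesses to the counter.
 */
static __inline__ int example_reset_and_read(atomic_t *v)
{
	atomic_set(v, 0);		/* plain store */
	return atomic_read(v);		/* plain load; no barrier implied */
}
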
static __inline__ void atomic_add(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_add\n\
	add	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (a), "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ int atomic_add_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%2		# atomic_add_return\n\
	add	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)

static __inline__ void atomic_sub(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_sub\n\
	subf	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (a), "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ int atomic_sub_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%2		# atomic_sub_return\n\
	subf	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

static __inline__ void atomic_inc(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_inc\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ int atomic_inc_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_inc_return\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

static __inline__ void atomic_dec(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_dec\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ int atomic_dec_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_dec_return\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

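/*
 * Illustrative sketch only, not part of this header: the usual
 * compare-and-swap retry loop built on atomic_cmpxchg().  The function name
 * is hypothetical.  atomic_cmpxchg() returns the value that was in *v before
 * the exchange, so the store succeeded iff that value equals 'old'.
 */
static __inline__ void example_atomic_max(atomic_t *v, int i)
{
	int old;

	do {
		old = atomic_read(v);
		if (old >= i)
			return;		/* current value already >= i */
	} while (atomic_cmpxchg(v, old, i) != old);
}
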
/**
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
{
	int t;

	__asm__ __volatile__ (
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_add_unless\n\
	cmpw	0,%0,%3 \n\
	beq-	2f \n\
	add	%0,%2,%0 \n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%1 \n\
	bne-	1b \n"
	ISYNC_ON_SMP
"	subf	%0,%2,%0 \n\
2:"
	: "=&r" (t)
	: "r" (&v->counter), "r" (a), "r" (u)
	: "cc", "memory");

	return t != u;
}

#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)

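/*
 * Illustrative sketch only, not part of this header: the typical use of
 * atomic_inc_not_zero() is taking a reference on an object found via a
 * lookup, but only if its refcount has not already dropped to zero (i.e.
 * the object is not being torn down).  'struct example_obj' and
 * example_obj_get() are hypothetical.
 */
struct example_obj {
	atomic_t	refcount;
};

static __inline__ int example_obj_get(struct example_obj *obj)
{
	/* non-zero if a reference was taken, 0 if the object was dead */
	return atomic_inc_not_zero(&obj->refcount);
}
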
#define atomic_sub_and_test(a, v)	(atomic_sub_return((a), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_dec_return((v)) == 0)

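/*
 * Illustrative sketch only, not part of this header: atomic_dec_and_test()
 * is the usual way to drop a reference and detect the final put.  The
 * function name is hypothetical and 'free_fn' stands in for whatever
 * reclaims the object (it reuses 'struct example_obj' from the sketch
 * above).
 */
static __inline__ void example_obj_put(struct example_obj *obj,
				       void (*free_fn)(struct example_obj *))
{
	if (atomic_dec_and_test(&obj->refcount))
		free_fn(obj);		/* last reference dropped */
}
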
/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1.
 */
static __inline__ int atomic_dec_if_positive(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_dec_if_positive\n\
	addic.	%0,%0,-1\n\
	blt-	2f\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	"\n\
2:"	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

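/*
 * Illustrative sketch only, not part of this header: atomic_dec_if_positive()
 * can hand out one unit from a counted pool without letting the count go
 * negative.  The function name is hypothetical.
 */
static __inline__ int example_take_one(atomic_t *available)
{
	/* result >= 0 means a unit was claimed; -1 means the pool was empty */
	return atomic_dec_if_positive(available) >= 0;
}
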
#define smp_mb__before_atomic_dec()	smp_mb()
#define smp_mb__after_atomic_dec()	smp_mb()
#define smp_mb__before_atomic_inc()	smp_mb()
#define smp_mb__after_atomic_inc()	smp_mb()

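/*
 * Illustrative sketch only, not part of this header: atomic_dec() and
 * atomic_inc() are not memory barriers by themselves, so callers that need
 * ordering bracket them with the macros above.  The flag and counter names
 * are hypothetical.
 */
static __inline__ void example_publish_then_dec(int *flag, atomic_t *pending)
{
	*flag = 1;			/* make this store visible ... */
	smp_mb__before_atomic_dec();	/* ... before the decrement is seen */
	atomic_dec(pending);
}
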
#ifdef __powerpc64__

typedef struct { volatile long counter; } atomic64_t;

#define ATOMIC64_INIT(i)	{ (i) }

#define atomic64_read(v)	((v)->counter)
#define atomic64_set(v,i)	(((v)->counter) = (i))

static __inline__ void atomic64_add(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_add\n\
	add	%0,%2,%0\n\
	stdcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (a), "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ long atomic64_add_return(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	ldarx	%0,0,%2		# atomic64_add_return\n\
	add	%0,%1,%0\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic64_add_negative(a, v)	(atomic64_add_return((a), (v)) < 0)

static __inline__ void atomic64_sub(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_sub\n\
	subf	%0,%2,%0\n\
	stdcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (a), "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ long atomic64_sub_return(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	ldarx	%0,0,%2		# atomic64_sub_return\n\
	subf	%0,%1,%0\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

static __inline__ void atomic64_inc(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_inc\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ long atomic64_inc_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	ldarx	%0,0,%1		# atomic64_inc_return\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%1 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

static __inline__ void atomic64_dec(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_dec\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ long atomic64_dec_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	ldarx	%0,0,%1		# atomic64_dec_return\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic64_sub_and_test(a, v)	(atomic64_sub_return((a), (v)) == 0)
#define atomic64_dec_and_test(v)	(atomic64_dec_return((v)) == 0)

/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1.
 */
static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	ldarx	%0,0,%1		# atomic64_dec_if_positive\n\
	addic.	%0,%0,-1\n\
	blt-	2f\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	"\n\
2:"	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#endif /* __powerpc64__ */

#include <asm-generic/atomic.h>
#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_ATOMIC_H_ */