#ifndef _ASM_POWERPC_ATOMIC_H_
#define _ASM_POWERPC_ATOMIC_H_

/*
 * PowerPC atomic operations
 */

#ifdef __KERNEL__
#include <linux/types.h>
#include <asm/cmpxchg.h>
#include <asm/barrier.h>

#define ATOMIC_INIT(i)		{ (i) }

static __inline__ int atomic_read(const atomic_t *v)
{
	int t;

	__asm__ __volatile__("lwz%U1%X1 %0,%1" : "=r"(t) : "m"(v->counter));

	return t;
}

static __inline__ void atomic_set(atomic_t *v, int i)
{
	__asm__ __volatile__("stw%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i));
}

#define ATOMIC_OP(op, asm_op)						\
static __inline__ void atomic_##op(int a, atomic_t *v)			\
{									\
	int t;								\
									\
	__asm__ __volatile__(						\
"1:	lwarx	%0,0,%3		# atomic_" #op "\n"			\
	#asm_op " %0,%2,%0\n"						\
	PPC405_ERR77(0,%3)						\
"	stwcx.	%0,0,%3 \n"						\
"	bne-	1b\n"							\
	: "=&r" (t), "+m" (v->counter)					\
	: "r" (a), "r" (&v->counter)					\
	: "cc");							\
}									\

#define ATOMIC_OP_RETURN(op, asm_op)					\
static __inline__ int atomic_##op##_return(int a, atomic_t *v)		\
{									\
	int t;								\
									\
	__asm__ __volatile__(						\
	PPC_ATOMIC_ENTRY_BARRIER					\
"1:	lwarx	%0,0,%2		# atomic_" #op "_return\n"		\
	#asm_op " %0,%1,%0\n"						\
	PPC405_ERR77(0,%2)						\
"	stwcx.	%0,0,%2 \n"						\
"	bne-	1b\n"							\
	PPC_ATOMIC_EXIT_BARRIER						\
	: "=&r" (t)							\
	: "r" (a), "r" (&v->counter)					\
	: "cc", "memory");						\
									\
	return t;							\
}

#define ATOMIC_OPS(op, asm_op) ATOMIC_OP(op, asm_op) ATOMIC_OP_RETURN(op, asm_op)

ATOMIC_OPS(add, add)
ATOMIC_OPS(sub, subf)

ATOMIC_OP(and, and)
ATOMIC_OP(or, or)
ATOMIC_OP(xor, xor)

#undef ATOMIC_OPS
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)
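
/*
 * Illustrative note (not part of the original header): the ATOMIC_OPS()
 * expansions above generate atomic_add(), atomic_add_return(), atomic_sub()
 * and atomic_sub_return(), while the bare ATOMIC_OP() expansions generate
 * the void-returning atomic_and(), atomic_or() and atomic_xor(). A caller
 * might use them roughly as follows (hypothetical example):
 *
 *	atomic_t refs = ATOMIC_INIT(1);
 *	atomic_add(2, &refs);
 *	if (atomic_sub_return(3, &refs) == 0)
 *		pr_debug("refs dropped to zero\n");
 */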

static __inline__ void atomic_inc(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_inc\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ int atomic_inc_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# atomic_inc_return\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

static __inline__ void atomic_dec(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_dec\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%2)\
"	stwcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ int atomic_dec_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# atomic_dec_return\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

/**
 * __atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns the old value of @v.
 */
static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int t;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# __atomic_add_unless\n\
	cmpw	0,%0,%3 \n\
	beq-	2f \n\
	add	%0,%2,%0 \n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%1 \n\
	bne-	1b \n"
	PPC_ATOMIC_EXIT_BARRIER
"	subf	%0,%2,%0 \n\
2:"
	: "=&r" (t)
	: "r" (&v->counter), "r" (a), "r" (u)
	: "cc", "memory");

	return t;
}
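
/*
 * Illustrative note (not part of the original header): the generic
 * atomic_add_unless() helper in include/linux/atomic.h is, to the best of
 * our understanding, built on top of this primitive roughly as
 * (__atomic_add_unless((v), (a), (u)) != (u)), so the value returned above
 * is the value of *v observed before any addition took place.
 */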

/**
 * atomic_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1, so long as @v is non-zero.
 * Returns non-zero if @v was non-zero, and zero otherwise.
 */
static __inline__ int atomic_inc_not_zero(atomic_t *v)
{
	int t1, t2;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%2		# atomic_inc_not_zero\n\
	cmpwi	0,%0,0\n\
	beq-	2f\n\
	addic	%1,%0,1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%1,0,%2\n\
	bne-	1b\n"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"
	: "=&r" (t1), "=&r" (t2)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t1;
}
#define atomic_inc_not_zero(v) atomic_inc_not_zero((v))
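
/*
 * Illustrative note (not part of the original header): defining the macro
 * atomic_inc_not_zero in terms of the inline function of the same name is,
 * as far as we can tell, how the architecture signals to the generic
 * include/linux/atomic.h that it already provides this operation, so the
 * generic atomic_add_unless()-based fallback is not instantiated.
 */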

#define atomic_sub_and_test(a, v) (atomic_sub_return((a), (v)) == 0)
#define atomic_dec_and_test(v) (atomic_dec_return((v)) == 0)

/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1, even if
 * the atomic variable, v, was not decremented.
 */
static __inline__ int atomic_dec_if_positive(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	lwarx	%0,0,%1		# atomic_dec_if_positive\n\
	cmpwi	%0,1\n\
	addi	%0,%0,-1\n\
	blt-	2f\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"	: "=&b" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}
#define atomic_dec_if_positive atomic_dec_if_positive
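
/*
 * Illustrative usage sketch (not part of the original header, names are
 * hypothetical): atomic_dec_if_positive() suits counters that must never
 * go below zero, e.g. a simple token pool:
 *
 *	if (atomic_dec_if_positive(&pool->tokens) < 0)
 *		return -EBUSY;	// nothing was taken, counter untouched
 *	// a token was consumed; do the work
 */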

#ifdef __powerpc64__

#define ATOMIC64_INIT(i)	{ (i) }

static __inline__ long atomic64_read(const atomic64_t *v)
{
	long t;

	__asm__ __volatile__("ld%U1%X1 %0,%1" : "=r"(t) : "m"(v->counter));

	return t;
}

static __inline__ void atomic64_set(atomic64_t *v, long i)
{
	__asm__ __volatile__("std%U0%X0 %1,%0" : "=m"(v->counter) : "r"(i));
}

#define ATOMIC64_OP(op, asm_op)						\
static __inline__ void atomic64_##op(long a, atomic64_t *v)		\
{									\
	long t;								\
									\
	__asm__ __volatile__(						\
"1:	ldarx	%0,0,%3		# atomic64_" #op "\n"			\
	#asm_op " %0,%2,%0\n"						\
"	stdcx.	%0,0,%3 \n"						\
"	bne-	1b\n"							\
	: "=&r" (t), "+m" (v->counter)					\
	: "r" (a), "r" (&v->counter)					\
	: "cc");							\
}

#define ATOMIC64_OP_RETURN(op, asm_op)					\
static __inline__ long atomic64_##op##_return(long a, atomic64_t *v)	\
{									\
	long t;								\
									\
	__asm__ __volatile__(						\
	PPC_ATOMIC_ENTRY_BARRIER					\
"1:	ldarx	%0,0,%2		# atomic64_" #op "_return\n"		\
	#asm_op " %0,%1,%0\n"						\
"	stdcx.	%0,0,%2 \n"						\
"	bne-	1b\n"							\
	PPC_ATOMIC_EXIT_BARRIER						\
	: "=&r" (t)							\
	: "r" (a), "r" (&v->counter)					\
	: "cc", "memory");						\
									\
	return t;							\
}

#define ATOMIC64_OPS(op, asm_op) ATOMIC64_OP(op, asm_op) ATOMIC64_OP_RETURN(op, asm_op)

ATOMIC64_OPS(add, add)
ATOMIC64_OPS(sub, subf)
ATOMIC64_OP(and, and)
ATOMIC64_OP(or, or)
ATOMIC64_OP(xor, xor)

#undef ATOMIC64_OPS
#undef ATOMIC64_OP_RETURN
#undef ATOMIC64_OP

#define atomic64_add_negative(a, v)	(atomic64_add_return((a), (v)) < 0)

static __inline__ void atomic64_inc(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_inc\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ long atomic64_inc_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_inc_return\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%1 \n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

static __inline__ void atomic64_dec(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_dec\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "+m" (v->counter)
	: "r" (&v->counter)
	: "cc", "xer");
}

static __inline__ long atomic64_dec_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_dec_return\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

#define atomic64_sub_and_test(a, v) (atomic64_sub_return((a), (v)) == 0)
#define atomic64_dec_and_test(v) (atomic64_dec_return((v)) == 0)

/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1.
 */
static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# atomic64_dec_if_positive\n\
	addic.	%0,%0,-1\n\
	blt-	2f\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t;
}

#define atomic64_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))

/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if the addition was performed (i.e. @v was not @u),
 * and zero otherwise.
 */
static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long t;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%1		# __atomic_add_unless\n\
	cmpd	0,%0,%3 \n\
	beq-	2f \n\
	add	%0,%2,%0 \n"
"	stdcx.	%0,0,%1 \n\
	bne-	1b \n"
	PPC_ATOMIC_EXIT_BARRIER
"	subf	%0,%2,%0 \n\
2:"
	: "=&r" (t)
	: "r" (&v->counter), "r" (a), "r" (u)
	: "cc", "memory");

	return t != u;
}
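
/*
 * Illustrative note (not part of the original header): unlike the 32-bit
 * __atomic_add_unless() above, which hands back the old value and lets
 * generic code compare it against @u, this 64-bit variant performs the
 * comparison itself and returns a boolean-style result (t != u).
 */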

/**
 * atomic64_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1, so long as @v is non-zero.
 * Returns non-zero if @v was non-zero, and zero otherwise.
 */
static __inline__ long atomic64_inc_not_zero(atomic64_t *v)
{
	long t1, t2;

	__asm__ __volatile__ (
	PPC_ATOMIC_ENTRY_BARRIER
"1:	ldarx	%0,0,%2		# atomic64_inc_not_zero\n\
	cmpdi	0,%0,0\n\
	beq-	2f\n\
	addic	%1,%0,1\n\
	stdcx.	%1,0,%2\n\
	bne-	1b\n"
	PPC_ATOMIC_EXIT_BARRIER
	"\n\
2:"
	: "=&r" (t1), "=&r" (t2)
	: "r" (&v->counter)
	: "cc", "xer", "memory");

	return t1;
}

#endif /* __powerpc64__ */

#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_ATOMIC_H_ */