/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04, 06 by Ralf Baechle
 */
#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <linux/irqflags.h>
#include <asm/cpu-features.h>
#include <asm/war.h>

typedef struct { volatile int counter; } atomic_t;

#define ATOMIC_INIT(i)    { (i) }

/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)		((v)->counter)

/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v,i)		((v)->counter = (i))

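/*
 * Usage sketch (illustrative, not part of the original header): a
 * hypothetical event counter.  nr_events, count_event, events_seen and
 * reset_events are made-up names for this example.
 *
 *	static atomic_t nr_events = ATOMIC_INIT(0);
 *
 *	static void count_event(void)  { atomic_add(1, &nr_events); }
 *	static int  events_seen(void)  { return atomic_read(&nr_events); }
 *	static void reset_events(void) { atomic_set(&nr_events, 0); }
 */
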
/*
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic_add(int i, atomic_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%0, %1		# atomic_add		\n"
		"	addu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%0, %1		# atomic_add		\n"
		"	addu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqz	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		local_irq_save(flags);
		v->counter += i;
		local_irq_restore(flags);
	}
}
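
/*
 * All the LL/SC loops in this file follow the pattern above; a C-level
 * sketch of what the assembler does (illustrative only -- real atomicity
 * requires the ll/sc instructions themselves):
 *
 *	do {
 *		temp = load_linked(&v->counter);	// ll
 *		temp += i;				// addu
 *	} while (!store_conditional(&v->counter, temp));	// sc + beqz
 *
 * load_linked/store_conditional are hypothetical helpers standing in for
 * ll and sc: sc succeeds only if nothing touched the location since the
 * ll, and the branch retries on failure.  The beqzl (branch-likely)
 * variant exists solely to work around an R10000 erratum and is selected
 * by R10000_LLSC_WAR.
 */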

/*
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic_sub(int i, atomic_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%0, %1		# atomic_sub		\n"
		"	subu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%0, %1		# atomic_sub		\n"
		"	subu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqz	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		local_irq_save(flags);
		v->counter -= i;
		local_irq_restore(flags);
	}
}

/*
 * Same as above, but return the result value
 */
static __inline__ int atomic_add_return(int i, atomic_t * v)
{
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_add_return	\n"
		"	addu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	addu	%0, %1, %3				\n"
		"	sync						\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_add_return	\n"
		"	addu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	addu	%0, %1, %3				\n"
		"	sync						\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = v->counter;
		result += i;
		v->counter = result;
		local_irq_restore(flags);
	}

	return result;
}

static __inline__ int atomic_sub_return(int i, atomic_t * v)
{
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_return	\n"
		"	subu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	subu	%0, %1, %3				\n"
		"	sync						\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_return	\n"
		"	subu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	subu	%0, %1, %3				\n"
		"	sync						\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = v->counter;
		result -= i;
		v->counter = result;
		local_irq_restore(flags);
	}

	return result;
}

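/*
 * Usage sketch (illustrative): handing out unique ticket numbers.  The
 * identifiers below are made up for this example.
 *
 *	static atomic_t next_ticket = ATOMIC_INIT(0);
 *
 *	static int take_ticket(void)
 *	{
 *		return atomic_add_return(1, &next_ticket);
 *	}
 *
 * Unlike atomic_add(), the *_return variants also act as a memory
 * barrier (note the sync instruction above), so they are usable for
 * synchronization, not just counting.
 */
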
/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
{
	int result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	sc	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqzl	%0, 1b					\n"
		"	subu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"	sync						\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	sc	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqz	%0, 1b					\n"
		"	subu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"	sync						\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		local_irq_restore(flags);
	}

	return result;
}

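/*
 * Usage sketch (illustrative): a resource budget that must never go
 * below zero.  budget and reserve are made-up names.
 *
 *	static atomic_t budget = ATOMIC_INIT(16);
 *
 *	static int reserve(int n)
 *	{
 *		return atomic_sub_if_positive(n, &budget) >= 0;
 *	}
 *
 * On failure the counter is left untouched and the negative would-be
 * result is returned, so the caller can also tell by how much the
 * request exceeded the budget.
 */
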
#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

/**
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
#define atomic_add_unless(v, a, u)				\
({								\
	int c, old;						\
	c = atomic_read(v);					\
	while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
		c = old;					\
	c != (u);						\
})
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)

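/*
 * The cmpxchg loop above is the generic recipe for building new atomic
 * operations out of atomic_cmpxchg(): read the counter, compute the new
 * value, and retry if someone changed it in between.  A sketch of a
 * hypothetical atomic OR built the same way (my_atomic_or is a made-up
 * name, not part of this header):
 *
 *	static inline void my_atomic_or(int mask, atomic_t *v)
 *	{
 *		int c, old;
 *
 *		c = atomic_read(v);
 *		while ((old = atomic_cmpxchg(v, c, c | mask)) != c)
 *			c = old;
 *	}
 *
 * atomic_inc_not_zero() is the classic "take a reference only if the
 * object is still live" primitive used by lockless lookup code.
 */
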
#define atomic_dec_return(v) atomic_sub_return(1,(v))
#define atomic_inc_return(v) atomic_add_return(1,(v))

/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/*
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)

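/*
 * Usage sketch (illustrative): the canonical reference-count pattern.
 * struct my_obj and my_obj_put are made-up names.
 *
 *	struct my_obj {
 *		atomic_t refcnt;
 *		...
 *	};
 *
 *	static void my_obj_put(struct my_obj *p)
 *	{
 *		if (atomic_dec_and_test(&p->refcnt))
 *			kfree(p);
 *	}
 *
 * Only the caller that drops the count to zero sees a true result, so
 * exactly one of several concurrent callers frees the object.
 */
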
/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v)	atomic_sub_if_positive(1, v)

/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) atomic_add(1,(v))

/*
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) atomic_sub(1,(v))

/*
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i,v) (atomic_add_return(i, (v)) < 0)

#ifdef CONFIG_64BIT

typedef struct { volatile __s64 counter; } atomic64_t;

#define ATOMIC64_INIT(i)    { (i) }

/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 */
#define atomic64_read(v)	((v)->counter)

/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 */
#define atomic64_set(v,i)	((v)->counter = (i))

/*
 * atomic64_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic64_add(long i, atomic64_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%0, %1		# atomic64_add		\n"
		"	daddu	%0, %2					\n"
		"	scd	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%0, %1		# atomic64_add		\n"
		"	daddu	%0, %2					\n"
		"	scd	%0, %1					\n"
		"	beqz	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		local_irq_save(flags);
		v->counter += i;
		local_irq_restore(flags);
	}
}
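
/*
 * Usage sketch (illustrative): a byte counter that can overflow 32 bits.
 * rx_bytes and len are made-up names; len is assumed to be a long.
 *
 *	static atomic64_t rx_bytes = ATOMIC64_INIT(0);
 *
 *	atomic64_add(len, &rx_bytes);
 *
 * Note that the 64-bit variants use lld/scd with the doubleword
 * arithmetic instructions (daddu/dsubu) and exist only on CONFIG_64BIT
 * kernels.
 */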

/*
 * atomic64_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic64_sub(long i, atomic64_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%0, %1		# atomic64_sub		\n"
		"	dsubu	%0, %2					\n"
		"	scd	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%0, %1		# atomic64_sub		\n"
		"	dsubu	%0, %2					\n"
		"	scd	%0, %1					\n"
		"	beqz	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		local_irq_save(flags);
		v->counter -= i;
		local_irq_restore(flags);
	}
}

/*
 * Same as above, but return the result value
 */
static __inline__ long atomic64_add_return(long i, atomic64_t * v)
{
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_add_return	\n"
		"	daddu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	daddu	%0, %1, %3				\n"
		"	sync						\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_add_return	\n"
		"	daddu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	daddu	%0, %1, %3				\n"
		"	sync						\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = v->counter;
		result += i;
		v->counter = result;
		local_irq_restore(flags);
	}

	return result;
}

static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
{
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_sub_return	\n"
		"	dsubu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	dsubu	%0, %1, %3				\n"
		"	sync						\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_sub_return	\n"
		"	dsubu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	dsubu	%0, %1, %3				\n"
		"	sync						\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = v->counter;
		result -= i;
		v->counter = result;
		local_irq_restore(flags);
	}

	return result;
}

/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
{
	long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	scd	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqzl	%0, 1b					\n"
		"	dsubu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"	sync						\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	scd	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqz	%0, 1b					\n"
		"	dsubu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"	sync						\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		local_irq_restore(flags);
	}

	return result;
}

#define atomic64_dec_return(v) atomic64_sub_return(1,(v))
#define atomic64_inc_return(v) atomic64_add_return(1,(v))

/*
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_sub_and_test(i,v) (atomic64_sub_return((i), (v)) == 0)

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

/*
 * atomic64_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v)	atomic64_sub_if_positive(1, v)

/*
 * atomic64_inc - increment atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1.
 */
#define atomic64_inc(v) atomic64_add(1,(v))

/*
 * atomic64_dec - decrement atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic64_dec(v) atomic64_sub(1,(v))

/*
 * atomic64_add_negative - add and test if negative
 * @v: pointer of type atomic64_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic64_add_negative(i,v) (atomic64_add_return(i, (v)) < 0)

#endif /* CONFIG_64BIT */

/*
 * atomic*_return operations are serializing; the plain (non-*_return)
 * versions are not.
 */
#define smp_mb__before_atomic_dec()	smp_mb()
#define smp_mb__after_atomic_dec()	smp_mb()
#define smp_mb__before_atomic_inc()	smp_mb()
#define smp_mb__after_atomic_inc()	smp_mb()
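
/*
 * Usage sketch (illustrative): pairing the explicit barriers with a
 * plain atomic op.  done and pending are made-up names.
 *
 *	done = 1;			// publish the result
 *	smp_mb__before_atomic_dec();	// order the store before the dec
 *	atomic_dec(&pending);
 *
 * On MIPS these expand to smp_mb() because, as noted above, the plain
 * atomic ops carry no sync of their own; only the *_return forms are
 * serializing.
 */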

#include <asm-generic/atomic.h>
#endif /* _ASM_ATOMIC_H */