#ifndef __ARCH_S390_ATOMIC__
#define __ARCH_S390_ATOMIC__

#include <linux/compiler.h>

/*
 * include/asm-s390/atomic.h
 *
 * S390 version
 *   Copyright (C) 1999-2005 IBM Deutschland Entwicklung GmbH, IBM Corporation
 *   Author(s): Martin Schwidefsky (schwidefsky@de.ibm.com),
 *              Denis Joseph Barrow,
 *              Arnd Bergmann (arndb@de.ibm.com)
 *
 * Derived from "include/asm-i386/bitops.h"
 *   Copyright (C) 1992, Linus Torvalds
 */

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 * S390 uses 'Compare And Swap' for atomicity in an SMP environment.
 */

typedef struct {
	volatile int counter;
} __attribute__ ((aligned (4))) atomic_t;
#define ATOMIC_INIT(i)	{ (i) }

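/*
 * Example (sketch): an atomic_t is declared and initialized with
 * ATOMIC_INIT; the 4-byte alignment the type carries is required
 * by the CS instruction used below.  'foo_count' is a made-up name
 * for illustration:
 *
 *	static atomic_t foo_count = ATOMIC_INIT(0);
 */
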
#ifdef __KERNEL__

#define __CS_LOOP(ptr, op_val, op_string) ({				\
	typeof(ptr->counter) old_val, new_val;				\
	__asm__ __volatile__("   l     %0,0(%3)\n"			\
			     "0: lr    %1,%0\n"				\
			     op_string "  %1,%4\n"			\
			     "   cs    %0,%1,0(%3)\n"			\
			     "   jl    0b"				\
			     : "=&d" (old_val), "=&d" (new_val),	\
			       "=m" (((atomic_t *)(ptr))->counter)	\
			     : "a" (ptr), "d" (op_val),			\
			       "m" (((atomic_t *)(ptr))->counter)	\
			     : "cc", "memory" );			\
	new_val;							\
})
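
/*
 * In plain C, __CS_LOOP(ptr, op_val, op_string) behaves roughly like
 * the sketch below; cs() stands in for the CS instruction, which
 * stores new_val only if *ptr still contains old_val and otherwise
 * reloads the current value into old_val (the jl branch retries):
 *
 *	old_val = ptr->counter;
 *	do {
 *		new_val = <op>(old_val, op_val);
 *	} while (!cs(&ptr->counter, &old_val, new_val));
 *	return new_val;
 */
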
#define atomic_read(v)		((v)->counter)
#define atomic_set(v,i)		(((v)->counter) = (i))

static __inline__ int atomic_add_return(int i, atomic_t *v)
{
	return __CS_LOOP(v, i, "ar");
}
#define atomic_add(_i, _v)		atomic_add_return(_i, _v)
#define atomic_add_negative(_i, _v)	(atomic_add_return(_i, _v) < 0)
#define atomic_inc(_v)			atomic_add_return(1, _v)
#define atomic_inc_return(_v)		atomic_add_return(1, _v)
#define atomic_inc_and_test(_v)		(atomic_add_return(1, _v) == 0)

static __inline__ int atomic_sub_return(int i, atomic_t *v)
{
	return __CS_LOOP(v, i, "sr");
}
#define atomic_sub(_i, _v)		atomic_sub_return(_i, _v)
#define atomic_sub_and_test(_i, _v)	(atomic_sub_return(_i, _v) == 0)
#define atomic_dec(_v)			atomic_sub_return(1, _v)
#define atomic_dec_return(_v)		atomic_sub_return(1, _v)
#define atomic_dec_and_test(_v)		(atomic_sub_return(1, _v) == 0)

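/*
 * Example (sketch): the usual last-reference-frees idiom built on
 * atomic_dec_and_test; 'struct foo' and foo_put() are made up for
 * illustration:
 *
 *	static void foo_put(struct foo *f)
 *	{
 *		if (atomic_dec_and_test(&f->refcount))
 *			kfree(f);
 *	}
 */
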
static __inline__ void atomic_clear_mask(unsigned long mask, atomic_t *v)
{
	__CS_LOOP(v, ~mask, "nr");
}

static __inline__ void atomic_set_mask(unsigned long mask, atomic_t *v)
{
	__CS_LOOP(v, mask, "or");
}

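/*
 * Example (sketch): the mask operations set or clear bits in the
 * counter atomically, and can change several bits in one operation;
 * FOO_PENDING and f->flags are made up for illustration:
 *
 *	atomic_set_mask(FOO_PENDING, &f->flags);
 *	atomic_clear_mask(FOO_PENDING, &f->flags);
 */
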
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

static __inline__ int atomic_cmpxchg(atomic_t *v, int old, int new)
{
	__asm__ __volatile__("  cs   %0,%3,0(%2)\n"
			     : "+d" (old), "=m" (v->counter)
			     : "a" (v), "d" (new), "m" (v->counter)
			     : "cc", "memory" );
	return old;
}

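/*
 * Example (sketch): because atomic_cmpxchg returns the value it
 * actually found in *v, callers can build their own atomic
 * read-modify-write operations; this hypothetical helper increments
 * a counter only while it stays below a limit:
 *
 *	static int atomic_inc_below(atomic_t *v, int limit)
 *	{
 *		int c = atomic_read(v), old;
 *		while (c < limit) {
 *			old = atomic_cmpxchg(v, c, c + 1);
 *			if (old == c)
 *				return 1;
 *			c = old;
 *		}
 *		return 0;
 *	}
 *
 * atomic_add_unless below follows exactly this pattern.
 */
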
static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;
	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == u))
			break;
		old = atomic_cmpxchg(v, c, c + a);
		if (likely(old == c))
			break;
		c = old;
	}
	return c != u;
}

#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)

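/*
 * Example (sketch): atomic_inc_not_zero is the lookup-side half of
 * the refcounting idiom; a reference is taken only if the count has
 * not already dropped to zero ('struct foo' as above is made up):
 *
 *	static struct foo *foo_get(struct foo *f)
 *	{
 *		if (!atomic_inc_not_zero(&f->refcount))
 *			return NULL;
 *		return f;
 *	}
 */
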
#undef __CS_LOOP

#ifdef __s390x__
typedef struct {
	volatile long long counter;
} __attribute__ ((aligned (8))) atomic64_t;
#define ATOMIC64_INIT(i)	{ (i) }

#define __CSG_LOOP(ptr, op_val, op_string) ({				\
	typeof(ptr->counter) old_val, new_val;				\
	__asm__ __volatile__("   lg    %0,0(%3)\n"			\
			     "0: lgr   %1,%0\n"				\
			     op_string "  %1,%4\n"			\
			     "   csg   %0,%1,0(%3)\n"			\
			     "   jl    0b"				\
			     : "=&d" (old_val), "=&d" (new_val),	\
			       "=m" (((atomic64_t *)(ptr))->counter)	\
			     : "a" (ptr), "d" (op_val),			\
			       "m" (((atomic64_t *)(ptr))->counter)	\
			     : "cc", "memory" );			\
	new_val;							\
})
#define atomic64_read(v)	((v)->counter)
#define atomic64_set(v,i)	(((v)->counter) = (i))

static __inline__ long long atomic64_add_return(long long i, atomic64_t *v)
{
	return __CSG_LOOP(v, i, "agr");
}
#define atomic64_add(_i, _v)		atomic64_add_return(_i, _v)
#define atomic64_add_negative(_i, _v)	(atomic64_add_return(_i, _v) < 0)
#define atomic64_inc(_v)		atomic64_add_return(1, _v)
#define atomic64_inc_return(_v)		atomic64_add_return(1, _v)
#define atomic64_inc_and_test(_v)	(atomic64_add_return(1, _v) == 0)

static __inline__ long long atomic64_sub_return(long long i, atomic64_t *v)
{
	return __CSG_LOOP(v, i, "sgr");
}
#define atomic64_sub(_i, _v)		atomic64_sub_return(_i, _v)
#define atomic64_sub_and_test(_i, _v)	(atomic64_sub_return(_i, _v) == 0)
#define atomic64_dec(_v)		atomic64_sub_return(1, _v)
#define atomic64_dec_return(_v)		atomic64_sub_return(1, _v)
#define atomic64_dec_and_test(_v)	(atomic64_sub_return(1, _v) == 0)

static __inline__ void atomic64_clear_mask(unsigned long mask, atomic64_t *v)
{
	__CSG_LOOP(v, ~mask, "ngr");
}

static __inline__ void atomic64_set_mask(unsigned long mask, atomic64_t *v)
{
	__CSG_LOOP(v, mask, "ogr");
}

static __inline__ long long atomic64_cmpxchg(atomic64_t *v,
					     long long old, long long new)
{
	__asm__ __volatile__("  csg  %0,%3,0(%2)\n"
			     : "+d" (old), "=m" (v->counter)
			     : "a" (v), "d" (new), "m" (v->counter)
			     : "cc", "memory" );
	return old;
}

static __inline__ int atomic64_add_unless(atomic64_t *v,
					  long long a, long long u)
{
	long long c, old;
	c = atomic64_read(v);
	for (;;) {
		if (unlikely(c == u))
			break;
		old = atomic64_cmpxchg(v, c, c + a);
		if (likely(old == c))
			break;
		c = old;
	}
	return c != u;
}

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)

#undef __CSG_LOOP
#endif /* __s390x__ */

#define smp_mb__before_atomic_dec()	smp_mb()
#define smp_mb__after_atomic_dec()	smp_mb()
#define smp_mb__before_atomic_inc()	smp_mb()
#define smp_mb__after_atomic_inc()	smp_mb()

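/*
 * Example (sketch): these macros order memory accesses around an
 * atomic_inc/atomic_dec that is used as a synchronization point;
 * on s390 they expand to a full smp_mb().  FOO_READY, f->status
 * and f->pending are made up for illustration:
 *
 *	f->status = FOO_READY;
 *	smp_mb__before_atomic_inc();
 *	atomic_inc(&f->pending);
 */
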
#include <asm-generic/atomic.h>
#endif /* __KERNEL__ */
#endif /* __ARCH_S390_ATOMIC__ */