/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_M32R_ATOMIC_H
#define _ASM_M32R_ATOMIC_H

/*
 * linux/include/asm-m32r/atomic.h
 *
 * M32R version:
 *   Copyright (C) 2001, 2002  Hitoshi Yamamoto
 *   Copyright (C) 2004  Hirokazu Takata <takata at linux-m32r.org>
 */

#include <linux/types.h>
#include <asm/assembler.h>
#include <asm/cmpxchg.h>
#include <asm/dcache_clear.h>
#include <asm/barrier.h>

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 */

#define ATOMIC_INIT(i)	{ (i) }

/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)	READ_ONCE((v)->counter)

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v,i)	WRITE_ONCE(((v)->counter), (i))
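
/*
 * Usage sketch (the "nr_events" counter is made up for illustration):
 * reads and writes go through READ_ONCE()/WRITE_ONCE(), which stop the
 * compiler from tearing or caching the access but imply no ordering.
 *
 *	static atomic_t nr_events = ATOMIC_INIT(0);
 *
 *	atomic_set(&nr_events, 16);
 *	while (atomic_read(&nr_events) > 0)
 *		...;
 */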

#ifdef CONFIG_CHIP_M32700_TS1
#define __ATOMIC_CLOBBER	, "r4"
#else
#define __ATOMIC_CLOBBER
#endif

#define ATOMIC_OP(op)							\
static __inline__ void atomic_##op(int i, atomic_t *v)			\
{									\
	unsigned long flags;						\
	int result;							\
									\
	local_irq_save(flags);						\
	__asm__ __volatile__ (						\
		"# atomic_" #op "		\n\t"			\
		DCACHE_CLEAR("%0", "r4", "%1")				\
		M32R_LOCK" %0, @%1;		\n\t"			\
		#op " %0, %2;			\n\t"			\
		M32R_UNLOCK" %0, @%1;		\n\t"			\
		: "=&r" (result)					\
		: "r" (&v->counter), "r" (i)				\
		: "memory"						\
		__ATOMIC_CLOBBER					\
	);								\
	local_irq_restore(flags);					\
}									\

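/*
 * Illustrative expansion (paraphrased, not literal cpp output):
 * ATOMIC_OP(add) defines
 *
 *	static __inline__ void atomic_add(int i, atomic_t *v);
 *
 * which adds @i to @v and returns nothing.  The irq-off window plus the
 * M32R_LOCK/M32R_UNLOCK load-store pair keep the read-modify-write
 * atomic against concurrent updates of the same counter.
 */
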
#define ATOMIC_OP_RETURN(op)						\
static __inline__ int atomic_##op##_return(int i, atomic_t *v)		\
{									\
	unsigned long flags;						\
	int result;							\
									\
	local_irq_save(flags);						\
	__asm__ __volatile__ (						\
		"# atomic_" #op "_return	\n\t"			\
		DCACHE_CLEAR("%0", "r4", "%1")				\
		M32R_LOCK" %0, @%1;		\n\t"			\
		#op " %0, %2;			\n\t"			\
		M32R_UNLOCK" %0, @%1;		\n\t"			\
		: "=&r" (result)					\
		: "r" (&v->counter), "r" (i)				\
		: "memory"						\
		__ATOMIC_CLOBBER					\
	);								\
	local_irq_restore(flags);					\
									\
	return result;							\
}

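/*
 * Contrast with the void ops: atomic_add_return() hands back the *new*
 * value.  With v at 5 (hypothetical), atomic_add_return(3, &v) leaves v
 * at 8 and returns 8.
 */
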
#define ATOMIC_FETCH_OP(op)						\
static __inline__ int atomic_fetch_##op(int i, atomic_t *v)		\
{									\
	unsigned long flags;						\
	int result, val;						\
									\
	local_irq_save(flags);						\
	__asm__ __volatile__ (						\
		"# atomic_fetch_" #op "		\n\t"			\
		DCACHE_CLEAR("%0", "r4", "%2")				\
		M32R_LOCK" %1, @%2;		\n\t"			\
		"mv %0, %1			\n\t"			\
		#op " %1, %3;			\n\t"			\
		M32R_UNLOCK" %1, @%2;		\n\t"			\
		: "=&r" (result), "=&r" (val)				\
		: "r" (&v->counter), "r" (i)				\
		: "memory"						\
		__ATOMIC_CLOBBER					\
	);								\
	local_irq_restore(flags);					\
									\
	return result;							\
}

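/*
 * The fetch variants return the *old* value instead: with v at 5
 * (hypothetical), atomic_fetch_add(3, &v) leaves v at 8 but returns 5.
 * The "mv %0, %1" above is what snapshots the pre-op value.
 */
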
#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_OP_RETURN(op) ATOMIC_FETCH_OP(op)

ATOMIC_OPS(add)
ATOMIC_OPS(sub)

#undef ATOMIC_OPS
#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_FETCH_OP(op)

ATOMIC_OPS(and)
ATOMIC_OPS(or)
ATOMIC_OPS(xor)

#undef ATOMIC_OPS
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP
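
/*
 * What the templates above actually instantiate (a summary of this
 * file, not the full kernel atomic API):
 *
 *	atomic_add(), atomic_sub()
 *	atomic_add_return(), atomic_sub_return()
 *	atomic_fetch_add(), atomic_fetch_sub()
 *	atomic_and(), atomic_or(), atomic_xor()
 *	atomic_fetch_and(), atomic_fetch_or(), atomic_fetch_xor()
 *
 * The bitwise ops are deliberately instantiated only in void and fetch
 * forms; there is no atomic_and_return() and friends.
 */
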
/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)
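
/*
 * E.g. (made-up counter): with "in_flight" at 4, atomic_sub_and_test(4,
 * &in_flight) takes it to 0 and returns true; with any smaller @i it
 * would return false.
 */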

/**
 * atomic_inc_return - increment atomic variable and return it
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1 and returns the result.
 */
static __inline__ int atomic_inc_return(atomic_t *v)
{
	unsigned long flags;
	int result;

	local_irq_save(flags);
	__asm__ __volatile__ (
		"# atomic_inc_return		\n\t"
		DCACHE_CLEAR("%0", "r4", "%1")
		M32R_LOCK" %0, @%1;		\n\t"
		"addi	%0, #1;			\n\t"
		M32R_UNLOCK" %0, @%1;		\n\t"
		: "=&r" (result)
		: "r" (&v->counter)
		: "memory"
		__ATOMIC_CLOBBER
	);
	local_irq_restore(flags);

	return result;
}

/**
 * atomic_dec_return - decrement atomic variable and return it
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and returns the result.
 */
static __inline__ int atomic_dec_return(atomic_t *v)
{
	unsigned long flags;
	int result;

	local_irq_save(flags);
	__asm__ __volatile__ (
		"# atomic_dec_return		\n\t"
		DCACHE_CLEAR("%0", "r4", "%1")
		M32R_LOCK" %0, @%1;		\n\t"
		"addi	%0, #-1;		\n\t"
		M32R_UNLOCK" %0, @%1;		\n\t"
		: "=&r" (result)
		: "r" (&v->counter)
		: "memory"
		__ATOMIC_CLOBBER
	);
	local_irq_restore(flags);

	return result;
}
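
/*
 * Note the decrement above is "addi %0, #-1": adding a negative
 * immediate stands in for a subtract-immediate (an observation about
 * the asm, assuming the usual M32R addi semantics).
 */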

/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) ((void)atomic_inc_return(v))

/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) ((void)atomic_dec_return(v))

/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all
 * other cases.
 */
#define atomic_dec_and_test(v) (atomic_dec_return(v) == 0)
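
/*
 * Classic refcounting idiom built on this (sketch; "obj" and
 * "release_obj" are placeholders, not kernel API):
 *
 *	if (atomic_dec_and_test(&obj->refcnt))
 *		release_obj(obj);
 *
 * Exactly one caller sees the 1 -> 0 transition, so the release runs
 * once.
 */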

/**
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i,v) (atomic_add_return((i), (v)) < 0)

#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
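
/*
 * Lock-free update sketch (illustrative; doubling is an arbitrary
 * example operation):
 *
 *	int cur = atomic_read(v);
 *	int old;
 *
 *	for (;;) {
 *		old = atomic_cmpxchg(v, cur, cur * 2);
 *		if (old == cur)
 *			break;
 *		cur = old;
 *	}
 *
 * atomic_cmpxchg() returns the value it found: equal to "cur" means the
 * swap happened; anything else means we lost a race and must retry.
 */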
/**
 * __atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns the old value of @v.
 */
static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;
	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c;
}
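
/*
 * Typical use (sketch; "obj" is a placeholder): since the old value is
 * returned, "add 1 unless 0" implements inc-not-zero, taking a
 * reference only while the object is still live:
 *
 *	if (__atomic_add_unless(&obj->refcnt, 1, 0) != 0)
 *		... got a reference ...
 *
 * A return of 0 means the count was already 0 and nothing was added.
 */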

#endif	/* _ASM_M32R_ATOMIC_H */