License cleanup: add SPDX GPL-2.0 license identifier to files with no license
[linux-2.6-block.git] arch/alpha/include/asm/xchg.h
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ALPHA_CMPXCHG_H
#error Do not include xchg.h directly!
#else
/*
 * xchg/xchg_local and cmpxchg/cmpxchg_local share the same code
 * except that the local versions do not have the expensive memory barrier.
 * So this file is included twice from asm/cmpxchg.h.
 */

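/*
 * For illustration only (not part of this header): the "included twice"
 * scheme means asm/cmpxchg.h is expected to map the ____xchg()/____cmpxchg()
 * name fragments used below onto two sets of helpers, roughly along these
 * lines -- the exact macro spellings here are an assumption:
 *
 *	#define ____xchg(type, args...)		__xchg ## type ## _local(args)
 *	#define ____cmpxchg(type, args...)	__cmpxchg ## type ## _local(args)
 *	#include <asm/xchg.h>
 *
 *	#undef ____xchg
 *	#undef ____cmpxchg
 *	#define ____xchg(type, args...)		__xchg ## type(args)
 *	#define ____cmpxchg(type, args...)	__cmpxchg ## type(args)
 *	#include <asm/xchg.h>
 */
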
/*
 * Atomic exchange.
 * Since it can be used to implement critical sections
 * it must clobber "memory" (also for interrupts in UP).
 */

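/*
 * Byte-sized exchange: Alpha has no byte-granular load-locked/store-
 * conditional, so the containing aligned quadword is loaded with ldq_l,
 * the old byte is extracted (extbl), the new byte is merged in
 * (mskbl + or), and the whole quadword is written back with stq_c,
 * retrying if the store-conditional fails.
 */
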
static inline unsigned long
____xchg(_u8, volatile char *m, unsigned long val)
{
	unsigned long ret, tmp, addr64;

	__asm__ __volatile__(
	" andnot %4,7,%3\n"
	" insbl %1,%4,%1\n"
	"1: ldq_l %2,0(%3)\n"
	" extbl %2,%4,%0\n"
	" mskbl %2,%4,%2\n"
	" or %1,%2,%2\n"
	" stq_c %2,0(%3)\n"
	" beq %2,2f\n"
		__ASM__MB
	".subsection 2\n"
	"2: br 1b\n"
	".previous"
	: "=&r" (ret), "=&r" (val), "=&r" (tmp), "=&r" (addr64)
	: "r" ((long)m), "1" (val) : "memory");

	return ret;
}

static inline unsigned long
____xchg(_u16, volatile short *m, unsigned long val)
{
	unsigned long ret, tmp, addr64;

	__asm__ __volatile__(
	" andnot %4,7,%3\n"
	" inswl %1,%4,%1\n"
	"1: ldq_l %2,0(%3)\n"
	" extwl %2,%4,%0\n"
	" mskwl %2,%4,%2\n"
	" or %1,%2,%2\n"
	" stq_c %2,0(%3)\n"
	" beq %2,2f\n"
		__ASM__MB
	".subsection 2\n"
	"2: br 1b\n"
	".previous"
	: "=&r" (ret), "=&r" (val), "=&r" (tmp), "=&r" (addr64)
	: "r" ((long)m), "1" (val) : "memory");

	return ret;
}

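/*
 * The 32- and 64-bit variants are simpler: the operand is a naturally
 * aligned longword/quadword, so ldl_l/stl_c (or ldq_l/stq_c) can operate
 * on it directly and no extract/mask/insert sequence is needed.
 */
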
static inline unsigned long
____xchg(_u32, volatile int *m, unsigned long val)
{
	unsigned long dummy;

	__asm__ __volatile__(
	"1: ldl_l %0,%4\n"
	" bis $31,%3,%1\n"
	" stl_c %1,%2\n"
	" beq %1,2f\n"
		__ASM__MB
	".subsection 2\n"
	"2: br 1b\n"
	".previous"
	: "=&r" (val), "=&r" (dummy), "=m" (*m)
	: "rI" (val), "m" (*m) : "memory");

	return val;
}

static inline unsigned long
____xchg(_u64, volatile long *m, unsigned long val)
{
	unsigned long dummy;

	__asm__ __volatile__(
	"1: ldq_l %0,%4\n"
	" bis $31,%3,%1\n"
	" stq_c %1,%2\n"
	" beq %1,2f\n"
		__ASM__MB
	".subsection 2\n"
	"2: br 1b\n"
	".previous"
	: "=&r" (val), "=&r" (dummy), "=m" (*m)
	: "rI" (val), "m" (*m) : "memory");

	return val;
}

/* This function doesn't exist, so you'll get a linker error
   if something tries to do an invalid xchg(). */
extern void __xchg_called_with_bad_pointer(void);

static __always_inline unsigned long
____xchg(, volatile void *ptr, unsigned long x, int size)
{
	switch (size) {
	case 1:
		return ____xchg(_u8, ptr, x);
	case 2:
		return ____xchg(_u16, ptr, x);
	case 4:
		return ____xchg(_u32, ptr, x);
	case 8:
		return ____xchg(_u64, ptr, x);
	}
	__xchg_called_with_bad_pointer();
	return x;
}

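/*
 * For illustration only (not part of this header): a minimal sketch of how
 * the xchg() wrapper that asm/cmpxchg.h is expected to build on top of
 * ____xchg() might be used, e.g. for a crude test-and-set lock.  The lock
 * layout and spin loop here are assumptions, not kernel code:
 *
 *	static volatile int hypothetical_lock;
 *
 *	static void hypothetical_lock_acquire(void)
 *	{
 *		while (xchg(&hypothetical_lock, 1) != 0)
 *			;	// old value was 1: someone else holds it, spin
 *	}
 *
 *	static void hypothetical_lock_release(void)
 *	{
 *		xchg(&hypothetical_lock, 0);	// old value should be 1
 *	}
 *
 * Passing an object whose size is not 1, 2, 4 or 8 bytes falls through the
 * switch above and references __xchg_called_with_bad_pointer(), which does
 * not exist, so the mistake shows up as a link-time error.
 */
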
/*
 * Atomic compare and exchange. Compare OLD with MEM, if identical,
 * store NEW in MEM. Return the initial value in MEM. Success is
 * indicated by comparing RETURN with OLD.
 *
 * The memory barrier should be placed in SMP only when we actually
 * make the change. If we don't change anything (so if the returned
 * prev is equal to old) then we aren't acquiring anything new and
 * we don't need any memory barrier as far as I can tell.
 */

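/*
 * As with the byte/word exchange above, the sub-longword compare-and-
 * exchange variants work on the containing aligned quadword: the old
 * byte/word is extracted and compared first, and if it does not match,
 * the operation bails out early (label 2) without issuing the barrier
 * or the store; otherwise the new value is merged in and written back
 * with a store-conditional, retrying on failure.
 */
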
static inline unsigned long
____cmpxchg(_u8, volatile char *m, unsigned char old, unsigned char new)
{
	unsigned long prev, tmp, cmp, addr64;

	__asm__ __volatile__(
	" andnot %5,7,%4\n"
	" insbl %1,%5,%1\n"
	"1: ldq_l %2,0(%4)\n"
	" extbl %2,%5,%0\n"
	" cmpeq %0,%6,%3\n"
	" beq %3,2f\n"
	" mskbl %2,%5,%2\n"
	" or %1,%2,%2\n"
	" stq_c %2,0(%4)\n"
	" beq %2,3f\n"
		__ASM__MB
	"2:\n"
	".subsection 2\n"
	"3: br 1b\n"
	".previous"
	: "=&r" (prev), "=&r" (new), "=&r" (tmp), "=&r" (cmp), "=&r" (addr64)
	: "r" ((long)m), "Ir" (old), "1" (new) : "memory");

	return prev;
}

static inline unsigned long
____cmpxchg(_u16, volatile short *m, unsigned short old, unsigned short new)
{
	unsigned long prev, tmp, cmp, addr64;

	__asm__ __volatile__(
	" andnot %5,7,%4\n"
	" inswl %1,%5,%1\n"
	"1: ldq_l %2,0(%4)\n"
	" extwl %2,%5,%0\n"
	" cmpeq %0,%6,%3\n"
	" beq %3,2f\n"
	" mskwl %2,%5,%2\n"
	" or %1,%2,%2\n"
	" stq_c %2,0(%4)\n"
	" beq %2,3f\n"
		__ASM__MB
	"2:\n"
	".subsection 2\n"
	"3: br 1b\n"
	".previous"
	: "=&r" (prev), "=&r" (new), "=&r" (tmp), "=&r" (cmp), "=&r" (addr64)
	: "r" ((long)m), "Ir" (old), "1" (new) : "memory");

	return prev;
}

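/*
 * The longword/quadword compare-and-exchange variants again operate on the
 * naturally aligned object directly with ldl_l/stl_c (ldq_l/stq_c); the
 * barrier is likewise skipped when the comparison fails.
 */
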
static inline unsigned long
____cmpxchg(_u32, volatile int *m, int old, int new)
{
	unsigned long prev, cmp;

	__asm__ __volatile__(
	"1: ldl_l %0,%5\n"
	" cmpeq %0,%3,%1\n"
	" beq %1,2f\n"
	" mov %4,%1\n"
	" stl_c %1,%2\n"
	" beq %1,3f\n"
		__ASM__MB
	"2:\n"
	".subsection 2\n"
	"3: br 1b\n"
	".previous"
	: "=&r"(prev), "=&r"(cmp), "=m"(*m)
	: "r"((long) old), "r"(new), "m"(*m) : "memory");

	return prev;
}

static inline unsigned long
____cmpxchg(_u64, volatile long *m, unsigned long old, unsigned long new)
{
	unsigned long prev, cmp;

	__asm__ __volatile__(
	"1: ldq_l %0,%5\n"
	" cmpeq %0,%3,%1\n"
	" beq %1,2f\n"
	" mov %4,%1\n"
	" stq_c %1,%2\n"
	" beq %1,3f\n"
		__ASM__MB
	"2:\n"
	".subsection 2\n"
	"3: br 1b\n"
	".previous"
	: "=&r"(prev), "=&r"(cmp), "=m"(*m)
	: "r"((long) old), "r"(new), "m"(*m) : "memory");

	return prev;
}

/* This function doesn't exist, so you'll get a linker error
   if something tries to do an invalid cmpxchg(). */
extern void __cmpxchg_called_with_bad_pointer(void);

static __always_inline unsigned long
____cmpxchg(, volatile void *ptr, unsigned long old, unsigned long new,
	      int size)
{
	switch (size) {
	case 1:
		return ____cmpxchg(_u8, ptr, old, new);
	case 2:
		return ____cmpxchg(_u16, ptr, old, new);
	case 4:
		return ____cmpxchg(_u32, ptr, old, new);
	case 8:
		return ____cmpxchg(_u64, ptr, old, new);
	}
	__cmpxchg_called_with_bad_pointer();
	return old;
}
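
/*
 * For illustration only (not part of this header): a minimal sketch of the
 * usual compare-and-swap retry loop built on the cmpxchg() wrapper that
 * asm/cmpxchg.h is expected to provide.  The function and variable names
 * below are assumptions, not kernel code:
 *
 *	static int hypothetical_counter;
 *
 *	static void hypothetical_add(int delta)
 *	{
 *		int old, prev;
 *
 *		do {
 *			old = hypothetical_counter;
 *			prev = cmpxchg(&hypothetical_counter, old, old + delta);
 *		} while (prev != old);	// retry if someone raced with us
 *	}
 */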

#endif