Commit | Line | Data |
---|---|---|
1da177e4 LT |
1 | /* |
2 | * This file is subject to the terms and conditions of the GNU General Public | |
3 | * License. See the file "COPYING" in the main directory of this archive | |
4 | * for more details. | |
5 | * | |
102fa15c | 6 | * Copyright (c) 1994 - 1997, 99, 2000, 06, 07 Ralf Baechle (ralf@linux-mips.org) |
1da177e4 LT |
7 | * Copyright (c) 1999, 2000 Silicon Graphics, Inc. |
8 | */ | |
9 | #ifndef _ASM_BITOPS_H | |
10 | #define _ASM_BITOPS_H | |
11 | ||
0624517d JS |
12 | #ifndef _LINUX_BITOPS_H |
13 | #error only <linux/bitops.h> can be included directly | |
14 | #endif | |
15 | ||
1da177e4 LT |
16 | #include <linux/compiler.h> |
17 | #include <linux/types.h> | |
0004a9df | 18 | #include <asm/barrier.h> |
1da177e4 | 19 | #include <asm/byteorder.h> /* sigh ... */ |
b0984c43 | 20 | #include <asm/compiler.h> |
1da177e4 | 21 | #include <asm/cpu-features.h> |
05490626 | 22 | #include <asm/llsc.h> |
4ffd8b38 RB |
23 | #include <asm/sgidefs.h> |
24 | #include <asm/war.h> | |
1da177e4 | 25 | |
/*
 * These are the "slower" versions of the functions and are in bitops.c.
 * These functions call raw_local_irq_{save,restore}().  They are the
 * fallbacks used by the inline functions below when the kernel cannot
 * use ll/sc sequences (the !kernel_uses_llsc paths).
 */
void __mips_set_bit(unsigned long nr, volatile unsigned long *addr);
void __mips_clear_bit(unsigned long nr, volatile unsigned long *addr);
void __mips_change_bit(unsigned long nr, volatile unsigned long *addr);
int __mips_test_and_set_bit(unsigned long nr,
			    volatile unsigned long *addr);
int __mips_test_and_set_bit_lock(unsigned long nr,
				 volatile unsigned long *addr);
int __mips_test_and_clear_bit(unsigned long nr,
			      volatile unsigned long *addr);
int __mips_test_and_change_bit(unsigned long nr,
			       volatile unsigned long *addr);

/*
 * set_bit - Atomically set a bit in memory
 * @nr: the bit to set
 * @addr: the address to start counting from
 *
 * This function is atomic and may not be reordered.  See __set_bit()
 * if you do not require the atomic guarantees.
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */
static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
{
	/* Word containing the bit, and the bit's position within it. */
	unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
	int bit = nr & SZLONG_MASK;
	unsigned long temp;

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		/*
		 * R10000 ll/sc errata workaround: retry the sequence via
		 * branch-likely (beqzl) inside the asm itself.
		 */
		__asm__ __volatile__(
		"	.set	arch=r4000				\n"
		"1:	" __LL "%0, %1			# set_bit	\n"
		"	or	%0, %2					\n"
		"	" __SC	"%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "=" GCC_OFF_SMALL_ASM() (*m)
		: "ir" (1UL << bit), GCC_OFF_SMALL_ASM() (*m));
#if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR6)
	} else if (kernel_uses_llsc && __builtin_constant_p(bit)) {
		/* MIPS R2/R6 with constant bit: set it with a single ins. */
		do {
			__asm__ __volatile__(
			"	" __LL "%0, %1		# set_bit	\n"
			"	" __INS "%0, %3, %2, 1			\n"
			"	" __SC "%0, %1				\n"
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
			: "ir" (bit), "r" (~0));
		} while (unlikely(!temp));	/* sc failed -> retry */
#endif /* CONFIG_CPU_MIPSR2 || CONFIG_CPU_MIPSR6 */
	} else if (kernel_uses_llsc) {
		do {
			__asm__ __volatile__(
			"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
			"	" __LL "%0, %1		# set_bit	\n"
			"	or	%0, %2				\n"
			"	" __SC	"%0, %1				\n"
			"	.set	mips0				\n"
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
			: "ir" (1UL << bit));
		} while (unlikely(!temp));	/* sc failed -> retry */
	} else
		/* No ll/sc: out-of-line, irq-disabling fallback. */
		__mips_set_bit(nr, addr);
}

94 | ||
/*
 * clear_bit - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * clear_bit() is atomic and may not be reordered.  However, it does
 * not contain a memory barrier, so if it is used for locking purposes,
 * you should call smp_mb__before_atomic() and/or smp_mb__after_atomic()
 * in order to ensure changes are visible on other processors.
 */
static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
{
	/* Word containing the bit, and the bit's position within it. */
	unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
	int bit = nr & SZLONG_MASK;
	unsigned long temp;

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		/* R10000 ll/sc errata workaround: retry via branch-likely. */
		__asm__ __volatile__(
		"	.set	arch=r4000				\n"
		"1:	" __LL "%0, %1			# clear_bit	\n"
		"	and	%0, %2					\n"
		"	" __SC "%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
		: "ir" (~(1UL << bit)));
#if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR6)
	} else if (kernel_uses_llsc && __builtin_constant_p(bit)) {
		/* MIPS R2/R6 with constant bit: clear it by inserting $0. */
		do {
			__asm__ __volatile__(
			"	" __LL "%0, %1		# clear_bit	\n"
			"	" __INS "%0, $0, %2, 1			\n"
			"	" __SC "%0, %1				\n"
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
			: "ir" (bit));
		} while (unlikely(!temp));	/* sc failed -> retry */
#endif /* CONFIG_CPU_MIPSR2 || CONFIG_CPU_MIPSR6 */
	} else if (kernel_uses_llsc) {
		do {
			__asm__ __volatile__(
			"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
			"	" __LL "%0, %1		# clear_bit	\n"
			"	and	%0, %2				\n"
			"	" __SC "%0, %1				\n"
			"	.set	mips0				\n"
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
			: "ir" (~(1UL << bit)));
		} while (unlikely(!temp));	/* sc failed -> retry */
	} else
		/* No ll/sc: out-of-line, irq-disabling fallback. */
		__mips_clear_bit(nr, addr);
}

146 | ||
/*
 * clear_bit_unlock - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * clear_bit_unlock() is atomic and implies release semantics before the
 * memory operation.  It can be used for an unlock.
 */
static inline void clear_bit_unlock(unsigned long nr, volatile unsigned long *addr)
{
	smp_mb__before_atomic();	/* release ordering for the unlock */
	clear_bit(nr, addr);
}

160 | ||
/*
 * change_bit - Toggle a bit in memory
 * @nr: Bit to change
 * @addr: Address to start counting from
 *
 * change_bit() is atomic and may not be reordered.
 * Note that @nr may be almost arbitrarily large; this function is not
 * restricted to acting on a single-word quantity.
 */
static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
{
	int bit = nr & SZLONG_MASK;	/* position within the target word */

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		/* R10000 ll/sc errata workaround: retry via branch-likely. */
		__asm__ __volatile__(
		"	.set	arch=r4000				\n"
		"1:	" __LL "%0, %1		# change_bit	\n"
		"	xor	%0, %2					\n"
		"	" __SC	"%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
		: "ir" (1UL << bit));
	} else if (kernel_uses_llsc) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		do {
			__asm__ __volatile__(
			"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
			"	" __LL "%0, %1		# change_bit	\n"
			"	xor	%0, %2				\n"
			"	" __SC	"%0, %1				\n"
			"	.set	mips0				\n"
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
			: "ir" (1UL << bit));
		} while (unlikely(!temp));	/* sc failed -> retry */
	} else
		/* No ll/sc: out-of-line, irq-disabling fallback. */
		__mips_change_bit(nr, addr);
}

204 | ||
/*
 * test_and_set_bit - Set a bit and return its old value
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_set_bit(unsigned long nr,
	volatile unsigned long *addr)
{
	int bit = nr & SZLONG_MASK;	/* position within the target word */
	unsigned long res;

	smp_mb__before_llsc();	/* barrier before the ll/sc sequence */

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		/* R10000 ll/sc errata workaround: retry via branch-likely. */
		__asm__ __volatile__(
		"	.set	arch=r4000				\n"
		"1:	" __LL "%0, %1		# test_and_set_bit	\n"
		"	or	%2, %0, %3				\n"
		"	" __SC	"%2, %1					\n"
		"	beqzl	%2, 1b					\n"
		"	and	%2, %0, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
		: "r" (1UL << bit)
		: "memory");
	} else if (kernel_uses_llsc) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		do {
			__asm__ __volatile__(
			"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
			"	" __LL "%0, %1	# test_and_set_bit	\n"
			"	or	%2, %0, %3			\n"
			"	" __SC	"%2, %1				\n"
			"	.set	mips0				\n"
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
			: "r" (1UL << bit)
			: "memory");
		} while (unlikely(!res));	/* sc failed -> retry */

		/* Recover the bit's old value from the ll-loaded word. */
		res = temp & (1UL << bit);
	} else
		res = __mips_test_and_set_bit(nr, addr);

	smp_llsc_mb();	/* barrier after the ll/sc sequence */

	return res != 0;
}

260 | ||
728697cd NP |
261 | /* |
262 | * test_and_set_bit_lock - Set a bit and return its old value | |
263 | * @nr: Bit to set | |
264 | * @addr: Address to count from | |
265 | * | |
266 | * This operation is atomic and implies acquire ordering semantics | |
267 | * after the memory operation. | |
268 | */ | |
269 | static inline int test_and_set_bit_lock(unsigned long nr, | |
270 | volatile unsigned long *addr) | |
271 | { | |
9de79c50 | 272 | int bit = nr & SZLONG_MASK; |
728697cd NP |
273 | unsigned long res; |
274 | ||
b791d119 | 275 | if (kernel_uses_llsc && R10000_LLSC_WAR) { |
728697cd NP |
276 | unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG); |
277 | unsigned long temp; | |
278 | ||
279 | __asm__ __volatile__( | |
a809d460 | 280 | " .set arch=r4000 \n" |
728697cd NP |
281 | "1: " __LL "%0, %1 # test_and_set_bit \n" |
282 | " or %2, %0, %3 \n" | |
283 | " " __SC "%2, %1 \n" | |
284 | " beqzl %2, 1b \n" | |
285 | " and %2, %0, %3 \n" | |
286 | " .set mips0 \n" | |
7837314d RB |
287 | : "=&r" (temp), "+m" (*m), "=&r" (res) |
288 | : "r" (1UL << bit) | |
728697cd | 289 | : "memory"); |
b791d119 | 290 | } else if (kernel_uses_llsc) { |
728697cd NP |
291 | unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG); |
292 | unsigned long temp; | |
293 | ||
7837314d RB |
294 | do { |
295 | __asm__ __volatile__( | |
87a927ef | 296 | " .set "MIPS_ISA_ARCH_LEVEL" \n" |
7837314d RB |
297 | " " __LL "%0, %1 # test_and_set_bit \n" |
298 | " or %2, %0, %3 \n" | |
299 | " " __SC "%2, %1 \n" | |
300 | " .set mips0 \n" | |
94bfb75a | 301 | : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res) |
7837314d RB |
302 | : "r" (1UL << bit) |
303 | : "memory"); | |
304 | } while (unlikely(!res)); | |
305 | ||
306 | res = temp & (1UL << bit); | |
92d11594 JQ |
307 | } else |
308 | res = __mips_test_and_set_bit_lock(nr, addr); | |
728697cd NP |
309 | |
310 | smp_llsc_mb(); | |
311 | ||
312 | return res != 0; | |
313 | } | |
/*
 * test_and_clear_bit - Clear a bit and return its old value
 * @nr: Bit to clear
 * @addr: Address to count from
 *
 * This operation is atomic and cannot be reordered.
 * It also implies a memory barrier.
 */
static inline int test_and_clear_bit(unsigned long nr,
	volatile unsigned long *addr)
{
	int bit = nr & SZLONG_MASK;	/* position within the target word */
	unsigned long res;

	smp_mb__before_llsc();	/* barrier before the ll/sc sequence */

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		/* R10000 ll/sc errata workaround: retry via branch-likely. */
		__asm__ __volatile__(
		"	.set	arch=r4000				\n"
		"1:	" __LL "%0, %1		# test_and_clear_bit	\n"
		"	or	%2, %0, %3				\n"
		"	xor	%2, %3					\n"
		"	" __SC	"%2, %1					\n"
		"	beqzl	%2, 1b					\n"
		"	and	%2, %0, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
		: "r" (1UL << bit)
		: "memory");
#if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR6)
	} else if (kernel_uses_llsc && __builtin_constant_p(nr)) {
		/*
		 * MIPS R2/R6: ext pulls out the old bit, ins clears it.
		 * NOTE(review): siblings gate on __builtin_constant_p(bit)
		 * rather than nr here — verify this asymmetry is intended.
		 */
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		do {
			__asm__ __volatile__(
			"	" __LL	"%0, %1 # test_and_clear_bit	\n"
			"	" __EXT "%2, %0, %3, 1			\n"
			"	" __INS "%0, $0, %3, 1			\n"
			"	" __SC	"%0, %1				\n"
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
			: "ir" (bit)
			: "memory");
		} while (unlikely(!temp));	/* sc failed -> retry */
#endif
	} else if (kernel_uses_llsc) {
		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
		unsigned long temp;

		do {
			__asm__ __volatile__(
			"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
			"	" __LL	"%0, %1 # test_and_clear_bit	\n"
			"	or	%2, %0, %3			\n"
			"	xor	%2, %3				\n"
			"	" __SC	"%2, %1				\n"
			"	.set	mips0				\n"
			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
			: "r" (1UL << bit)
			: "memory");
		} while (unlikely(!res));	/* sc failed -> retry */

		/* Recover the bit's old value from the ll-loaded word. */
		res = temp & (1UL << bit);
	} else
		res = __mips_test_and_clear_bit(nr, addr);

	smp_llsc_mb();	/* barrier after the ll/sc sequence */

	return res != 0;
}

387 | ||
1da177e4 LT |
388 | /* |
389 | * test_and_change_bit - Change a bit and return its old value | |
390 | * @nr: Bit to change | |
391 | * @addr: Address to count from | |
392 | * | |
393 | * This operation is atomic and cannot be reordered. | |
394 | * It also implies a memory barrier. | |
395 | */ | |
396 | static inline int test_and_change_bit(unsigned long nr, | |
397 | volatile unsigned long *addr) | |
398 | { | |
9de79c50 | 399 | int bit = nr & SZLONG_MASK; |
ff72b7a6 | 400 | unsigned long res; |
b961153b | 401 | |
f252ffd5 | 402 | smp_mb__before_llsc(); |
c8f30ae5 | 403 | |
b791d119 | 404 | if (kernel_uses_llsc && R10000_LLSC_WAR) { |
1da177e4 | 405 | unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG); |
ff72b7a6 | 406 | unsigned long temp; |
1da177e4 LT |
407 | |
408 | __asm__ __volatile__( | |
a809d460 | 409 | " .set arch=r4000 \n" |
aac8aa77 | 410 | "1: " __LL "%0, %1 # test_and_change_bit \n" |
1da177e4 | 411 | " xor %2, %0, %3 \n" |
aac8aa77 | 412 | " " __SC "%2, %1 \n" |
1da177e4 LT |
413 | " beqzl %2, 1b \n" |
414 | " and %2, %0, %3 \n" | |
aac8aa77 | 415 | " .set mips0 \n" |
94bfb75a | 416 | : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res) |
7837314d | 417 | : "r" (1UL << bit) |
1da177e4 | 418 | : "memory"); |
b791d119 | 419 | } else if (kernel_uses_llsc) { |
1da177e4 | 420 | unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG); |
ff72b7a6 | 421 | unsigned long temp; |
1da177e4 | 422 | |
7837314d RB |
423 | do { |
424 | __asm__ __volatile__( | |
87a927ef | 425 | " .set "MIPS_ISA_ARCH_LEVEL" \n" |
70342287 | 426 | " " __LL "%0, %1 # test_and_change_bit \n" |
7837314d RB |
427 | " xor %2, %0, %3 \n" |
428 | " " __SC "\t%2, %1 \n" | |
429 | " .set mips0 \n" | |
94bfb75a | 430 | : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res) |
7837314d RB |
431 | : "r" (1UL << bit) |
432 | : "memory"); | |
433 | } while (unlikely(!res)); | |
434 | ||
435 | res = temp & (1UL << bit); | |
92d11594 JQ |
436 | } else |
437 | res = __mips_test_and_change_bit(nr, addr); | |
0004a9df | 438 | |
17099b11 | 439 | smp_llsc_mb(); |
ff72b7a6 RB |
440 | |
441 | return res != 0; | |
1da177e4 LT |
442 | } |
443 | ||
3c9ee7ef | 444 | #include <asm-generic/bitops/non-atomic.h> |
1da177e4 | 445 | |
/*
 * __clear_bit_unlock - Clears a bit in memory
 * @nr: Bit to clear
 * @addr: Address to start counting from
 *
 * __clear_bit_unlock() is non-atomic and implies release semantics before
 * the memory operation.  It can be used for an unlock if no other CPUs can
 * concurrently modify other bits in the word.
 */
static inline void __clear_bit_unlock(unsigned long nr, volatile unsigned long *addr)
{
	smp_mb__before_llsc();	/* release ordering for the unlock */
	__clear_bit(nr, addr);
	/* NOTE(review): presumably pushes the store toward memory promptly —
	 * see nudge_writes() in asm/barrier.h. */
	nudge_writes();
}

461 | ||
/*
 * Return the bit position (0..63) of the most significant 1 bit in a word
 * Returns -1 if no 1 bit exists
 * (NOTE(review): only the clz/dclz paths below yield -1 for word == 0;
 * the pure-C fallback returns 0 in that case, so callers should not rely
 * on the -1 result and should avoid passing 0.)
 */
static inline unsigned long __fls(unsigned long word)
{
	int num;

	/* 32-bit kernel, runtime-variable word, CPU has clz: use it. */
	if (BITS_PER_LONG == 32 && !__builtin_constant_p(word) &&
	    __builtin_constant_p(cpu_has_clo_clz) && cpu_has_clo_clz) {
		__asm__(
		"	.set	push					\n"
		"	.set	"MIPS_ISA_LEVEL"			\n"
		"	clz	%0, %1					\n"
		"	.set	pop					\n"
		: "=r" (num)
		: "r" (word));

		return 31 - num;
	}

	/* 64-bit kernel, runtime-variable word, MIPS64 CPU: use dclz. */
	if (BITS_PER_LONG == 64 && !__builtin_constant_p(word) &&
	    __builtin_constant_p(cpu_has_mips64) && cpu_has_mips64) {
		__asm__(
		"	.set	push					\n"
		"	.set	"MIPS_ISA_LEVEL"			\n"
		"	dclz	%0, %1					\n"
		"	.set	pop					\n"
		: "=r" (num)
		: "r" (word));

		return 63 - num;
	}

	/* Generic fallback: binary search down from the top bit. */
	num = BITS_PER_LONG - 1;

#if BITS_PER_LONG == 64
	if (!(word & (~0ul << 32))) {
		num -= 32;
		word <<= 32;
	}
#endif
	if (!(word & (~0ul << (BITS_PER_LONG-16)))) {
		num -= 16;
		word <<= 16;
	}
	if (!(word & (~0ul << (BITS_PER_LONG-8)))) {
		num -= 8;
		word <<= 8;
	}
	if (!(word & (~0ul << (BITS_PER_LONG-4)))) {
		num -= 4;
		word <<= 4;
	}
	if (!(word & (~0ul << (BITS_PER_LONG-2)))) {
		num -= 2;
		word <<= 2;
	}
	if (!(word & (~0ul << (BITS_PER_LONG-1))))
		num -= 1;
	return num;
}

65903265 RB |
524 | |
/*
 * __ffs - find first bit in word.
 * @word: The word to search
 *
 * Returns the index (0..SZLONG-1) of the least significant set bit.
 * Result is undefined when @word is 0, so callers must check for 0 first.
 */
static inline unsigned long __ffs(unsigned long word)
{
	/* Isolate the lowest set bit, then locate it with __fls(). */
	unsigned long lowest = word & -word;

	return __fls(lowest);
}

536 | ||
/*
 * fls - find last bit set.
 * @word: The word to search
 *
 * This is defined the same way as ffs.
 * Note fls(0) = 0, fls(1) = 1, fls(0x80000000) = 32.
 */
static inline int fls(int x)
{
	int r;

	/* Runtime-variable x on a CPU known to have clz: use it. */
	if (!__builtin_constant_p(x) &&
	    __builtin_constant_p(cpu_has_clo_clz) && cpu_has_clo_clz) {
		__asm__(
		"	.set	push					\n"
		"	.set	"MIPS_ISA_LEVEL"			\n"
		"	clz	%0, %1					\n"
		"	.set	pop					\n"
		: "=r" (x)
		: "r" (x));

		return 32 - x;	/* clz gives 32 for x == 0, so fls(0) == 0 */
	}

	/* Generic fallback: binary search down from bit 31. */
	r = 32;
	if (!x)
		return 0;
	if (!(x & 0xffff0000u)) {
		x <<= 16;
		r -= 16;
	}
	if (!(x & 0xff000000u)) {
		x <<= 8;
		r -= 8;
	}
	if (!(x & 0xf0000000u)) {
		x <<= 4;
		r -= 4;
	}
	if (!(x & 0xc0000000u)) {
		x <<= 2;
		r -= 2;
	}
	if (!(x & 0x80000000u)) {
		x <<= 1;
		r -= 1;
	}
	return r;
}

4816227b | 586 | |
bc818247 | 587 | #include <asm-generic/bitops/fls64.h> |
65903265 RB |
588 | |
/*
 * ffs - find first bit set.
 * @word: The word to search
 *
 * Matches the libc and compiler builtin ffs() convention (1-based index,
 * ffs(0) == 0) and therefore differs in spirit from ffz (man ffs).
 */
static inline int ffs(int word)
{
	/* Isolate the lowest set bit and hand it to fls(); 0 maps to 0. */
	return word ? fls(word & -word) : 0;
}

604 | ||
bc818247 | 605 | #include <asm-generic/bitops/ffz.h> |
3c9ee7ef | 606 | #include <asm-generic/bitops/find.h> |
1da177e4 LT |
607 | |
608 | #ifdef __KERNEL__ | |
609 | ||
3c9ee7ef | 610 | #include <asm-generic/bitops/sched.h> |
1a403d1d DD |
611 | |
612 | #include <asm/arch_hweight.h> | |
613 | #include <asm-generic/bitops/const_hweight.h> | |
614 | ||
861b5ae7 | 615 | #include <asm-generic/bitops/le.h> |
3c9ee7ef | 616 | #include <asm-generic/bitops/ext2-atomic.h> |
1da177e4 LT |
617 | |
618 | #endif /* __KERNEL__ */ | |
619 | ||
620 | #endif /* _ASM_BITOPS_H */ |