Commit | Line | Data |
---|---|---|
1da177e4 LT |
1 | /* |
2 | * atomic32.c: 32-bit atomic_t implementation | |
3 | * | |
4 | * Copyright (C) 2004 Keith M Wesolowski | |
6197fe4d | 5 | * Copyright (C) 2007 Kyle McMartin |
1da177e4 LT |
6 | * |
7 | * Based on asm-parisc/atomic.h Copyright (C) 2000 Philipp Rumpf | |
8 | */ | |
9 | ||
60063497 | 10 | #include <linux/atomic.h> |
1da177e4 LT |
11 | #include <linux/spinlock.h> |
12 | #include <linux/module.h> | |
13 | ||
#ifdef CONFIG_SMP
/*
 * Atomics are emulated with spinlocks.  To reduce contention, the lock
 * protecting a given atomic_t is picked by hashing the variable's
 * address (shifted right by 8, then masked) into a small lock array.
 */
#define ATOMIC_HASH_SIZE 4
#define ATOMIC_HASH(a) (&__atomic_hash[(((unsigned long)a)>>8) & (ATOMIC_HASH_SIZE-1)])

spinlock_t __atomic_hash[ATOMIC_HASH_SIZE] = {
	[0 ... (ATOMIC_HASH_SIZE-1)] = __SPIN_LOCK_UNLOCKED(__atomic_hash)
};

#else /* SMP */

/* UP: a single dummy lock suffices; irqsave alone provides exclusion. */
static DEFINE_SPINLOCK(dummy);
#define ATOMIC_HASH_SIZE 1
#define ATOMIC_HASH(a) (&dummy)

#endif /* SMP */
29 | ||
/*
 * ATOMIC_OP(op, cop) expands to atomic_<op>_return(): apply the compound
 * assignment operator <cop> to v->counter under the hashed spinlock and
 * return the resulting value.  irqsave makes the sequence safe against
 * use from interrupt context on the same CPU.
 */
#define ATOMIC_OP(op, cop)						\
int atomic_##op##_return(int i, atomic_t *v)				\
{									\
	int ret;							\
	unsigned long flags;						\
	spin_lock_irqsave(ATOMIC_HASH(v), flags);			\
									\
	ret = (v->counter cop i);					\
									\
	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);			\
	return ret;							\
}									\
EXPORT_SYMBOL(atomic_##op##_return);

/* Only atomic_add_return() is generated here. */
ATOMIC_OP(add, +=)

#undef ATOMIC_OP
1da177e4 | 47 | |
1a17fdc4 AL |
48 | int atomic_xchg(atomic_t *v, int new) |
49 | { | |
50 | int ret; | |
51 | unsigned long flags; | |
52 | ||
53 | spin_lock_irqsave(ATOMIC_HASH(v), flags); | |
54 | ret = v->counter; | |
55 | v->counter = new; | |
56 | spin_unlock_irqrestore(ATOMIC_HASH(v), flags); | |
57 | return ret; | |
58 | } | |
59 | EXPORT_SYMBOL(atomic_xchg); | |
60 | ||
4a6dae6d | 61 | int atomic_cmpxchg(atomic_t *v, int old, int new) |
1da177e4 | 62 | { |
4a6dae6d | 63 | int ret; |
1da177e4 | 64 | unsigned long flags; |
1da177e4 | 65 | |
4a6dae6d NP |
66 | spin_lock_irqsave(ATOMIC_HASH(v), flags); |
67 | ret = v->counter; | |
68 | if (likely(ret == old)) | |
69 | v->counter = new; | |
1da177e4 LT |
70 | |
71 | spin_unlock_irqrestore(ATOMIC_HASH(v), flags); | |
4a6dae6d | 72 | return ret; |
1da177e4 | 73 | } |
74e61dee | 74 | EXPORT_SYMBOL(atomic_cmpxchg); |
1da177e4 | 75 | |
678624e4 | 76 | int __atomic_add_unless(atomic_t *v, int a, int u) |
8426e1f6 NP |
77 | { |
78 | int ret; | |
79 | unsigned long flags; | |
80 | ||
81 | spin_lock_irqsave(ATOMIC_HASH(v), flags); | |
82 | ret = v->counter; | |
83 | if (ret != u) | |
84 | v->counter += a; | |
85 | spin_unlock_irqrestore(ATOMIC_HASH(v), flags); | |
a61b5829 | 86 | return ret; |
8426e1f6 | 87 | } |
678624e4 | 88 | EXPORT_SYMBOL(__atomic_add_unless); |
8426e1f6 | 89 | |
/* Atomic operations are already serializing */

/*
 * atomic_set - unconditionally store @i into @v->counter.
 * Even a plain store must take the hashed lock here so it cannot
 * interleave with a concurrent read-modify-write emulated above.
 */
void atomic_set(atomic_t *v, int i)
{
	unsigned long flags;

	spin_lock_irqsave(ATOMIC_HASH(v), flags);
	v->counter = i;
	spin_unlock_irqrestore(ATOMIC_HASH(v), flags);
}
EXPORT_SYMBOL(atomic_set);
8a8b836b DM |
100 | |
/*
 * ___set_bit - atomically OR @mask into *@addr.
 * Returns the masked bits as they were before the store (non-zero if
 * any requested bit was already set).
 */
unsigned long ___set_bit(unsigned long *addr, unsigned long mask)
{
	unsigned long flags, prev;

	spin_lock_irqsave(ATOMIC_HASH(addr), flags);
	prev = *addr;
	*addr = prev | mask;
	spin_unlock_irqrestore(ATOMIC_HASH(addr), flags);

	return prev & mask;
}
EXPORT_SYMBOL(___set_bit);
113 | ||
/*
 * ___clear_bit - atomically clear the @mask bits in *@addr.
 * Returns the masked bits as they were before the store (non-zero if
 * any requested bit had been set).
 */
unsigned long ___clear_bit(unsigned long *addr, unsigned long mask)
{
	unsigned long flags, prev;

	spin_lock_irqsave(ATOMIC_HASH(addr), flags);
	prev = *addr;
	*addr = prev & ~mask;
	spin_unlock_irqrestore(ATOMIC_HASH(addr), flags);

	return prev & mask;
}
EXPORT_SYMBOL(___clear_bit);
126 | ||
/*
 * ___change_bit - atomically toggle the @mask bits in *@addr.
 * Returns the masked bits as they were before the toggle.
 */
unsigned long ___change_bit(unsigned long *addr, unsigned long mask)
{
	unsigned long flags, prev;

	spin_lock_irqsave(ATOMIC_HASH(addr), flags);
	prev = *addr;
	*addr = prev ^ mask;
	spin_unlock_irqrestore(ATOMIC_HASH(addr), flags);

	return prev & mask;
}
EXPORT_SYMBOL(___change_bit);
6197fe4d KM |
139 | |
140 | unsigned long __cmpxchg_u32(volatile u32 *ptr, u32 old, u32 new) | |
141 | { | |
142 | unsigned long flags; | |
143 | u32 prev; | |
144 | ||
1fb8812b | 145 | spin_lock_irqsave(ATOMIC_HASH(ptr), flags); |
6197fe4d KM |
146 | if ((prev = *ptr) == old) |
147 | *ptr = new; | |
1fb8812b | 148 | spin_unlock_irqrestore(ATOMIC_HASH(ptr), flags); |
6197fe4d KM |
149 | |
150 | return (unsigned long)prev; | |
151 | } | |
152 | EXPORT_SYMBOL(__cmpxchg_u32); | |
1a17fdc4 AL |
153 | |
154 | unsigned long __xchg_u32(volatile u32 *ptr, u32 new) | |
155 | { | |
156 | unsigned long flags; | |
157 | u32 prev; | |
158 | ||
159 | spin_lock_irqsave(ATOMIC_HASH(ptr), flags); | |
160 | prev = *ptr; | |
161 | *ptr = new; | |
162 | spin_unlock_irqrestore(ATOMIC_HASH(ptr), flags); | |
163 | ||
164 | return (unsigned long)prev; | |
165 | } | |
166 | EXPORT_SYMBOL(__xchg_u32); |