/* [linux-2.6-block.git] include/asm-generic/percpu.h */

#ifndef _ASM_GENERIC_PERCPU_H_
#define _ASM_GENERIC_PERCPU_H_

#include <linux/compiler.h>
#include <linux/threads.h>
#include <linux/percpu-defs.h>

#ifdef CONFIG_SMP

/*
 * per_cpu_offset() is the offset that has to be added to a
 * percpu variable's address to get to the instance for a given processor.
 *
 * Most arches use the __per_cpu_offset array for those offsets, but
 * some arches have their own ways of determining the offset (x86_64, s390).
 */
#ifndef __per_cpu_offset
extern unsigned long __per_cpu_offset[NR_CPUS];

#define per_cpu_offset(x) (__per_cpu_offset[x])
#endif

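/*
 * Illustration only (the real accessors live in linux/percpu-defs.h):
 * under SMP, per_cpu(var, cpu) resolves to roughly
 *
 *	*SHIFT_PERCPU_PTR(&(var), per_cpu_offset(cpu))
 *
 * so the per-CPU offset selects which CPU's copy of the variable is
 * addressed.
 */
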
/*
 * Determine the offset for the currently active processor.
 * An arch may define __my_cpu_offset to provide a more efficient
 * means of obtaining the offset to the per cpu variables of the
 * current processor.
 */
#ifndef __my_cpu_offset
#define __my_cpu_offset per_cpu_offset(raw_smp_processor_id())
#endif
#ifdef CONFIG_DEBUG_PREEMPT
#define my_cpu_offset per_cpu_offset(smp_processor_id())
#else
#define my_cpu_offset __my_cpu_offset
#endif

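/*
 * Note: my_cpu_offset only differs from __my_cpu_offset under
 * CONFIG_DEBUG_PREEMPT, where smp_processor_id() additionally checks
 * that it is safe to use the current CPU number (i.e. that the caller
 * cannot be migrated to another CPU).
 */
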
/*
 * An arch may define arch_raw_cpu_ptr() to provide more efficient address
 * translations for raw_cpu_ptr().
 */
#ifndef arch_raw_cpu_ptr
#define arch_raw_cpu_ptr(ptr)	SHIFT_PERCPU_PTR(ptr, __my_cpu_offset)
#endif

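/*
 * Note: SHIFT_PERCPU_PTR() (linux/percpu-defs.h) adds the offset to the
 * pointer while hiding the arithmetic from the compiler (RELOC_HIDE()),
 * which is what the generic fallback above relies on.
 */
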
#ifdef CONFIG_HAVE_SETUP_PER_CPU_AREA
extern void setup_per_cpu_areas(void);
#endif

#endif	/* SMP */

#ifndef PER_CPU_BASE_SECTION
#ifdef CONFIG_SMP
#define PER_CPU_BASE_SECTION ".data..percpu"
#else
#define PER_CPU_BASE_SECTION ".data"
#endif
#endif

#ifndef PER_CPU_ATTRIBUTES
#define PER_CPU_ATTRIBUTES
#endif

#ifndef PER_CPU_DEF_ATTRIBUTES
#define PER_CPU_DEF_ATTRIBUTES
#endif

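/*
 * Generic fallbacks for the raw_cpu_*() operations: a plain access to the
 * local CPU's instance with no protection against preemption or
 * interrupts.  Callers (or the this_cpu_*() variants below) must provide
 * that protection themselves.
 */
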
#define raw_cpu_generic_to_op(pcp, val, op)				\
do {									\
	*raw_cpu_ptr(&(pcp)) op val;					\
} while (0)

#define raw_cpu_generic_add_return(pcp, val)				\
({									\
	raw_cpu_add(pcp, val);						\
	raw_cpu_read(pcp);						\
})

#define raw_cpu_generic_xchg(pcp, nval)					\
({									\
	typeof(pcp) __ret;						\
	__ret = raw_cpu_read(pcp);					\
	raw_cpu_write(pcp, nval);					\
	__ret;								\
})

#define raw_cpu_generic_cmpxchg(pcp, oval, nval)			\
({									\
	typeof(pcp) __ret;						\
	__ret = raw_cpu_read(pcp);					\
	if (__ret == (oval))						\
		raw_cpu_write(pcp, nval);				\
	__ret;								\
})

#define raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
({									\
	int __ret = 0;							\
	if (raw_cpu_read(pcp1) == (oval1) &&				\
	    raw_cpu_read(pcp2) == (oval2)) {				\
		raw_cpu_write(pcp1, nval1);				\
		raw_cpu_write(pcp2, nval2);				\
		__ret = 1;						\
	}								\
	(__ret);							\
})

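/*
 * Generic fallbacks for the this_cpu_*() operations.  They are made safe
 * against preemption and local interrupts by wrapping the corresponding
 * raw access in preempt_disable()/preempt_enable() (for the read) or
 * raw_local_irq_save()/raw_local_irq_restore() (for the read-modify-write
 * operations).
 */
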
#define this_cpu_generic_read(pcp)					\
({									\
	typeof(pcp) __ret;						\
	preempt_disable();						\
	__ret = *this_cpu_ptr(&(pcp));					\
	preempt_enable();						\
	__ret;								\
})

#define this_cpu_generic_to_op(pcp, val, op)				\
do {									\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	*raw_cpu_ptr(&(pcp)) op val;					\
	raw_local_irq_restore(__flags);					\
} while (0)

#define this_cpu_generic_add_return(pcp, val)				\
({									\
	typeof(pcp) __ret;						\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	raw_cpu_add(pcp, val);						\
	__ret = raw_cpu_read(pcp);					\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})

#define this_cpu_generic_xchg(pcp, nval)				\
({									\
	typeof(pcp) __ret;						\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_read(pcp);					\
	raw_cpu_write(pcp, nval);					\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})

#define this_cpu_generic_cmpxchg(pcp, oval, nval)			\
({									\
	typeof(pcp) __ret;						\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_read(pcp);					\
	if (__ret == (oval))						\
		raw_cpu_write(pcp, nval);				\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})

#define this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
({									\
	int __ret;							\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_cmpxchg_double(pcp1, pcp2,		\
			oval1, oval2, nval1, nval2);			\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})

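/*
 * Per-size (1, 2, 4 and 8 byte) operations.  An architecture may override
 * any individual raw_cpu_*_N below in its asm/percpu.h; whatever is left
 * undefined falls back to the generic helpers above.
 */
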
#ifndef raw_cpu_read_1
#define raw_cpu_read_1(pcp)		(*raw_cpu_ptr(&(pcp)))
#endif
#ifndef raw_cpu_read_2
#define raw_cpu_read_2(pcp)		(*raw_cpu_ptr(&(pcp)))
#endif
#ifndef raw_cpu_read_4
#define raw_cpu_read_4(pcp)		(*raw_cpu_ptr(&(pcp)))
#endif
#ifndef raw_cpu_read_8
#define raw_cpu_read_8(pcp)		(*raw_cpu_ptr(&(pcp)))
#endif

#ifndef raw_cpu_write_1
#define raw_cpu_write_1(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_2
#define raw_cpu_write_2(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_4
#define raw_cpu_write_4(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_8
#define raw_cpu_write_8(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
#endif

#ifndef raw_cpu_add_1
#define raw_cpu_add_1(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_2
#define raw_cpu_add_2(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_4
#define raw_cpu_add_4(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_8
#define raw_cpu_add_8(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
#endif

#ifndef raw_cpu_and_1
#define raw_cpu_and_1(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_2
#define raw_cpu_and_2(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_4
#define raw_cpu_and_4(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_8
#define raw_cpu_and_8(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
#endif

#ifndef raw_cpu_or_1
#define raw_cpu_or_1(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_2
#define raw_cpu_or_2(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_4
#define raw_cpu_or_4(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_8
#define raw_cpu_or_8(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
#endif

#ifndef raw_cpu_add_return_1
#define raw_cpu_add_return_1(pcp, val)	raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_2
#define raw_cpu_add_return_2(pcp, val)	raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_4
#define raw_cpu_add_return_4(pcp, val)	raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_8
#define raw_cpu_add_return_8(pcp, val)	raw_cpu_generic_add_return(pcp, val)
#endif

#ifndef raw_cpu_xchg_1
#define raw_cpu_xchg_1(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_2
#define raw_cpu_xchg_2(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_4
#define raw_cpu_xchg_4(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_8
#define raw_cpu_xchg_8(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
#endif

#ifndef raw_cpu_cmpxchg_1
#define raw_cpu_cmpxchg_1(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_2
#define raw_cpu_cmpxchg_2(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_4
#define raw_cpu_cmpxchg_4(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_8
#define raw_cpu_cmpxchg_8(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif

#ifndef raw_cpu_cmpxchg_double_1
#define raw_cpu_cmpxchg_double_1(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef raw_cpu_cmpxchg_double_2
#define raw_cpu_cmpxchg_double_2(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef raw_cpu_cmpxchg_double_4
#define raw_cpu_cmpxchg_double_4(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef raw_cpu_cmpxchg_double_8
#define raw_cpu_cmpxchg_double_8(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif

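/*
 * Likewise, per-size fallbacks for the interrupt-safe this_cpu_*_N
 * operations.
 */
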
#ifndef this_cpu_read_1
#define this_cpu_read_1(pcp)		this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_2
#define this_cpu_read_2(pcp)		this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_4
#define this_cpu_read_4(pcp)		this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_8
#define this_cpu_read_8(pcp)		this_cpu_generic_read(pcp)
#endif

#ifndef this_cpu_write_1
#define this_cpu_write_1(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_2
#define this_cpu_write_2(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_4
#define this_cpu_write_4(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_8
#define this_cpu_write_8(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
#endif

#ifndef this_cpu_add_1
#define this_cpu_add_1(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_2
#define this_cpu_add_2(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_4
#define this_cpu_add_4(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_8
#define this_cpu_add_8(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
#endif

#ifndef this_cpu_and_1
#define this_cpu_and_1(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_2
#define this_cpu_and_2(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_4
#define this_cpu_and_4(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_8
#define this_cpu_and_8(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
#endif

#ifndef this_cpu_or_1
#define this_cpu_or_1(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_2
#define this_cpu_or_2(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_4
#define this_cpu_or_4(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_8
#define this_cpu_or_8(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
#endif

#ifndef this_cpu_add_return_1
#define this_cpu_add_return_1(pcp, val)	this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_2
#define this_cpu_add_return_2(pcp, val)	this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_4
#define this_cpu_add_return_4(pcp, val)	this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_8
#define this_cpu_add_return_8(pcp, val)	this_cpu_generic_add_return(pcp, val)
#endif

#ifndef this_cpu_xchg_1
#define this_cpu_xchg_1(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_2
#define this_cpu_xchg_2(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_4
#define this_cpu_xchg_4(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_8
#define this_cpu_xchg_8(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
#endif

#ifndef this_cpu_cmpxchg_1
#define this_cpu_cmpxchg_1(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_2
#define this_cpu_cmpxchg_2(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_4
#define this_cpu_cmpxchg_4(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_8
#define this_cpu_cmpxchg_8(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif

#ifndef this_cpu_cmpxchg_double_1
#define this_cpu_cmpxchg_double_1(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef this_cpu_cmpxchg_double_2
#define this_cpu_cmpxchg_double_2(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef this_cpu_cmpxchg_double_4
#define this_cpu_cmpxchg_double_4(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef this_cpu_cmpxchg_double_8
#define this_cpu_cmpxchg_double_8(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif

#endif /* _ASM_GENERIC_PERCPU_H_ */