Commit | Line | Data |
---|---|---|
1da177e4 LT |
1 | #ifndef _ASM_GENERIC_PERCPU_H_ |
2 | #define _ASM_GENERIC_PERCPU_H_ | |
3 | #include <linux/compiler.h> | |
ae1ee11b | 4 | #include <linux/threads.h> |
1da177e4 | 5 | |
acdac872 | 6 | /* |
7 | * Determine the real variable name from the name visible in the | |
8 | * kernel sources. | |
9 | */ | |
10 | #define per_cpu_var(var) per_cpu__##var | |
11 | ||
1da177e4 LT |
12 | #ifdef CONFIG_SMP |
13 | ||
acdac872 | 14 | /* |
15 | * per_cpu_offset() is the offset that has to be added to a | |
16 | * percpu variable to get to the instance for a certain processor. | |
17 | * | |
18 | * Most arches use the __per_cpu_offset array for those offsets but | |
19 | * some arches have their own ways of determining the offset (x86_64, s390). | |
20 | */ | |
21 | #ifndef __per_cpu_offset | |
1da177e4 LT |
22 | extern unsigned long __per_cpu_offset[NR_CPUS]; |
23 | ||
a875a69f | 24 | #define per_cpu_offset(x) (__per_cpu_offset[x]) |
acdac872 | 25 | #endif |
26 | ||
27 | /* | |
28 | * Determine the offset for the currently active processor. | |
29 | * An arch may define __my_cpu_offset to provide a more effective | |
30 | * means of obtaining the offset to the per cpu variables of the | |
31 | * current processor. | |
32 | */ | |
33 | #ifndef __my_cpu_offset | |
34 | #define __my_cpu_offset per_cpu_offset(raw_smp_processor_id()) | |
1e835278 HD |
35 | #endif |
36 | #ifdef CONFIG_DEBUG_PREEMPT | |
acdac872 | 37 | #define my_cpu_offset per_cpu_offset(smp_processor_id()) |
38 | #else | |
39 | #define my_cpu_offset __my_cpu_offset | |
40 | #endif | |
41 | ||
42 | /* | |
43 | * Add an offset to a pointer but keep the pointer as is. | |
44 | * | |
45 | * Only S390 provides its own means of moving the pointer. | |
46 | */ | |
47 | #ifndef SHIFT_PERCPU_PTR | |
48 | #define SHIFT_PERCPU_PTR(__p, __offset) RELOC_HIDE((__p), (__offset)) | |
49 | #endif | |
a875a69f | 50 | |
acdac872 | 51 | /* |
dd5af90a | 52 | * A percpu variable may point to a discarded region. The following are |
acdac872 | 53 | * established ways to produce a usable pointer from the percpu variable |
54 | * offset. | |
55 | */ | |
56 | #define per_cpu(var, cpu) \ | |
57 | (*SHIFT_PERCPU_PTR(&per_cpu_var(var), per_cpu_offset(cpu))) | |
58 | #define __get_cpu_var(var) \ | |
59 | (*SHIFT_PERCPU_PTR(&per_cpu_var(var), my_cpu_offset)) | |
60 | #define __raw_get_cpu_var(var) \ | |
61 | (*SHIFT_PERCPU_PTR(&per_cpu_var(var), __my_cpu_offset)) | |
62 | ||
63 | ||
dd5af90a | 64 | #ifdef CONFIG_HAVE_SETUP_PER_CPU_AREA |
acdac872 | 65 | extern void setup_per_cpu_areas(void); |
66 | #endif | |
1da177e4 | 67 | |
1da177e4 LT |
68 | #else /* ! SMP */ |
69 | ||
acdac872 | 70 | #define per_cpu(var, cpu) (*((void)(cpu), &per_cpu_var(var))) |
71 | #define __get_cpu_var(var) per_cpu_var(var) | |
72 | #define __raw_get_cpu_var(var) per_cpu_var(var) | |
1da177e4 LT |
73 | |
74 | #endif /* SMP */ | |
75 | ||
acdac872 | 76 | #ifndef PER_CPU_ATTRIBUTES |
77 | #define PER_CPU_ATTRIBUTES | |
78 | #endif | |
79 | ||
80 | #define DECLARE_PER_CPU(type, name) extern PER_CPU_ATTRIBUTES \ | |
81 | __typeof__(type) per_cpu_var(name) | |
1da177e4 | 82 | |
6dbde353 IM |
83 | /* |
84 | * Optional methods for optimized non-lvalue per-cpu variable access. | |
85 | * | |
86 | * @var can be a percpu variable or a field of it and its size should | |
87 | * equal char, int or long. percpu_read() evaluates to a lvalue and | |
88 | * all others to void. | |
89 | * | |
90 | * These operations are guaranteed to be atomic w.r.t. preemption. | |
91 | * The generic versions use plain get/put_cpu_var(). Archs are | |
92 | * encouraged to implement single-instruction alternatives which don't | |
93 | * require preemption protection. | |
94 | */ | |
95 | #ifndef percpu_read | |
96 | # define percpu_read(var) \ | |
97 | ({ \ | |
98 | typeof(per_cpu_var(var)) __tmp_var__; \ | |
99 | __tmp_var__ = get_cpu_var(var); \ | |
100 | put_cpu_var(var); \ | |
101 | __tmp_var__; \ | |
102 | }) | |
103 | #endif | |
104 | ||
105 | #define __percpu_generic_to_op(var, val, op) \ | |
106 | do { \ | |
107 | get_cpu_var(var) op val; \ | |
108 | put_cpu_var(var); \ | |
109 | } while (0) | |
110 | ||
111 | #ifndef percpu_write | |
112 | # define percpu_write(var, val) __percpu_generic_to_op(var, (val), =) | |
113 | #endif | |
114 | ||
115 | #ifndef percpu_add | |
116 | # define percpu_add(var, val) __percpu_generic_to_op(var, (val), +=) | |
117 | #endif | |
118 | ||
119 | #ifndef percpu_sub | |
120 | # define percpu_sub(var, val) __percpu_generic_to_op(var, (val), -=) | |
121 | #endif | |
122 | ||
123 | #ifndef percpu_and | |
124 | # define percpu_and(var, val) __percpu_generic_to_op(var, (val), &=) | |
125 | #endif | |
126 | ||
127 | #ifndef percpu_or | |
128 | # define percpu_or(var, val) __percpu_generic_to_op(var, (val), |=) | |
129 | #endif | |
130 | ||
131 | #ifndef percpu_xor | |
132 | # define percpu_xor(var, val) __percpu_generic_to_op(var, (val), ^=) | |
133 | #endif | |
134 | ||
1da177e4 | 135 | #endif /* _ASM_GENERIC_PERCPU_H_ */ |