Commit | Line | Data |
---|---|---|
e589ed23 MP |
1 | #ifndef _ASM_ARM_FUTEX_H |
2 | #define _ASM_ARM_FUTEX_H | |
3 | ||
4 | #ifdef __KERNEL__ | |
5 | ||
c1b0db56 WD |
6 | #if defined(CONFIG_CPU_USE_DOMAINS) && defined(CONFIG_SMP) |
7 | /* ARM doesn't provide unprivileged exclusive memory accessors */ | |
8 | #include <asm-generic/futex.h> | |
9 | #else | |
10 | ||
11 | #include <linux/futex.h> | |
12 | #include <linux/uaccess.h> | |
13 | #include <asm/errno.h> | |
14 | ||
/*
 * Shared exception-table / fixup tail for the futex inline asm below.
 *
 * The asm bodies label their two user-access instructions 1: and 2:.
 * This macro emits __ex_table entries mapping both of them to the
 * fixup code at 4:, which loads @err_reg (the -EFAULT operand) into
 * output %0 and branches back to the normal exit point at 3:.
 */
#define __futex_atomic_ex_table(err_reg) \
	"3:\n" \
	" .pushsection __ex_table,\"a\"\n" \
	" .align 3\n" \
	" .long 1b, 4f, 2b, 4f\n" \
	" .popsection\n" \
	" .pushsection .fixup,\"ax\"\n" \
	" .align 2\n" \
	"4: mov %0, " err_reg "\n" \
	" b 3b\n" \
	" .popsection"
26 | ||
e589ed23 | 27 | #ifdef CONFIG_SMP |
4732efbe | 28 | |
/*
 * SMP variant: apply @insn to the user word at @uaddr with an
 * ldrex/strex retry loop.
 *
 * @insn computes the new value into %0 from the loaded old value in
 * %1 and @oparg in %4.  %0 (ret) doubles as scratch for the new value;
 * on success it is overwritten with 0, and the fixup from
 * __futex_atomic_ex_table sets it to -EFAULT (%5) on a fault.
 * %2 (tmp) receives the strex status; the loop retries while the
 * exclusive monitor was lost.  smp_mb() provides the leading barrier
 * the futex core expects.
 */
#define __futex_atomic_op(insn, ret, oldval, tmp, uaddr, oparg)	\
	smp_mb(); \
	__asm__ __volatile__( \
	"1: ldrex %1, [%3]\n" \
	" " insn "\n" \
	"2: strex %2, %0, [%3]\n" \
	" teq %2, #0\n" \
	" bne 1b\n" \
	" mov %0, #0\n" \
	__futex_atomic_ex_table("%5") \
	: "=&r" (ret), "=&r" (oldval), "=&r" (tmp) \
	: "r" (uaddr), "r" (oparg), "Ir" (-EFAULT) \
	: "cc", "memory")
42 | ||
/*
 * SMP compare-and-exchange on a user-space futex word.
 *
 * Atomically compare *@uaddr with @oldval and, if they match, store
 * @newval.  The value actually read is returned through *@uval so the
 * caller can tell whether the exchange happened.
 *
 * Returns 0 on completion (match or mismatch) or -EFAULT if the user
 * access faulted.  The ldrex/strexeq loop retries when the exclusive
 * monitor is lost (strex status != 0); on a mismatch movne forces the
 * status register to 0 so the loop exits with ret == 0.  The explicit
 * "ite eq" keeps the 2b label assemblable in Thumb-2, where strexeq
 * and movne must sit inside an IT block.  smp_mb() on both sides
 * gives full-barrier semantics.
 */
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
			      u32 oldval, u32 newval)
{
	int ret;
	u32 val;

	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
		return -EFAULT;

	smp_mb();
	__asm__ __volatile__("@futex_atomic_cmpxchg_inatomic\n"
	"1: ldrex %1, [%4]\n"
	" teq %1, %2\n"
	" ite eq @ explicit IT needed for the 2b label\n"
	"2: strexeq %0, %3, [%4]\n"
	" movne %0, #0\n"
	" teq %0, #0\n"
	" bne 1b\n"
	__futex_atomic_ex_table("%5")
	: "=&r" (ret), "=&r" (val)
	: "r" (oldval), "r" (newval), "r" (uaddr), "Ir" (-EFAULT)
	: "cc", "memory");
	smp_mb();

	*uval = val;
	return ret;
}
4732efbe | 71 | |
e589ed23 MP |
72 | #else /* !SMP, we can work around lack of atomic ops by disabling preemption */ |
73 | ||
e589ed23 | 74 | #include <linux/preempt.h> |
247055aa | 75 | #include <asm/domain.h> |
e589ed23 | 76 | |
/*
 * UP variant: no exclusive accessors needed.  A plain unprivileged
 * load/modify/store (TUSER honours CONFIG_CPU_USE_DOMAINS) is
 * sufficient because the caller runs with preemption disabled (see
 * the !SMP comment above), so nothing can interleave.
 *
 * As in the SMP variant, @insn computes the new value into %0 from
 * the old value in %1 and @oparg in %4; %0 becomes 0 on success or
 * -EFAULT (%5) via the fixup on a fault.  %2 (tmp) is clobbered only
 * to keep the operand layout identical to the SMP macro.
 */
#define __futex_atomic_op(insn, ret, oldval, tmp, uaddr, oparg)	\
	__asm__ __volatile__( \
	"1: " TUSER(ldr) " %1, [%3]\n" \
	" " insn "\n" \
	"2: " TUSER(str) " %0, [%3]\n" \
	" mov %0, #0\n" \
	__futex_atomic_ex_table("%5") \
	: "=&r" (ret), "=&r" (oldval), "=&r" (tmp) \
	: "r" (uaddr), "r" (oparg), "Ir" (-EFAULT) \
	: "cc", "memory")
87 | ||
c1b0db56 WD |
/*
 * UP compare-and-exchange on a user-space futex word.
 *
 * Compare *@uaddr with @oldval and, if equal, store @newval; the
 * value read is returned through *@uval.  No retry loop or barriers:
 * on UP the disabled preemption noted above makes the sequence
 * effectively atomic.
 *
 * ret starts at 0 and is only overwritten (with -EFAULT) by the
 * exception fixup, hence the "+r" constraint.  The explicit "it eq"
 * keeps the 2b label assemblable in Thumb-2, where the conditional
 * streq must sit inside an IT block.
 *
 * Returns 0 on completion (match or mismatch) or -EFAULT on a fault.
 */
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
			      u32 oldval, u32 newval)
{
	int ret = 0;
	u32 val;

	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
		return -EFAULT;

	__asm__ __volatile__("@futex_atomic_cmpxchg_inatomic\n"
	"1: " TUSER(ldr) " %1, [%4]\n"
	" teq %1, %2\n"
	" it eq @ explicit IT needed for the 2b label\n"
	"2: " TUSER(streq) " %3, [%4]\n"
	__futex_atomic_ex_table("%5")
	: "+r" (ret), "=&r" (val)
	: "r" (oldval), "r" (newval), "r" (uaddr), "Ir" (-EFAULT)
	: "cc", "memory");

	*uval = val;
	return ret;
}
111 | ||
112 | #endif /* !SMP */ | |
113 | ||
e589ed23 | 114 | static inline int |
8d7718aa | 115 | futex_atomic_op_inuser (int encoded_op, u32 __user *uaddr) |
e589ed23 MP |
116 | { |
117 | int op = (encoded_op >> 28) & 7; | |
118 | int cmp = (encoded_op >> 24) & 15; | |
119 | int oparg = (encoded_op << 8) >> 20; | |
120 | int cmparg = (encoded_op << 20) >> 20; | |
df77abca | 121 | int oldval = 0, ret, tmp; |
e589ed23 MP |
122 | |
123 | if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28)) | |
124 | oparg = 1 << oparg; | |
125 | ||
8d7718aa | 126 | if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32))) |
e589ed23 MP |
127 | return -EFAULT; |
128 | ||
129 | pagefault_disable(); /* implies preempt_disable() */ | |
130 | ||
131 | switch (op) { | |
132 | case FUTEX_OP_SET: | |
df77abca | 133 | __futex_atomic_op("mov %0, %4", ret, oldval, tmp, uaddr, oparg); |
e589ed23 MP |
134 | break; |
135 | case FUTEX_OP_ADD: | |
df77abca | 136 | __futex_atomic_op("add %0, %1, %4", ret, oldval, tmp, uaddr, oparg); |
e589ed23 MP |
137 | break; |
138 | case FUTEX_OP_OR: | |
df77abca | 139 | __futex_atomic_op("orr %0, %1, %4", ret, oldval, tmp, uaddr, oparg); |
e589ed23 MP |
140 | break; |
141 | case FUTEX_OP_ANDN: | |
df77abca | 142 | __futex_atomic_op("and %0, %1, %4", ret, oldval, tmp, uaddr, ~oparg); |
e589ed23 MP |
143 | break; |
144 | case FUTEX_OP_XOR: | |
df77abca | 145 | __futex_atomic_op("eor %0, %1, %4", ret, oldval, tmp, uaddr, oparg); |
e589ed23 MP |
146 | break; |
147 | default: | |
148 | ret = -ENOSYS; | |
149 | } | |
150 | ||
151 | pagefault_enable(); /* subsumes preempt_enable() */ | |
152 | ||
153 | if (!ret) { | |
154 | switch (cmp) { | |
155 | case FUTEX_OP_CMP_EQ: ret = (oldval == cmparg); break; | |
156 | case FUTEX_OP_CMP_NE: ret = (oldval != cmparg); break; | |
157 | case FUTEX_OP_CMP_LT: ret = (oldval < cmparg); break; | |
158 | case FUTEX_OP_CMP_GE: ret = (oldval >= cmparg); break; | |
159 | case FUTEX_OP_CMP_LE: ret = (oldval <= cmparg); break; | |
160 | case FUTEX_OP_CMP_GT: ret = (oldval > cmparg); break; | |
161 | default: ret = -ENOSYS; | |
162 | } | |
163 | } | |
164 | return ret; | |
165 | } | |
166 | ||
c1b0db56 | 167 | #endif /* !(CPU_USE_DOMAINS && SMP) */ |
e589ed23 MP |
168 | #endif /* __KERNEL__ */ |
169 | #endif /* _ASM_ARM_FUTEX_H */ |