arch/arc/include/asm/uaccess.h
/*
 * Copyright (C) 2004, 2007-2010, 2011-2012 Synopsys, Inc. (www.synopsys.com)
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * vineetg: June 2010
 *    -__clear_user( ) called multiple times during elf load was byte loop
 *    converted to do as much word clear as possible.
 *
 * vineetg: Dec 2009
 *    -Hand crafted constant propagation for "constant" copy sizes
 *    -stock kernel shrunk by 33K at -O3
 *
 * vineetg: Sept 2009
 *    -Added option to (UN)inline copy_(to|from)_user to reduce code sz
 *    -kernel shrunk by 200K even at -O3 (gcc 4.2.1)
 *    -Enabled when doing -Os
 *
 * Amit Bhor, Sameer Dhavale: Codito Technologies 2004
 */

#ifndef _ASM_ARC_UACCESS_H
#define _ASM_ARC_UACCESS_H

#include <linux/sched.h>
#include <asm/errno.h>
#include <linux/string.h>	/* for generic string functions */


#define __kernel_ok		(segment_eq(get_fs(), KERNEL_DS))

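/*
 * Illustrative note (not from the original source): __kernel_ok short-circuits
 * the user-range check when a kernel caller has widened the address limit with
 * the classic set_fs() pattern, e.g.:
 *
 *	mm_segment_t old_fs = get_fs();
 *
 *	set_fs(KERNEL_DS);
 *	... access a kernel buffer via the user-access helpers ...
 *	set_fs(old_fs);
 *
 * In that window get_fs() == KERNEL_DS, so __access_ok() below passes without
 * comparing against TASK_SIZE.
 */
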
/*
 * Algorithmically, for __user_ok() we want to do:
 *	(start < TASK_SIZE) && (start+len < TASK_SIZE)
 * where TASK_SIZE could either be retrieved from thread_info->addr_limit or
 * emitted directly in code.
 *
 * This can however be rewritten as follows:
 *	(len <= TASK_SIZE) && (start+len < TASK_SIZE)
 *
 * because it essentially checks that the buffer end is within the limit and
 * that @len is non-negative, which together imply that the buffer start is
 * within the limit too.
 *
 * The reason for rewriting is that, for the majority of cases, @len is a
 * compile time constant, so the first sub-expression is subsumed at compile
 * time.
 *
 * The second part would generate weird large LIMMs e.g. (0x6000_0000 - 0x10),
 * so we check against TASK_SIZE via get_fs(), since the addr_limit load from
 * memory has already been done at this call site for __kernel_ok().
 */
#define __user_ok(addr, sz)	(((sz) <= TASK_SIZE) && \
				 (((addr)+(sz)) <= get_fs()))
#define __access_ok(addr, sz)	(unlikely(__kernel_ok) || \
				 likely(__user_ok((addr), (sz))))

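/*
 * Illustrative sketch (not from the original source; the names below are made
 * up): a typical guarded copy, where "ubuf" is a hypothetical __user pointer.
 *
 *	char kbuf[16];
 *
 *	if (!__access_ok((unsigned long)ubuf, sizeof(kbuf)))
 *		return -EFAULT;
 *
 * Because sizeof(kbuf) is a compile-time constant, the "(sz) <= TASK_SIZE"
 * half of __user_ok() folds away and only the "addr + sz <= get_fs()"
 * comparison remains, which is the code-size point made in the comment above.
 */
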
static inline unsigned long
__arc_copy_from_user(void *to, const void __user *from, unsigned long n)
{
	long res = 0;
	char val;
	unsigned long tmp1, tmp2, tmp3, tmp4;
	unsigned long orig_n = n;

	if (n == 0)
		return 0;

	/* unaligned */
	if (((unsigned long)to & 0x3) || ((unsigned long)from & 0x3)) {

		unsigned char tmp;

		__asm__ __volatile__ (
		"	mov.f   lp_count, %0		\n"
		"	lpnz 2f				\n"
		"1:	ldb.ab  %1, [%3, 1]		\n"
		"	stb.ab  %1, [%2, 1]		\n"
		"	sub     %0,%0,1			\n"
		"2:	;nop				\n"
		"	.section .fixup, \"ax\"		\n"
		"	.align 4			\n"
		"3:	j   2b				\n"
		"	.previous			\n"
		"	.section __ex_table, \"a\"	\n"
		"	.align 4			\n"
		"	.word   1b, 3b			\n"
		"	.previous			\n"

		: "+r" (n),
		/*
		 * Note the '&' (earlyclobber) on the operand: it makes sure
		 * that the temporary register used inside the loop is not
		 * the same as FROM or TO.
		 */
		  "=&r" (tmp), "+r" (to), "+r" (from)
		:
		: "lp_count", "lp_start", "lp_end", "memory");

		return n;
	}

	/*
	 * Hand-crafted constant propagation to reduce code sz of the
	 * laddered copy 16x,8,4,2,1
	 */
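	/*
	 * Illustrative note (not from the original source): with a
	 * compile-time constant size, the divisions and remainders below
	 * fold away, so e.g. a constant 22-byte copy compiles down to just
	 * the 16-byte, 4-byte and 2-byte chunks (22 = 16 + 4 + 2); the other
	 * branches are discarded entirely by the compiler.
	 */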
	if (__builtin_constant_p(orig_n)) {
		res = orig_n;

		if (orig_n / 16) {
			orig_n = orig_n % 16;

			__asm__ __volatile__(
			"	lsr   lp_count, %7,4		\n"
			"	lp    3f			\n"
			"1:	ld.ab   %3, [%2, 4]		\n"
			"11:	ld.ab   %4, [%2, 4]		\n"
			"12:	ld.ab   %5, [%2, 4]		\n"
			"13:	ld.ab   %6, [%2, 4]		\n"
			"	st.ab   %3, [%1, 4]		\n"
			"	st.ab   %4, [%1, 4]		\n"
			"	st.ab   %5, [%1, 4]		\n"
			"	st.ab   %6, [%1, 4]		\n"
			"	sub     %0,%0,16		\n"
			"3:	;nop				\n"
			"	.section .fixup, \"ax\"		\n"
			"	.align 4			\n"
			"4:	j   3b				\n"
			"	.previous			\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4			\n"
			"	.word   1b, 4b			\n"
			"	.word   11b,4b			\n"
			"	.word   12b,4b			\n"
			"	.word   13b,4b			\n"
			"	.previous			\n"
			: "+r" (res), "+r"(to), "+r"(from),
			  "=r"(tmp1), "=r"(tmp2), "=r"(tmp3), "=r"(tmp4)
			: "ir"(n)
			: "lp_count", "memory");
		}
		if (orig_n / 8) {
			orig_n = orig_n % 8;

			__asm__ __volatile__(
			"14:	ld.ab   %3, [%2,4]		\n"
			"15:	ld.ab   %4, [%2,4]		\n"
			"	st.ab   %3, [%1,4]		\n"
			"	st.ab   %4, [%1,4]		\n"
			"	sub     %0,%0,8			\n"
			"31:	;nop				\n"
			"	.section .fixup, \"ax\"		\n"
			"	.align 4			\n"
			"4:	j   31b				\n"
			"	.previous			\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4			\n"
			"	.word   14b,4b			\n"
			"	.word   15b,4b			\n"
			"	.previous			\n"
			: "+r" (res), "+r"(to), "+r"(from),
			  "=r"(tmp1), "=r"(tmp2)
			:
			: "memory");
		}
		if (orig_n / 4) {
			orig_n = orig_n % 4;

			__asm__ __volatile__(
			"16:	ld.ab   %3, [%2,4]		\n"
			"	st.ab   %3, [%1,4]		\n"
			"	sub     %0,%0,4			\n"
			"32:	;nop				\n"
			"	.section .fixup, \"ax\"		\n"
			"	.align 4			\n"
			"4:	j   32b				\n"
			"	.previous			\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4			\n"
			"	.word   16b,4b			\n"
			"	.previous			\n"
			: "+r" (res), "+r"(to), "+r"(from), "=r"(tmp1)
			:
			: "memory");
		}
		if (orig_n / 2) {
			orig_n = orig_n % 2;

			__asm__ __volatile__(
			"17:	ldw.ab  %3, [%2,2]		\n"
			"	stw.ab  %3, [%1,2]		\n"
			"	sub     %0,%0,2			\n"
			"33:	;nop				\n"
			"	.section .fixup, \"ax\"		\n"
			"	.align 4			\n"
			"4:	j   33b				\n"
			"	.previous			\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4			\n"
			"	.word   17b,4b			\n"
			"	.previous			\n"
			: "+r" (res), "+r"(to), "+r"(from), "=r"(tmp1)
			:
			: "memory");
		}
		if (orig_n & 1) {
			__asm__ __volatile__(
			"18:	ldb.ab  %3, [%2,2]		\n"
			"	stb.ab  %3, [%1,2]		\n"
			"	sub     %0,%0,1			\n"
			"34:	;nop				\n"
			"	.section .fixup, \"ax\"		\n"
			"	.align 4			\n"
			"4:	j   34b				\n"
			"	.previous			\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4			\n"
			"	.word   18b,4b			\n"
			"	.previous			\n"
			: "+r" (res), "+r"(to), "+r"(from), "=r"(tmp1)
			:
			: "memory");
		}
	} else {  /* n is NOT constant, so laddered copy of 16x,8,4,2,1 */

		__asm__ __volatile__(
		"	mov   %0,%3			\n"
		"	lsr.f lp_count, %3,4		\n"  /* 16x bytes */
		"	lpnz  3f			\n"
		"1:	ld.ab   %5, [%2, 4]		\n"
		"11:	ld.ab   %6, [%2, 4]		\n"
		"12:	ld.ab   %7, [%2, 4]		\n"
		"13:	ld.ab   %8, [%2, 4]		\n"
		"	st.ab   %5, [%1, 4]		\n"
		"	st.ab   %6, [%1, 4]		\n"
		"	st.ab   %7, [%1, 4]		\n"
		"	st.ab   %8, [%1, 4]		\n"
		"	sub     %0,%0,16		\n"
		"3:	and.f   %3,%3,0xf		\n"  /* stragglers */
		"	bz      34f			\n"
		"	bbit0   %3,3,31f		\n"  /* 8 bytes left */
		"14:	ld.ab   %5, [%2,4]		\n"
		"15:	ld.ab   %6, [%2,4]		\n"
		"	st.ab   %5, [%1,4]		\n"
		"	st.ab   %6, [%1,4]		\n"
		"	sub.f   %0,%0,8			\n"
		"31:	bbit0   %3,2,32f		\n"  /* 4 bytes left */
		"16:	ld.ab   %5, [%2,4]		\n"
		"	st.ab   %5, [%1,4]		\n"
		"	sub.f   %0,%0,4			\n"
		"32:	bbit0   %3,1,33f		\n"  /* 2 bytes left */
		"17:	ldw.ab  %5, [%2,2]		\n"
		"	stw.ab  %5, [%1,2]		\n"
		"	sub.f   %0,%0,2			\n"
		"33:	bbit0   %3,0,34f		\n"
		"18:	ldb.ab  %5, [%2,1]		\n"  /* 1 byte left */
		"	stb.ab  %5, [%1,1]		\n"
		"	sub.f   %0,%0,1			\n"
		"34:	;nop				\n"
		"	.section .fixup, \"ax\"		\n"
		"	.align 4			\n"
		"4:	j   34b				\n"
		"	.previous			\n"
		"	.section __ex_table, \"a\"	\n"
		"	.align 4			\n"
		"	.word   1b, 4b			\n"
		"	.word   11b,4b			\n"
		"	.word   12b,4b			\n"
		"	.word   13b,4b			\n"
		"	.word   14b,4b			\n"
		"	.word   15b,4b			\n"
		"	.word   16b,4b			\n"
		"	.word   17b,4b			\n"
		"	.word   18b,4b			\n"
		"	.previous			\n"
		: "=r" (res), "+r"(to), "+r"(from), "+r"(n), "=r"(val),
		  "=r"(tmp1), "=r"(tmp2), "=r"(tmp3), "=r"(tmp4)
		:
		: "lp_count", "memory");
	}

	return res;
}

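/*
 * Illustrative note (not from the original source): like the generic
 * __copy_{from,to}_user() helpers, the copy routines in this file return the
 * number of bytes that could NOT be copied; the .fixup handlers simply branch
 * to the end of the copy, so the partially decremented count becomes that
 * return value.
 */
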
extern unsigned long slowpath_copy_to_user(void __user *to, const void *from,
					   unsigned long n);

static inline unsigned long
__arc_copy_to_user(void __user *to, const void *from, unsigned long n)
{
	long res = 0;
	char val;
	unsigned long tmp1, tmp2, tmp3, tmp4;
	unsigned long orig_n = n;

	if (n == 0)
		return 0;

	/* unaligned */
	if (((unsigned long)to & 0x3) || ((unsigned long)from & 0x3)) {

		unsigned char tmp;

		__asm__ __volatile__(
		"	mov.f   lp_count, %0		\n"
		"	lpnz 3f				\n"
		"	ldb.ab  %1, [%3, 1]		\n"
		"1:	stb.ab  %1, [%2, 1]		\n"
		"	sub     %0, %0, 1		\n"
		"3:	;nop				\n"
		"	.section .fixup, \"ax\"		\n"
		"	.align 4			\n"
		"4:	j   3b				\n"
		"	.previous			\n"
		"	.section __ex_table, \"a\"	\n"
		"	.align 4			\n"
		"	.word   1b, 4b			\n"
		"	.previous			\n"

		: "+r" (n),
		/*
		 * Note the '&' (earlyclobber) on the operand: it makes sure
		 * that the temporary register used inside the loop is not
		 * the same as FROM or TO.
		 */
		  "=&r" (tmp), "+r" (to), "+r" (from)
		:
		: "lp_count", "lp_start", "lp_end", "memory");

		return n;
	}

	if (__builtin_constant_p(orig_n)) {
		res = orig_n;

		if (orig_n / 16) {
			orig_n = orig_n % 16;

			__asm__ __volatile__(
			"	lsr lp_count, %7,4		\n"
			"	lp  3f				\n"
			"	ld.ab %3, [%2, 4]		\n"
			"	ld.ab %4, [%2, 4]		\n"
			"	ld.ab %5, [%2, 4]		\n"
			"	ld.ab %6, [%2, 4]		\n"
			"1:	st.ab %3, [%1, 4]		\n"
			"11:	st.ab %4, [%1, 4]		\n"
			"12:	st.ab %5, [%1, 4]		\n"
			"13:	st.ab %6, [%1, 4]		\n"
			"	sub   %0, %0, 16		\n"
			"3:	;nop				\n"
			"	.section .fixup, \"ax\"		\n"
			"	.align 4			\n"
			"4:	j   3b				\n"
			"	.previous			\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4			\n"
			"	.word   1b, 4b			\n"
			"	.word   11b,4b			\n"
			"	.word   12b,4b			\n"
			"	.word   13b,4b			\n"
			"	.previous			\n"
			: "+r" (res), "+r"(to), "+r"(from),
			  "=r"(tmp1), "=r"(tmp2), "=r"(tmp3), "=r"(tmp4)
			: "ir"(n)
			: "lp_count", "memory");
		}
		if (orig_n / 8) {
			orig_n = orig_n % 8;

			__asm__ __volatile__(
			"	ld.ab %3, [%2,4]		\n"
			"	ld.ab %4, [%2,4]		\n"
			"14:	st.ab %3, [%1,4]		\n"
			"15:	st.ab %4, [%1,4]		\n"
			"	sub   %0, %0, 8			\n"
			"31:	;nop				\n"
			"	.section .fixup, \"ax\"		\n"
			"	.align 4			\n"
			"4:	j   31b				\n"
			"	.previous			\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4			\n"
			"	.word   14b,4b			\n"
			"	.word   15b,4b			\n"
			"	.previous			\n"
			: "+r" (res), "+r"(to), "+r"(from),
			  "=r"(tmp1), "=r"(tmp2)
			:
			: "memory");
		}
		if (orig_n / 4) {
			orig_n = orig_n % 4;

			__asm__ __volatile__(
			"	ld.ab %3, [%2,4]		\n"
			"16:	st.ab %3, [%1,4]		\n"
			"	sub   %0, %0, 4			\n"
			"32:	;nop				\n"
			"	.section .fixup, \"ax\"		\n"
			"	.align 4			\n"
			"4:	j   32b				\n"
			"	.previous			\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4			\n"
			"	.word   16b,4b			\n"
			"	.previous			\n"
			: "+r" (res), "+r"(to), "+r"(from), "=r"(tmp1)
			:
			: "memory");
		}
		if (orig_n / 2) {
			orig_n = orig_n % 2;

			__asm__ __volatile__(
			"	ldw.ab %3, [%2,2]		\n"
			"17:	stw.ab %3, [%1,2]		\n"
			"	sub   %0, %0, 2			\n"
			"33:	;nop				\n"
			"	.section .fixup, \"ax\"		\n"
			"	.align 4			\n"
			"4:	j   33b				\n"
			"	.previous			\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4			\n"
			"	.word   17b,4b			\n"
			"	.previous			\n"
			: "+r" (res), "+r"(to), "+r"(from), "=r"(tmp1)
			:
			: "memory");
		}
		if (orig_n & 1) {
			__asm__ __volatile__(
			"	ldb.ab  %3, [%2,1]		\n"
			"18:	stb.ab  %3, [%1,1]		\n"
			"	sub     %0, %0, 1		\n"
			"34:	;nop				\n"
			"	.section .fixup, \"ax\"		\n"
			"	.align 4			\n"
			"4:	j   34b				\n"
			"	.previous			\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4			\n"
			"	.word   18b,4b			\n"
			"	.previous			\n"
			: "+r" (res), "+r"(to), "+r"(from), "=r"(tmp1)
			:
			: "memory");
		}
	} else {  /* n is NOT constant, so laddered copy of 16x,8,4,2,1 */

		__asm__ __volatile__(
		"	mov   %0,%3			\n"
		"	lsr.f lp_count, %3,4		\n"  /* 16x bytes */
		"	lpnz  3f			\n"
		"	ld.ab %5, [%2, 4]		\n"
		"	ld.ab %6, [%2, 4]		\n"
		"	ld.ab %7, [%2, 4]		\n"
		"	ld.ab %8, [%2, 4]		\n"
		"1:	st.ab %5, [%1, 4]		\n"
		"11:	st.ab %6, [%1, 4]		\n"
		"12:	st.ab %7, [%1, 4]		\n"
		"13:	st.ab %8, [%1, 4]		\n"
		"	sub   %0, %0, 16		\n"
		"3:	and.f %3,%3,0xf			\n"  /* stragglers */
		"	bz    34f			\n"
		"	bbit0 %3,3,31f			\n"  /* 8 bytes left */
		"	ld.ab %5, [%2,4]		\n"
		"	ld.ab %6, [%2,4]		\n"
		"14:	st.ab %5, [%1,4]		\n"
		"15:	st.ab %6, [%1,4]		\n"
		"	sub.f %0, %0, 8			\n"
		"31:	bbit0 %3,2,32f			\n"  /* 4 bytes left */
		"	ld.ab %5, [%2,4]		\n"
		"16:	st.ab %5, [%1,4]		\n"
		"	sub.f %0, %0, 4			\n"
		"32:	bbit0 %3,1,33f			\n"  /* 2 bytes left */
		"	ldw.ab %5, [%2,2]		\n"
		"17:	stw.ab %5, [%1,2]		\n"
		"	sub.f %0, %0, 2			\n"
		"33:	bbit0 %3,0,34f			\n"
		"	ldb.ab %5, [%2,1]		\n"  /* 1 byte left */
		"18:	stb.ab %5, [%1,1]		\n"
		"	sub.f %0, %0, 1			\n"
		"34:	;nop				\n"
		"	.section .fixup, \"ax\"		\n"
		"	.align 4			\n"
		"4:	j   34b				\n"
		"	.previous			\n"
		"	.section __ex_table, \"a\"	\n"
		"	.align 4			\n"
		"	.word   1b, 4b			\n"
		"	.word   11b,4b			\n"
		"	.word   12b,4b			\n"
		"	.word   13b,4b			\n"
		"	.word   14b,4b			\n"
		"	.word   15b,4b			\n"
		"	.word   16b,4b			\n"
		"	.word   17b,4b			\n"
		"	.word   18b,4b			\n"
		"	.previous			\n"
		: "=r" (res), "+r"(to), "+r"(from), "+r"(n), "=r"(val),
		  "=r"(tmp1), "=r"(tmp2), "=r"(tmp3), "=r"(tmp4)
		:
		: "lp_count", "memory");
	}

	return res;
}

static inline unsigned long __arc_clear_user(void __user *to, unsigned long n)
{
	long res = n;
	unsigned char *d_char = to;

	__asm__ __volatile__(
	"	bbit0   %0, 0, 1f		\n"
	"75:	stb.ab  %2, [%0,1]		\n"
	"	sub %1, %1, 1			\n"
	"1:	bbit0   %0, 1, 2f		\n"
	"76:	stw.ab  %2, [%0,2]		\n"
	"	sub %1, %1, 2			\n"
	"2:	asr.f   lp_count, %1, 2		\n"
	"	lpnz    3f			\n"
	"77:	st.ab   %2, [%0,4]		\n"
	"	sub %1, %1, 4			\n"
	"3:	bbit0   %1, 1, 4f		\n"
	"78:	stw.ab  %2, [%0,2]		\n"
	"	sub %1, %1, 2			\n"
	"4:	bbit0   %1, 0, 5f		\n"
	"79:	stb.ab  %2, [%0,1]		\n"
	"	sub %1, %1, 1			\n"
	"5:					\n"
	"	.section .fixup, \"ax\"		\n"
	"	.align 4			\n"
	"3:	j   5b				\n"
	"	.previous			\n"
	"	.section __ex_table, \"a\"	\n"
	"	.align 4			\n"
	"	.word   75b, 3b			\n"
	"	.word   76b, 3b			\n"
	"	.word   77b, 3b			\n"
	"	.word   78b, 3b			\n"
	"	.word   79b, 3b			\n"
	"	.previous			\n"
	: "+r"(d_char), "+r"(res)
	: "i"(0)
	: "lp_count", "lp_start", "lp_end", "memory");

	return res;
}
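
/*
 * Illustrative note (not from the original source): __arc_clear_user() aligns
 * the destination first and then clears a word at a time, which is the June
 * 2010 changelog entry above.  For example, clearing 11 bytes at an address
 * ending in ...1 becomes: 1 byte (align to 2), 2 bytes (align to 4), then two
 * 4-byte stores, with nothing left over for the halfword/byte tail.
 */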

static inline long
__arc_strncpy_from_user(char *dst, const char __user *src, long count)
{
	long res = count;
	char val;
	unsigned int hw_count;

	if (count == 0)
		return 0;

	__asm__ __volatile__(
	"	lp 2f				\n"
	"1:	ldb.ab  %3, [%2, 1]		\n"
	"	breq.d  %3, 0, 2f		\n"
	"	stb.ab  %3, [%1, 1]		\n"
	"2:	sub %0, %6, %4			\n"
	"3:	;nop				\n"
	"	.section .fixup, \"ax\"		\n"
	"	.align 4			\n"
	"4:	mov %0, %5			\n"
	"	j   3b				\n"
	"	.previous			\n"
	"	.section __ex_table, \"a\"	\n"
	"	.align 4			\n"
	"	.word   1b, 4b			\n"
	"	.previous			\n"
	: "=r"(res), "+r"(dst), "+r"(src), "=&r"(val), "=l"(hw_count)
	: "g"(-EFAULT), "ir"(count), "4"(count)	/* this "4" seeds lp_count */
	: "memory");

	return res;
}

static inline long __arc_strnlen_user(const char __user *s, long n)
{
	long res, tmp1, cnt;
	char val;

	__asm__ __volatile__(
	"	mov %2, %1			\n"
	"1:	ldb.ab  %3, [%0, 1]		\n"
	"	breq.d  %3, 0, 2f		\n"
	"	sub.f   %2, %2, 1		\n"
	"	bnz 1b				\n"
	"	sub %2, %2, 1			\n"
	"2:	sub %0, %1, %2			\n"
	"3:	;nop				\n"
	"	.section .fixup, \"ax\"		\n"
	"	.align 4			\n"
	"4:	mov %0, 0			\n"
	"	j   3b				\n"
	"	.previous			\n"
	"	.section __ex_table, \"a\"	\n"
	"	.align 4			\n"
	"	.word   1b, 4b			\n"
	"	.previous			\n"
	: "=r"(res), "=r"(tmp1), "=r"(cnt), "=r"(val)
	: "0"(s), "1"(n)
	: "memory");

	return res;
}

#ifndef CONFIG_CC_OPTIMIZE_FOR_SIZE
#define __copy_from_user(t, f, n)	__arc_copy_from_user(t, f, n)
#define __copy_to_user(t, f, n)	__arc_copy_to_user(t, f, n)
#define __clear_user(d, n)		__arc_clear_user(d, n)
#define __strncpy_from_user(d, s, n)	__arc_strncpy_from_user(d, s, n)
#define __strnlen_user(s, n)		__arc_strnlen_user(s, n)
#else
extern long arc_copy_from_user_noinline(void *to, const void __user *from,
					unsigned long n);
extern long arc_copy_to_user_noinline(void __user *to, const void *from,
				      unsigned long n);
extern unsigned long arc_clear_user_noinline(void __user *to,
					     unsigned long n);
extern long arc_strncpy_from_user_noinline(char *dst, const char __user *src,
					    long count);
extern long arc_strnlen_user_noinline(const char __user *src, long n);

#define __copy_from_user(t, f, n)	arc_copy_from_user_noinline(t, f, n)
#define __copy_to_user(t, f, n)		arc_copy_to_user_noinline(t, f, n)
#define __clear_user(d, n)		arc_clear_user_noinline(d, n)
#define __strncpy_from_user(d, s, n)	arc_strncpy_from_user_noinline(d, s, n)
#define __strnlen_user(s, n)		arc_strnlen_user_noinline(s, n)

#endif
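
/*
 * Illustrative sketch (not part of this header): with -Os the out-of-line
 * arc_*_noinline() variants above are expected to be thin wrappers defined
 * elsewhere in arch/arc (the exact file is an assumption here), e.g.:
 *
 *	long arc_copy_from_user_noinline(void *to, const void __user *from,
 *					 unsigned long n)
 *	{
 *		return __arc_copy_from_user(to, from, n);
 *	}
 *
 * so the heavy inline bodies are emitted once instead of at every call site,
 * which is the code-size saving described in the Sept 2009 changelog entry.
 */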

#include <asm-generic/uaccess.h>

extern int fixup_exception(struct pt_regs *regs);

#endif