ARC: uaccess: remove lp_start, lp_end from clobber list
arch/arc/include/asm/uaccess.h

/*
 * Copyright (C) 2004, 2007-2010, 2011-2012 Synopsys, Inc. (www.synopsys.com)
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * vineetg: June 2010
 *  -__clear_user( ) called multiple times during elf load was byte loop
 *    converted to do as much word clear as possible.
 *
 * vineetg: Dec 2009
 *  -Hand crafted constant propagation for "constant" copy sizes
 *  -stock kernel shrunk by 33K at -O3
 *
 * vineetg: Sept 2009
 *  -Added option to (UN)inline copy_(to|from)_user to reduce code sz
 *  -kernel shrunk by 200K even at -O3 (gcc 4.2.1)
 *  -Enabled when doing -Os
 *
 * Amit Bhor, Sameer Dhavale: Codito Technologies 2004
 */

#ifndef _ASM_ARC_UACCESS_H
#define _ASM_ARC_UACCESS_H

#include <linux/string.h>	/* for generic string functions */


#define __kernel_ok		(uaccess_kernel())

/*
 * Algorithmically, for __user_ok() we want to do:
 *	(start < TASK_SIZE) && (start+len < TASK_SIZE)
 * where TASK_SIZE could either be retrieved from thread_info->addr_limit or
 * emitted directly in code.
 *
 * This can however be rewritten as follows:
 *	(len <= TASK_SIZE) && (start+len < TASK_SIZE)
 *
 * Because it essentially checks if the buffer end is within the limit and
 * @len is non-negative, which implies that the buffer start is within the
 * limit too.
 *
 * The reason for the rewrite is that, in the majority of cases, @len is a
 * compile-time constant, so the first sub-expression is subsumed at compile
 * time.
 *
 * The second part would generate weird large LIMMs e.g. (0x6000_0000 - 0x10),
 * so we check against TASK_SIZE using get_fs(), since the addr_limit load
 * from memory would already have been done at this call site for
 * __kernel_ok().
 */
#define __user_ok(addr, sz)	(((sz) <= TASK_SIZE) && \
				 ((addr) <= (get_fs() - (sz))))
#define __access_ok(addr, sz)	(unlikely(__kernel_ok) || \
				 likely(__user_ok((addr), (sz))))

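/*
 * Illustrative sketch (not part of the original header): assuming the
 * 0x6000_0000 user TASK_SIZE mentioned above, a 16-byte access at
 * 0x5fff0000 passes __user_ok() because
 *	16 <= 0x60000000			(len within limit)
 *	0x5fff0000 <= 0x60000000 - 16		(buffer end within limit)
 * whereas the same 16-byte access at 0x5ffffff8 fails the second test,
 * since 0x5ffffff8 > 0x5ffffff0.
 */
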
/*********** Single byte/hword/word copies ******************/

#define __get_user_fn(sz, u, k)					\
({								\
	long __ret = 0;	/* success by default */		\
	switch (sz) {						\
	case 1: __arc_get_user_one(*(k), u, "ldb", __ret); break;	\
	case 2: __arc_get_user_one(*(k), u, "ldw", __ret); break;	\
	case 4: __arc_get_user_one(*(k), u, "ld", __ret); break;	\
	case 8: __arc_get_user_one_64(*(k), u, __ret); break;	\
	}							\
	__ret;							\
})

/*
 * Returns 0 on success, -EFAULT if not.
 * @ret already contains 0 - given that errors will be less likely
 * (hence the "+r" asm constraint below).
 * In case of error, the fixup code will make it -EFAULT.
 */
#define __arc_get_user_one(dst, src, op, ret)	\
	__asm__ __volatile__(			\
	"1:	"op"	%1,[%2]\n"		\
	"2:	;nop\n"				\
	"	.section .fixup, \"ax\"\n"	\
	"	.align 4\n"			\
	"3:	# return -EFAULT\n"		\
	"	mov %0, %3\n"			\
	"	# zero out dst ptr\n"		\
	"	mov %1, 0\n"			\
	"	j   2b\n"			\
	"	.previous\n"			\
	"	.section __ex_table, \"a\"\n"	\
	"	.align 4\n"			\
	"	.word 1b,3b\n"			\
	"	.previous\n"			\
						\
	: "+r" (ret), "=r" (dst)		\
	: "r" (src), "ir" (-EFAULT))

#define __arc_get_user_one_64(dst, src, ret)	\
	__asm__ __volatile__(			\
	"1:	ld   %1,[%2]\n"			\
	"4:	ld  %R1,[%2, 4]\n"		\
	"2:	;nop\n"				\
	"	.section .fixup, \"ax\"\n"	\
	"	.align 4\n"			\
	"3:	# return -EFAULT\n"		\
	"	mov %0, %3\n"			\
	"	# zero out dst ptr\n"		\
	"	mov %1, 0\n"			\
	"	mov %R1, 0\n"			\
	"	j   2b\n"			\
	"	.previous\n"			\
	"	.section __ex_table, \"a\"\n"	\
	"	.align 4\n"			\
	"	.word 1b,3b\n"			\
	"	.word 4b,3b\n"			\
	"	.previous\n"			\
						\
	: "+r" (ret), "=r" (dst)		\
	: "r" (src), "ir" (-EFAULT))

#define __put_user_fn(sz, u, k)					\
({								\
	long __ret = 0;	/* success by default */		\
	switch (sz) {						\
	case 1: __arc_put_user_one(*(k), u, "stb", __ret); break;	\
	case 2: __arc_put_user_one(*(k), u, "stw", __ret); break;	\
	case 4: __arc_put_user_one(*(k), u, "st", __ret); break;	\
	case 8: __arc_put_user_one_64(*(k), u, __ret); break;	\
	}							\
	__ret;							\
})

#define __arc_put_user_one(src, dst, op, ret)	\
	__asm__ __volatile__(			\
	"1:	"op"	%1,[%2]\n"		\
	"2:	;nop\n"				\
	"	.section .fixup, \"ax\"\n"	\
	"	.align 4\n"			\
	"3:	mov %0, %3\n"			\
	"	j   2b\n"			\
	"	.previous\n"			\
	"	.section __ex_table, \"a\"\n"	\
	"	.align 4\n"			\
	"	.word 1b,3b\n"			\
	"	.previous\n"			\
						\
	: "+r" (ret)				\
	: "r" (src), "r" (dst), "ir" (-EFAULT))

#define __arc_put_user_one_64(src, dst, ret)	\
	__asm__ __volatile__(			\
	"1:	st   %1,[%2]\n"			\
	"4:	st  %R1,[%2, 4]\n"		\
	"2:	;nop\n"				\
	"	.section .fixup, \"ax\"\n"	\
	"	.align 4\n"			\
	"3:	mov %0, %3\n"			\
	"	j   2b\n"			\
	"	.previous\n"			\
	"	.section __ex_table, \"a\"\n"	\
	"	.align 4\n"			\
	"	.word 1b,3b\n"			\
	"	.word 4b,3b\n"			\
	"	.previous\n"			\
						\
	: "+r" (ret)				\
	: "r" (src), "r" (dst), "ir" (-EFAULT))

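/*
 * Illustrative sketch (not part of the original header): with these hooks
 * defined, the generic __get_user()/__put_user() paths from
 * asm-generic/uaccess.h dispatch into the single-access helpers above.
 * A hypothetical caller could look like:
 *
 *	u32 __user *uptr;
 *	u32 val;
 *
 *	if (get_user(val, uptr))	// 4-byte case -> __arc_get_user_one(.., "ld", ..)
 *		return -EFAULT;
 *	if (put_user(val + 1, uptr))	// 4-byte case -> __arc_put_user_one(.., "st", ..)
 *		return -EFAULT;
 */
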
static inline unsigned long
raw_copy_from_user(void *to, const void __user *from, unsigned long n)
{
	long res = 0;
	char val;
	unsigned long tmp1, tmp2, tmp3, tmp4;
	unsigned long orig_n = n;

	if (n == 0)
		return 0;

	/* unaligned */
	if (((unsigned long)to & 0x3) || ((unsigned long)from & 0x3)) {

		unsigned char tmp;

		__asm__ __volatile__ (
		"	mov.f   lp_count, %0		\n"
		"	lpnz 2f				\n"
		"1:	ldb.ab  %1, [%3, 1]		\n"
		"	stb.ab  %1, [%2, 1]		\n"
		"	sub     %0,%0,1			\n"
		"2:	;nop				\n"
		"	.section .fixup, \"ax\"		\n"
		"	.align 4			\n"
		"3:	j   2b				\n"
		"	.previous			\n"
		"	.section __ex_table, \"a\"	\n"
		"	.align 4			\n"
		"	.word   1b, 3b			\n"
		"	.previous			\n"

		: "+r" (n),
		/*
		 * Note the '&' earlyclobber on the operand: it makes sure the
		 * temporary register used inside the loop is not the same as
		 * FROM or TO.
		 */
		  "=&r" (tmp), "+r" (to), "+r" (from)
		:
		: "lp_count", "memory");

		return n;
	}

	/*
	 * Hand-crafted constant propagation to reduce code size of
	 * the laddered copy 16x,8,4,2,1
	 */
	if (__builtin_constant_p(orig_n)) {
		res = orig_n;

		if (orig_n / 16) {
			orig_n = orig_n % 16;

			__asm__ __volatile__(
			"	lsr   lp_count, %7,4		\n"
			"	lp    3f			\n"
			"1:	ld.ab   %3, [%2, 4]		\n"
			"11:	ld.ab   %4, [%2, 4]		\n"
			"12:	ld.ab   %5, [%2, 4]		\n"
			"13:	ld.ab   %6, [%2, 4]		\n"
			"	st.ab   %3, [%1, 4]		\n"
			"	st.ab   %4, [%1, 4]		\n"
			"	st.ab   %5, [%1, 4]		\n"
			"	st.ab   %6, [%1, 4]		\n"
			"	sub     %0,%0,16		\n"
			"3:	;nop				\n"
			"	.section .fixup, \"ax\"		\n"
			"	.align 4			\n"
			"4:	j   3b				\n"
			"	.previous			\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4			\n"
			"	.word   1b, 4b			\n"
			"	.word   11b,4b			\n"
			"	.word   12b,4b			\n"
			"	.word   13b,4b			\n"
			"	.previous			\n"
			: "+r" (res), "+r"(to), "+r"(from),
			  "=r"(tmp1), "=r"(tmp2), "=r"(tmp3), "=r"(tmp4)
			: "ir"(n)
			: "lp_count", "memory");
		}
		if (orig_n / 8) {
			orig_n = orig_n % 8;

			__asm__ __volatile__(
			"14:	ld.ab   %3, [%2,4]		\n"
			"15:	ld.ab   %4, [%2,4]		\n"
			"	st.ab   %3, [%1,4]		\n"
			"	st.ab   %4, [%1,4]		\n"
			"	sub     %0,%0,8			\n"
			"31:	;nop				\n"
			"	.section .fixup, \"ax\"		\n"
			"	.align 4			\n"
			"4:	j   31b				\n"
			"	.previous			\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4			\n"
			"	.word   14b,4b			\n"
			"	.word   15b,4b			\n"
			"	.previous			\n"
			: "+r" (res), "+r"(to), "+r"(from),
			  "=r"(tmp1), "=r"(tmp2)
			:
			: "memory");
		}
		if (orig_n / 4) {
			orig_n = orig_n % 4;

			__asm__ __volatile__(
			"16:	ld.ab   %3, [%2,4]		\n"
			"	st.ab   %3, [%1,4]		\n"
			"	sub     %0,%0,4			\n"
			"32:	;nop				\n"
			"	.section .fixup, \"ax\"		\n"
			"	.align 4			\n"
			"4:	j   32b				\n"
			"	.previous			\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4			\n"
			"	.word   16b,4b			\n"
			"	.previous			\n"
			: "+r" (res), "+r"(to), "+r"(from), "=r"(tmp1)
			:
			: "memory");
		}
		if (orig_n / 2) {
			orig_n = orig_n % 2;

			__asm__ __volatile__(
			"17:	ldw.ab  %3, [%2,2]		\n"
			"	stw.ab  %3, [%1,2]		\n"
			"	sub     %0,%0,2			\n"
			"33:	;nop				\n"
			"	.section .fixup, \"ax\"		\n"
			"	.align 4			\n"
			"4:	j   33b				\n"
			"	.previous			\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4			\n"
			"	.word   17b,4b			\n"
			"	.previous			\n"
			: "+r" (res), "+r"(to), "+r"(from), "=r"(tmp1)
			:
			: "memory");
		}
		if (orig_n & 1) {
			__asm__ __volatile__(
			"18:	ldb.ab  %3, [%2,2]		\n"
			"	stb.ab  %3, [%1,2]		\n"
			"	sub     %0,%0,1			\n"
			"34:	;nop				\n"
			"	.section .fixup, \"ax\"		\n"
			"	.align 4			\n"
			"4:	j   34b				\n"
			"	.previous			\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4			\n"
			"	.word   18b,4b			\n"
			"	.previous			\n"
			: "+r" (res), "+r"(to), "+r"(from), "=r"(tmp1)
			:
			: "memory");
		}
	} else {  /* n is NOT constant, so laddered copy of 16x,8,4,2,1 */

		__asm__ __volatile__(
		"	mov   %0,%3			\n"
		"	lsr.f lp_count, %3,4		\n"  /* 16x bytes */
		"	lpnz  3f			\n"
		"1:	ld.ab   %5, [%2, 4]		\n"
		"11:	ld.ab   %6, [%2, 4]		\n"
		"12:	ld.ab   %7, [%2, 4]		\n"
		"13:	ld.ab   %8, [%2, 4]		\n"
		"	st.ab   %5, [%1, 4]		\n"
		"	st.ab   %6, [%1, 4]		\n"
		"	st.ab   %7, [%1, 4]		\n"
		"	st.ab   %8, [%1, 4]		\n"
		"	sub     %0,%0,16		\n"
		"3:	and.f   %3,%3,0xf		\n"  /* stragglers */
		"	bz      34f			\n"
		"	bbit0   %3,3,31f		\n"  /* 8 bytes left */
		"14:	ld.ab   %5, [%2,4]		\n"
		"15:	ld.ab   %6, [%2,4]		\n"
		"	st.ab   %5, [%1,4]		\n"
		"	st.ab   %6, [%1,4]		\n"
		"	sub.f   %0,%0,8			\n"
		"31:	bbit0   %3,2,32f		\n"  /* 4 bytes left */
		"16:	ld.ab   %5, [%2,4]		\n"
		"	st.ab   %5, [%1,4]		\n"
		"	sub.f   %0,%0,4			\n"
		"32:	bbit0   %3,1,33f		\n"  /* 2 bytes left */
		"17:	ldw.ab  %5, [%2,2]		\n"
		"	stw.ab  %5, [%1,2]		\n"
		"	sub.f   %0,%0,2			\n"
		"33:	bbit0   %3,0,34f		\n"
		"18:	ldb.ab  %5, [%2,1]		\n"  /* 1 byte left */
		"	stb.ab  %5, [%1,1]		\n"
		"	sub.f   %0,%0,1			\n"
		"34:	;nop				\n"
		"	.section .fixup, \"ax\"		\n"
		"	.align 4			\n"
		"4:	j   34b				\n"
		"	.previous			\n"
		"	.section __ex_table, \"a\"	\n"
		"	.align 4			\n"
		"	.word   1b, 4b			\n"
		"	.word   11b,4b			\n"
		"	.word   12b,4b			\n"
		"	.word   13b,4b			\n"
		"	.word   14b,4b			\n"
		"	.word   15b,4b			\n"
		"	.word   16b,4b			\n"
		"	.word   17b,4b			\n"
		"	.word   18b,4b			\n"
		"	.previous			\n"
		: "=r" (res), "+r"(to), "+r"(from), "+r"(n), "=r"(val),
		  "=r"(tmp1), "=r"(tmp2), "=r"(tmp3), "=r"(tmp4)
		:
		: "lp_count", "memory");
	}

	return res;
}

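/*
 * Illustrative sketch (not part of the original header): for a compile-time
 * constant size the laddered copy above collapses statically. A hypothetical
 *
 *	copy_from_user(kbuf, ubuf, 22);
 *
 * with both pointers word aligned emits only the 16-byte loop block, one
 * 4-byte block and one 2-byte block (16 + 4 + 2 = 22); the orig_n / 16,
 * / 8, / 4, / 2 and & 1 tests are all folded away by the compiler.
 */
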
static inline unsigned long
raw_copy_to_user(void __user *to, const void *from, unsigned long n)
{
	long res = 0;
	char val;
	unsigned long tmp1, tmp2, tmp3, tmp4;
	unsigned long orig_n = n;

	if (n == 0)
		return 0;

	/* unaligned */
	if (((unsigned long)to & 0x3) || ((unsigned long)from & 0x3)) {

		unsigned char tmp;

		__asm__ __volatile__(
		"	mov.f   lp_count, %0		\n"
		"	lpnz 3f				\n"
		"	ldb.ab  %1, [%3, 1]		\n"
		"1:	stb.ab  %1, [%2, 1]		\n"
		"	sub     %0, %0, 1		\n"
		"3:	;nop				\n"
		"	.section .fixup, \"ax\"		\n"
		"	.align 4			\n"
		"4:	j   3b				\n"
		"	.previous			\n"
		"	.section __ex_table, \"a\"	\n"
		"	.align 4			\n"
		"	.word   1b, 4b			\n"
		"	.previous			\n"

		: "+r" (n),
		/*
		 * Note the '&' earlyclobber on the operand: it makes sure the
		 * temporary register used inside the loop is not the same as
		 * FROM or TO.
		 */
		  "=&r" (tmp), "+r" (to), "+r" (from)
		:
		: "lp_count", "memory");

		return n;
	}

	if (__builtin_constant_p(orig_n)) {
		res = orig_n;

		if (orig_n / 16) {
			orig_n = orig_n % 16;

			__asm__ __volatile__(
			"	lsr lp_count, %7,4		\n"
			"	lp  3f				\n"
			"	ld.ab %3, [%2, 4]		\n"
			"	ld.ab %4, [%2, 4]		\n"
			"	ld.ab %5, [%2, 4]		\n"
			"	ld.ab %6, [%2, 4]		\n"
			"1:	st.ab %3, [%1, 4]		\n"
			"11:	st.ab %4, [%1, 4]		\n"
			"12:	st.ab %5, [%1, 4]		\n"
			"13:	st.ab %6, [%1, 4]		\n"
			"	sub   %0, %0, 16		\n"
			"3:	;nop				\n"
			"	.section .fixup, \"ax\"		\n"
			"	.align 4			\n"
			"4:	j   3b				\n"
			"	.previous			\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4			\n"
			"	.word   1b, 4b			\n"
			"	.word   11b,4b			\n"
			"	.word   12b,4b			\n"
			"	.word   13b,4b			\n"
			"	.previous			\n"
			: "+r" (res), "+r"(to), "+r"(from),
			  "=r"(tmp1), "=r"(tmp2), "=r"(tmp3), "=r"(tmp4)
			: "ir"(n)
			: "lp_count", "memory");
		}
		if (orig_n / 8) {
			orig_n = orig_n % 8;

			__asm__ __volatile__(
			"	ld.ab %3, [%2,4]		\n"
			"	ld.ab %4, [%2,4]		\n"
			"14:	st.ab %3, [%1,4]		\n"
			"15:	st.ab %4, [%1,4]		\n"
			"	sub   %0, %0, 8			\n"
			"31:	;nop				\n"
			"	.section .fixup, \"ax\"		\n"
			"	.align 4			\n"
			"4:	j   31b				\n"
			"	.previous			\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4			\n"
			"	.word   14b,4b			\n"
			"	.word   15b,4b			\n"
			"	.previous			\n"
			: "+r" (res), "+r"(to), "+r"(from),
			  "=r"(tmp1), "=r"(tmp2)
			:
			: "memory");
		}
		if (orig_n / 4) {
			orig_n = orig_n % 4;

			__asm__ __volatile__(
			"	ld.ab %3, [%2,4]		\n"
			"16:	st.ab %3, [%1,4]		\n"
			"	sub   %0, %0, 4			\n"
			"32:	;nop				\n"
			"	.section .fixup, \"ax\"		\n"
			"	.align 4			\n"
			"4:	j   32b				\n"
			"	.previous			\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4			\n"
			"	.word   16b,4b			\n"
			"	.previous			\n"
			: "+r" (res), "+r"(to), "+r"(from), "=r"(tmp1)
			:
			: "memory");
		}
		if (orig_n / 2) {
			orig_n = orig_n % 2;

			__asm__ __volatile__(
			"	ldw.ab %3, [%2,2]		\n"
			"17:	stw.ab %3, [%1,2]		\n"
			"	sub    %0, %0, 2		\n"
			"33:	;nop				\n"
			"	.section .fixup, \"ax\"		\n"
			"	.align 4			\n"
			"4:	j   33b				\n"
			"	.previous			\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4			\n"
			"	.word   17b,4b			\n"
			"	.previous			\n"
			: "+r" (res), "+r"(to), "+r"(from), "=r"(tmp1)
			:
			: "memory");
		}
		if (orig_n & 1) {
			__asm__ __volatile__(
			"	ldb.ab %3, [%2,1]		\n"
			"18:	stb.ab %3, [%1,1]		\n"
			"	sub    %0, %0, 1		\n"
			"34:	;nop				\n"
			"	.section .fixup, \"ax\"		\n"
			"	.align 4			\n"
			"4:	j   34b				\n"
			"	.previous			\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4			\n"
			"	.word   18b,4b			\n"
			"	.previous			\n"
			: "+r" (res), "+r"(to), "+r"(from), "=r"(tmp1)
			:
			: "memory");
		}
	} else {  /* n is NOT constant, so laddered copy of 16x,8,4,2,1 */

		__asm__ __volatile__(
		"	mov   %0,%3			\n"
		"	lsr.f lp_count, %3,4		\n"  /* 16x bytes */
		"	lpnz  3f			\n"
		"	ld.ab %5, [%2, 4]		\n"
		"	ld.ab %6, [%2, 4]		\n"
		"	ld.ab %7, [%2, 4]		\n"
		"	ld.ab %8, [%2, 4]		\n"
		"1:	st.ab %5, [%1, 4]		\n"
		"11:	st.ab %6, [%1, 4]		\n"
		"12:	st.ab %7, [%1, 4]		\n"
		"13:	st.ab %8, [%1, 4]		\n"
		"	sub   %0, %0, 16		\n"
		"3:	and.f %3,%3,0xf			\n"  /* stragglers */
		"	bz    34f			\n"
		"	bbit0 %3,3,31f			\n"  /* 8 bytes left */
		"	ld.ab %5, [%2,4]		\n"
		"	ld.ab %6, [%2,4]		\n"
		"14:	st.ab %5, [%1,4]		\n"
		"15:	st.ab %6, [%1,4]		\n"
		"	sub.f %0, %0, 8			\n"
		"31:	bbit0 %3,2,32f			\n"  /* 4 bytes left */
		"	ld.ab %5, [%2,4]		\n"
		"16:	st.ab %5, [%1,4]		\n"
		"	sub.f %0, %0, 4			\n"
		"32:	bbit0 %3,1,33f			\n"  /* 2 bytes left */
		"	ldw.ab %5, [%2,2]		\n"
		"17:	stw.ab %5, [%1,2]		\n"
		"	sub.f %0, %0, 2			\n"
		"33:	bbit0 %3,0,34f			\n"
		"	ldb.ab %5, [%2,1]		\n"  /* 1 byte left */
		"18:	stb.ab %5, [%1,1]		\n"
		"	sub.f %0, %0, 1			\n"
		"34:	;nop				\n"
		"	.section .fixup, \"ax\"		\n"
		"	.align 4			\n"
		"4:	j   34b				\n"
		"	.previous			\n"
		"	.section __ex_table, \"a\"	\n"
		"	.align 4			\n"
		"	.word   1b, 4b			\n"
		"	.word   11b,4b			\n"
		"	.word   12b,4b			\n"
		"	.word   13b,4b			\n"
		"	.word   14b,4b			\n"
		"	.word   15b,4b			\n"
		"	.word   16b,4b			\n"
		"	.word   17b,4b			\n"
		"	.word   18b,4b			\n"
		"	.previous			\n"
		: "=r" (res), "+r"(to), "+r"(from), "+r"(n), "=r"(val),
		  "=r"(tmp1), "=r"(tmp2), "=r"(tmp3), "=r"(tmp4)
		:
		: "lp_count", "memory");
	}

	return res;
}

static inline unsigned long __arc_clear_user(void __user *to, unsigned long n)
{
	long res = n;
	unsigned char *d_char = to;

	__asm__ __volatile__(
	"	bbit0   %0, 0, 1f		\n"
	"75:	stb.ab  %2, [%0,1]		\n"
	"	sub %1, %1, 1			\n"
	"1:	bbit0   %0, 1, 2f		\n"
	"76:	stw.ab  %2, [%0,2]		\n"
	"	sub %1, %1, 2			\n"
	"2:	asr.f   lp_count, %1, 2		\n"
	"	lpnz    3f			\n"
	"77:	st.ab   %2, [%0,4]		\n"
	"	sub %1, %1, 4			\n"
	"3:	bbit0   %1, 1, 4f		\n"
	"78:	stw.ab  %2, [%0,2]		\n"
	"	sub %1, %1, 2			\n"
	"4:	bbit0   %1, 0, 5f		\n"
	"79:	stb.ab  %2, [%0,1]		\n"
	"	sub %1, %1, 1			\n"
	"5:					\n"
	"	.section .fixup, \"ax\"		\n"
	"	.align 4			\n"
	"3:	j   5b				\n"
	"	.previous			\n"
	"	.section __ex_table, \"a\"	\n"
	"	.align 4			\n"
	"	.word   75b, 3b			\n"
	"	.word   76b, 3b			\n"
	"	.word   77b, 3b			\n"
	"	.word   78b, 3b			\n"
	"	.word   79b, 3b			\n"
	"	.previous			\n"
	: "+r"(d_char), "+r"(res)
	: "i"(0)
	: "lp_count", "memory");

	return res;
}

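/*
 * Illustrative sketch (not part of the original header): per the changelog
 * above, __clear_user() is hit repeatedly during ELF load, zeroing the
 * user-space tail of a mapping (e.g. the BSS beyond the file-backed part),
 * roughly
 *
 *	if (clear_user((void __user *)bss_start, bss_len))
 *		return -EFAULT;
 *
 * where bss_start/bss_len are hypothetical names for the region to clear.
 */
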
static inline long
__arc_strncpy_from_user(char *dst, const char __user *src, long count)
{
	long res = 0;
	char val;

	if (count == 0)
		return 0;

	__asm__ __volatile__(
	"	mov lp_count, %5		\n"
	"	lp  3f				\n"
	"1:	ldb.ab  %3, [%2, 1]		\n"
	"	breq.d  %3, 0, 3f		\n"
	"	stb.ab  %3, [%1, 1]		\n"
	"	add %0, %0, 1	# Num of NON NULL bytes copied	\n"
	"3:					\n"
	"	.section .fixup, \"ax\"		\n"
	"	.align 4			\n"
	"4:	mov %0, %4	# sets @res as -EFAULT	\n"
	"	j   3b				\n"
	"	.previous			\n"
	"	.section __ex_table, \"a\"	\n"
	"	.align 4			\n"
	"	.word   1b, 4b			\n"
	"	.previous			\n"
	: "+r"(res), "+r"(dst), "+r"(src), "=r"(val)
	: "g"(-EFAULT), "r"(count)
	: "lp_count", "memory");

	return res;
}

static inline long __arc_strnlen_user(const char __user *s, long n)
{
	long res, tmp1, cnt;
	char val;

	__asm__ __volatile__(
	"	mov %2, %1			\n"
	"1:	ldb.ab  %3, [%0, 1]		\n"
	"	breq.d  %3, 0, 2f		\n"
	"	sub.f   %2, %2, 1		\n"
	"	bnz 1b				\n"
	"	sub %2, %2, 1			\n"
	"2:	sub %0, %1, %2			\n"
	"3:	;nop				\n"
	"	.section .fixup, \"ax\"		\n"
	"	.align 4			\n"
	"4:	mov %0, 0			\n"
	"	j   3b				\n"
	"	.previous			\n"
	"	.section __ex_table, \"a\"	\n"
	"	.align 4			\n"
	"	.word   1b, 4b			\n"
	"	.previous			\n"
	: "=r"(res), "=r"(tmp1), "=r"(cnt), "=r"(val)
	: "0"(s), "1"(n)
	: "memory");

	return res;
}

#ifndef CONFIG_CC_OPTIMIZE_FOR_SIZE

#define INLINE_COPY_TO_USER
#define INLINE_COPY_FROM_USER

#define __clear_user(d, n)		__arc_clear_user(d, n)
#define __strncpy_from_user(d, s, n)	__arc_strncpy_from_user(d, s, n)
#define __strnlen_user(s, n)		__arc_strnlen_user(s, n)
#else
extern unsigned long arc_clear_user_noinline(void __user *to,
		unsigned long n);
extern long arc_strncpy_from_user_noinline(char *dst, const char __user *src,
		long count);
extern long arc_strnlen_user_noinline(const char __user *src, long n);

#define __clear_user(d, n)		arc_clear_user_noinline(d, n)
#define __strncpy_from_user(d, s, n)	arc_strncpy_from_user_noinline(d, s, n)
#define __strnlen_user(s, n)		arc_strnlen_user_noinline(s, n)

#endif

#include <asm-generic/uaccess.h>

#endif