#ifndef __X86_64_UACCESS_H
#define __X86_64_UACCESS_H

/*
 * User space memory access functions
 */
#include <linux/compiler.h>
#include <linux/errno.h>
#include <linux/prefetch.h>
#include <asm/page.h>

/*
 * Copy To/From Userspace
 *
 * All of these return the number of bytes that could NOT be copied
 * (0 on success), per the usual kernel uaccess convention.
 */

/* Handles exceptions in both to and from, but doesn't do access_ok */
__must_check unsigned long
copy_user_generic(void *to, const void *from, unsigned len);

/* The access_ok()-checked variants, implemented out of line. */
__must_check unsigned long
copy_to_user(void __user *to, const void *from, unsigned len);
__must_check unsigned long
copy_from_user(void *to, const void __user *from, unsigned len);
__must_check unsigned long
copy_in_user(void __user *to, const void __user *from, unsigned len);
27 | static __always_inline __must_check | |
28 | int __copy_from_user(void *dst, const void __user *src, unsigned size) | |
b896313e | 29 | { |
383d079b | 30 | int ret = 0; |
1da177e4 | 31 | if (!__builtin_constant_p(size)) |
b896313e JP |
32 | return copy_user_generic(dst, (__force void *)src, size); |
33 | switch (size) { | |
34 | case 1:__get_user_asm(*(u8 *)dst, (u8 __user *)src, | |
35 | ret, "b", "b", "=q", 1); | |
1da177e4 | 36 | return ret; |
b896313e JP |
37 | case 2:__get_user_asm(*(u16 *)dst, (u16 __user *)src, |
38 | ret, "w", "w", "=r", 2); | |
1da177e4 | 39 | return ret; |
b896313e JP |
40 | case 4:__get_user_asm(*(u32 *)dst, (u32 __user *)src, |
41 | ret, "l", "k", "=r", 4); | |
42 | return ret; | |
43 | case 8:__get_user_asm(*(u64 *)dst, (u64 __user *)src, | |
44 | ret, "q", "", "=r", 8); | |
1da177e4 | 45 | return ret; |
1da177e4 | 46 | case 10: |
b896313e JP |
47 | __get_user_asm(*(u64 *)dst, (u64 __user *)src, |
48 | ret, "q", "", "=r", 16); | |
49 | if (unlikely(ret)) | |
50 | return ret; | |
51 | __get_user_asm(*(u16 *)(8 + (char *)dst), | |
52 | (u16 __user *)(8 + (char __user *)src), | |
53 | ret, "w", "w", "=r", 2); | |
54 | return ret; | |
1da177e4 | 55 | case 16: |
b896313e JP |
56 | __get_user_asm(*(u64 *)dst, (u64 __user *)src, |
57 | ret, "q", "", "=r", 16); | |
58 | if (unlikely(ret)) | |
59 | return ret; | |
60 | __get_user_asm(*(u64 *)(8 + (char *)dst), | |
61 | (u64 __user *)(8 + (char __user *)src), | |
62 | ret, "q", "", "=r", 8); | |
63 | return ret; | |
1da177e4 | 64 | default: |
b896313e | 65 | return copy_user_generic(dst, (__force void *)src, size); |
1da177e4 | 66 | } |
b896313e | 67 | } |
1da177e4 | 68 | |
/*
 * __copy_to_user - copy data to user space, without access_ok() check.
 * @dst:  destination user-space pointer (must already be range-checked)
 * @src:  source kernel buffer
 * @size: number of bytes to copy
 *
 * Returns the number of bytes that could NOT be copied; 0 on success.
 *
 * Compile-time-constant small sizes are inlined as fixed-width stores;
 * everything else uses copy_user_generic().  The final numeric argument
 * of __put_user_asm() is the value left in @ret on a fault, i.e. the
 * bytes still uncopied at that point.
 */
static __always_inline __must_check
int __copy_to_user(void __user *dst, const void *src, unsigned size)
{
	int ret = 0;
	if (!__builtin_constant_p(size))
		return copy_user_generic((__force void *)dst, src, size);
	switch (size) {
	case 1:__put_user_asm(*(u8 *)src, (u8 __user *)dst,
			      ret, "b", "b", "iq", 1);
		return ret;
	case 2:__put_user_asm(*(u16 *)src, (u16 __user *)dst,
			      ret, "w", "w", "ir", 2);
		return ret;
	case 4:__put_user_asm(*(u32 *)src, (u32 __user *)dst,
			      ret, "l", "k", "ir", 4);
		return ret;
	case 8:__put_user_asm(*(u64 *)src, (u64 __user *)dst,
			      ret, "q", "", "ir", 8);
		return ret;
	case 10:
		__put_user_asm(*(u64 *)src, (u64 __user *)dst,
			       ret, "q", "", "ir", 10);
		if (unlikely(ret))
			return ret;
		/* Compiler barrier between the two stores. */
		asm("":::"memory");
		/* 4[(u16 *)src] == ((u16 *)src)[4]: the u16 at byte 8. */
		__put_user_asm(4[(u16 *)src], 4 + (u16 __user *)dst,
			       ret, "w", "w", "ir", 2);
		return ret;
	case 16:
		__put_user_asm(*(u64 *)src, (u64 __user *)dst,
			       ret, "q", "", "ir", 16);
		if (unlikely(ret))
			return ret;
		/* Compiler barrier between the two stores. */
		asm("":::"memory");
		/* 1[(u64 *)src] == ((u64 *)src)[1]: the u64 at byte 8. */
		__put_user_asm(1[(u64 *)src], 1 + (u64 __user *)dst,
			       ret, "q", "", "ir", 8);
		return ret;
	default:
		return copy_user_generic((__force void *)dst, src, size);
	}
}
/*
 * __copy_in_user - copy data between two user-space buffers, without
 * access_ok() checks.
 * @dst:  destination user-space pointer
 * @src:  source user-space pointer
 * @size: number of bytes to copy
 *
 * Returns the number of bytes that could NOT be copied; 0 on success.
 *
 * Small constant sizes bounce through a kernel temporary: one inlined
 * user load followed (only if the load succeeded) by one inlined user
 * store.  Everything else uses copy_user_generic().
 */
static __always_inline __must_check
int __copy_in_user(void __user *dst, const void __user *src, unsigned size)
{
	int ret = 0;
	if (!__builtin_constant_p(size))
		return copy_user_generic((__force void *)dst,
					 (__force void *)src, size);
	switch (size) {
	case 1: {
		u8 tmp;
		__get_user_asm(tmp, (u8 __user *)src,
			       ret, "b", "b", "=q", 1);
		if (likely(!ret))
			__put_user_asm(tmp, (u8 __user *)dst,
				       ret, "b", "b", "iq", 1);
		return ret;
	}
	case 2: {
		u16 tmp;
		__get_user_asm(tmp, (u16 __user *)src,
			       ret, "w", "w", "=r", 2);
		if (likely(!ret))
			__put_user_asm(tmp, (u16 __user *)dst,
				       ret, "w", "w", "ir", 2);
		return ret;
	}

	case 4: {
		u32 tmp;
		__get_user_asm(tmp, (u32 __user *)src,
			       ret, "l", "k", "=r", 4);
		if (likely(!ret))
			__put_user_asm(tmp, (u32 __user *)dst,
				       ret, "l", "k", "ir", 4);
		return ret;
	}
	case 8: {
		u64 tmp;
		__get_user_asm(tmp, (u64 __user *)src,
			       ret, "q", "", "=r", 8);
		if (likely(!ret))
			__put_user_asm(tmp, (u64 __user *)dst,
				       ret, "q", "", "ir", 8);
		return ret;
	}
	default:
		return copy_user_generic((__force void *)dst,
					 (__force void *)src, size);
	}
}
/*
 * String and clearing helpers, implemented out of line.  The
 * double-underscore variants skip the access_ok() check (caller's
 * responsibility) — presumably matching the bare-name versions
 * otherwise; verify against their definitions.
 */
__must_check long
strncpy_from_user(char *dst, const char __user *src, long count);
__must_check long
__strncpy_from_user(char *dst, const char __user *src, long count);
__must_check long strnlen_user(const char __user *str, long n);
__must_check long __strnlen_user(const char __user *str, long n);
__must_check long strlen_user(const char __user *str);
__must_check unsigned long clear_user(void __user *mem, unsigned long len);
__must_check unsigned long __clear_user(void __user *mem, unsigned long len);

/* Out-of-line atomic-context copy; no access_ok() check. */
__must_check long __copy_from_user_inatomic(void *dst, const void __user *src,
					    unsigned size);
b885808e AK |
174 | |
175 | static __must_check __always_inline int | |
176 | __copy_to_user_inatomic(void __user *dst, const void *src, unsigned size) | |
177 | { | |
178 | return copy_user_generic((__force void *)dst, src, size); | |
179 | } | |
1da177e4 | 180 | |
/*
 * Non-temporal (cache-bypassing) copy from user space.  @zerorest
 * presumably controls whether the destination tail is zeroed on a
 * fault — verify against the definition.
 */
extern long __copy_user_nocache(void *dst, const void __user *src,
				unsigned size, int zerorest);
b896313e JP |
184 | static inline int __copy_from_user_nocache(void *dst, const void __user *src, |
185 | unsigned size) | |
0812a579 AK |
186 | { |
187 | might_sleep(); | |
ecb7524c | 188 | return __copy_user_nocache(dst, src, size, 1); |
0812a579 AK |
189 | } |
190 | ||
b896313e JP |
191 | static inline int __copy_from_user_inatomic_nocache(void *dst, |
192 | const void __user *src, | |
193 | unsigned size) | |
0812a579 | 194 | { |
ecb7524c | 195 | return __copy_user_nocache(dst, src, size, 0); |
0812a579 AK |
196 | } |
197 | ||
/*
 * Fix-up helper for a faulted copy: handles the uncopied tail,
 * optionally zeroing it (per @zerorest).  Returns the number of
 * bytes that remain uncopied.
 */
unsigned long
copy_user_handle_tail(char *to, char *from, unsigned len, unsigned zerorest);

#endif /* __X86_64_UACCESS_H */