/* SPDX-License-Identifier: GPL-2.0 */
1965aae3 PA |
2 | #ifndef _ASM_X86_UACCESS_64_H |
3 | #define _ASM_X86_UACCESS_64_H | |
1da177e4 LT |
4 | |
5 | /* | |
6 | * User space memory access functions | |
7 | */ | |
1da177e4 | 8 | #include <linux/compiler.h> |
16dbc6c9 | 9 | #include <linux/lockdep.h> |
1771c6e1 | 10 | #include <linux/kasan-checks.h> |
1b1d9258 | 11 | #include <asm/alternative.h> |
cd4d09ec | 12 | #include <asm/cpufeatures.h> |
1da177e4 LT |
13 | #include <asm/page.h> |
14 | ||
1da177e4 LT |
15 | /* |
16 | * Copy To/From Userspace | |
17 | */ | |
18 | ||
/* Handles exceptions in both to and from, but doesn't do access_ok */
__must_check unsigned long
copy_user_enhanced_fast_string(void *to, const void *from, unsigned len);
__must_check unsigned long
copy_user_generic_string(void *to, const void *from, unsigned len);
__must_check unsigned long
copy_user_generic_unrolled(void *to, const void *from, unsigned len);

/*
 * Dispatch a @len-byte copy to the best out-of-line assembly routine
 * for the running CPU.  The choice is patched in once at boot by
 * alternative_call_2(), so there is no runtime feature test on the
 * hot path.
 *
 * Returns whatever the selected routine leaves in %eax ("=a" (ret)) --
 * presumably the number of bytes left uncopied, 0 on success; confirm
 * against arch/x86/lib/copy_user_64.S.
 */
static __always_inline __must_check unsigned long
copy_user_generic(void *to, const void *from, unsigned len)
{
	unsigned ret;

	/*
	 * If CPU has ERMS feature, use copy_user_enhanced_fast_string.
	 * Otherwise, if CPU has rep_good feature, use copy_user_generic_string.
	 * Otherwise, use copy_user_generic_unrolled.
	 *
	 * All three routines take their arguments in %rdi/%rsi/%edx and
	 * may clobber them, hence the "=D"/"=S"/"=d" outputs tied back
	 * to the inputs below.
	 */
	alternative_call_2(copy_user_generic_unrolled,
			 copy_user_generic_string,
			 X86_FEATURE_REP_GOOD,
			 copy_user_enhanced_fast_string,
			 X86_FEATURE_ERMS,
			 ASM_OUTPUT2("=a" (ret), "=D" (to), "=S" (from),
				     "=d" (len)),
			 "1" (to), "2" (from), "3" (len)
			 : "memory", "rcx", "r8", "r9", "r10", "r11");
	return ret;
}
95912008 | 48 | |
/*
 * Copy @size bytes from user space @src into kernel buffer @dst.
 * No access_ok() check is done here; the generic uaccess layer that
 * calls raw_copy_from_user() is responsible for that.
 *
 * When @size is a compile-time constant of a common small width the
 * copy is open-coded with __get_user_asm_nozero() (the "_nozero"
 * variant presumably does not zero @dst on fault -- confirm against
 * asm/uaccess.h); everything else goes through copy_user_generic().
 *
 * Returns 0 on success, non-zero on fault (the non-constant path
 * returns whatever residue copy_user_generic() reports).
 */
static __always_inline __must_check unsigned long
raw_copy_from_user(void *dst, const void __user *src, unsigned long size)
{
	int ret = 0;

	/* Size not known at compile time: nothing to unroll. */
	if (!__builtin_constant_p(size))
		return copy_user_generic(dst, (__force void *)src, size);
	switch (size) {
	case 1:
		/* __uaccess_begin()/__uaccess_end() bracket each user access. */
		__uaccess_begin();
		__get_user_asm_nozero(*(u8 *)dst, (u8 __user *)src,
			      ret, "b", "b", "=q", 1);
		__uaccess_end();
		return ret;
	case 2:
		__uaccess_begin();
		__get_user_asm_nozero(*(u16 *)dst, (u16 __user *)src,
			      ret, "w", "w", "=r", 2);
		__uaccess_end();
		return ret;
	case 4:
		__uaccess_begin();
		__get_user_asm_nozero(*(u32 *)dst, (u32 __user *)src,
			      ret, "l", "k", "=r", 4);
		__uaccess_end();
		return ret;
	case 8:
		__uaccess_begin();
		__get_user_asm_nozero(*(u64 *)dst, (u64 __user *)src,
			      ret, "q", "", "=r", 8);
		__uaccess_end();
		return ret;
	case 10:
		/* 10 bytes = one 8-byte load plus one 2-byte load. */
		__uaccess_begin();
		__get_user_asm_nozero(*(u64 *)dst, (u64 __user *)src,
			       ret, "q", "", "=r", 10);
		/* Skip the tail load if the first one already faulted. */
		if (likely(!ret))
			__get_user_asm_nozero(*(u16 *)(8 + (char *)dst),
				       (u16 __user *)(8 + (char __user *)src),
				       ret, "w", "w", "=r", 2);
		__uaccess_end();
		return ret;
	case 16:
		/* 16 bytes = two 8-byte loads, second only if first succeeds. */
		__uaccess_begin();
		__get_user_asm_nozero(*(u64 *)dst, (u64 __user *)src,
			       ret, "q", "", "=r", 16);
		if (likely(!ret))
			__get_user_asm_nozero(*(u64 *)(8 + (char *)dst),
				       (u64 __user *)(8 + (char __user *)src),
				       ret, "q", "", "=r", 8);
		__uaccess_end();
		return ret;
	default:
		return copy_user_generic(dst, (__force void *)src, size);
	}
}
1da177e4 | 105 | |
/*
 * Copy @size bytes from kernel buffer @src to user space @dst.
 * Mirror image of raw_copy_from_user(): no access_ok() check here,
 * constant small sizes are open-coded with __put_user_asm(), and
 * everything else falls back to copy_user_generic().
 *
 * Returns 0 on success, non-zero on fault.
 */
static __always_inline __must_check unsigned long
raw_copy_to_user(void __user *dst, const void *src, unsigned long size)
{
	int ret = 0;

	/* Size not known at compile time: use the generic copy. */
	if (!__builtin_constant_p(size))
		return copy_user_generic((__force void *)dst, src, size);
	switch (size) {
	case 1:
		/* __uaccess_begin()/__uaccess_end() bracket each user access. */
		__uaccess_begin();
		__put_user_asm(*(u8 *)src, (u8 __user *)dst,
			      ret, "b", "b", "iq", 1);
		__uaccess_end();
		return ret;
	case 2:
		__uaccess_begin();
		__put_user_asm(*(u16 *)src, (u16 __user *)dst,
			      ret, "w", "w", "ir", 2);
		__uaccess_end();
		return ret;
	case 4:
		__uaccess_begin();
		__put_user_asm(*(u32 *)src, (u32 __user *)dst,
			      ret, "l", "k", "ir", 4);
		__uaccess_end();
		return ret;
	case 8:
		__uaccess_begin();
		__put_user_asm(*(u64 *)src, (u64 __user *)dst,
			      ret, "q", "", "er", 8);
		__uaccess_end();
		return ret;
	case 10:
		/* 10 bytes = one 8-byte store plus one 2-byte store. */
		__uaccess_begin();
		__put_user_asm(*(u64 *)src, (u64 __user *)dst,
			       ret, "q", "", "er", 10);
		if (likely(!ret)) {
			/* Compiler barrier: keep the two stores ordered. */
			asm("":::"memory");
			__put_user_asm(4[(u16 *)src], 4 + (u16 __user *)dst,
				       ret, "w", "w", "ir", 2);
		}
		__uaccess_end();
		return ret;
	case 16:
		/* 16 bytes = two 8-byte stores, second only if first succeeds. */
		__uaccess_begin();
		__put_user_asm(*(u64 *)src, (u64 __user *)dst,
			       ret, "q", "", "er", 16);
		if (likely(!ret)) {
			/* Compiler barrier: keep the two stores ordered. */
			asm("":::"memory");
			__put_user_asm(1[(u64 *)src], 1 + (u64 __user *)dst,
				       ret, "q", "", "er", 8);
		}
		__uaccess_end();
		return ret;
	default:
		return copy_user_generic((__force void *)dst, src, size);
	}
}
1da177e4 | 164 | |
ff47ab4f | 165 | static __always_inline __must_check |
beba3a20 | 166 | unsigned long raw_copy_in_user(void __user *dst, const void __user *src, unsigned long size) |
b896313e | 167 | { |
a41e0d75 AV |
168 | return copy_user_generic((__force void *)dst, |
169 | (__force void *)src, size); | |
b896313e | 170 | } |
1da177e4 | 171 | |
b896313e JP |
172 | extern long __copy_user_nocache(void *dst, const void __user *src, |
173 | unsigned size, int zerorest); | |
0812a579 | 174 | |
0aed55af DW |
175 | extern long __copy_user_flushcache(void *dst, const void __user *src, unsigned size); |
176 | extern void memcpy_page_flushcache(char *to, struct page *page, size_t offset, | |
177 | size_t len); | |
178 | ||
f1800536 IM |
179 | static inline int |
180 | __copy_from_user_inatomic_nocache(void *dst, const void __user *src, | |
181 | unsigned size) | |
0812a579 | 182 | { |
1771c6e1 | 183 | kasan_check_write(dst, size); |
f1800536 | 184 | return __copy_user_nocache(dst, src, size, 0); |
0812a579 AK |
185 | } |
186 | ||
0aed55af DW |
187 | static inline int |
188 | __copy_from_user_flushcache(void *dst, const void __user *src, unsigned size) | |
189 | { | |
190 | kasan_check_write(dst, size); | |
191 | return __copy_user_flushcache(dst, src, size); | |
192 | } | |
193 | ||
/*
 * Tail handler for the assembly copy routines -- presumably invoked
 * from their exception-fixup paths to finish/account a partial copy
 * and return the number of bytes left uncopied; confirm against
 * arch/x86/lib/usercopy_64.c.
 */
unsigned long
copy_user_handle_tail(char *to, char *from, unsigned len);
1129585a | 196 | |
1965aae3 | 197 | #endif /* _ASM_X86_UACCESS_64_H */ |