// SPDX-License-Identifier: GPL-2.0
/*
 * User address space access functions.
 * The non-inlined parts of asm-i386/uaccess.h are here.
 *
 * Copyright 1997 Andi Kleen <ak@muc.de>
 * Copyright 1997 Linus Torvalds
 */
#include <linux/export.h>
#include <linux/uaccess.h>
#include <asm/asm.h>

#ifdef CONFIG_X86_INTEL_USERCOPY
/*
 * Alignment at which movsl is preferred for bulk memory copies.
 */
struct movsl_mask movsl_mask __read_mostly;
#endif

static inline int __movsl_is_ok(unsigned long a1, unsigned long a2, unsigned long n)
{
#ifdef CONFIG_X86_INTEL_USERCOPY
	/*
	 * movsl is slow when source and destination are misaligned with
	 * respect to each other; only use it for large copies where the
	 * two addresses agree in the bits covered by movsl_mask.
	 */
	if (n >= 64 && ((a1 ^ a2) & movsl_mask.mask))
		return 0;
#endif
	return 1;
}
#define movsl_is_ok(a1, a2, n) \
	__movsl_is_ok((unsigned long)(a1), (unsigned long)(a2), (n))

/*
 * Zero Userspace
 */

#define __do_clear_user(addr, size)					\
do {									\
	int __d0;							\
	might_fault();							\
	__asm__ __volatile__(						\
		ASM_STAC "\n"						\
		"0:	rep; stosl\n"					\
		"	movl %2,%0\n"					\
		"1:	rep; stosb\n"					\
		"2: " ASM_CLAC "\n"					\
		_ASM_EXTABLE_TYPE_REG(0b, 2b, EX_TYPE_UCOPY_LEN4, %2)	\
		_ASM_EXTABLE_UA(1b, 2b)					\
		: "=&c"(size), "=&D" (__d0)				\
		: "r"(size & 3), "0"(size / 4), "1"(addr), "a"(0));	\
} while (0)

/**
 * clear_user - Zero a block of memory in user space.
 * @to:   Destination address, in user space.
 * @n:    Number of bytes to zero.
 *
 * Zero a block of memory in user space.
 *
 * Return: number of bytes that could not be cleared.
 * On success, this will be zero.
 */
unsigned long
clear_user(void __user *to, unsigned long n)
{
	might_fault();
	if (access_ok(to, n))
		__do_clear_user(to, n);
	return n;
}
EXPORT_SYMBOL(clear_user);
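/*
 * Illustrative usage (not part of this file; ubuf and len are
 * hypothetical caller variables): a driver handing a zero-filled
 * range back to user space would typically write
 *
 *	if (clear_user(ubuf, len))
 *		return -EFAULT;
 *
 * since any non-zero return value is the count of bytes left
 * uncleared and is conventionally mapped to -EFAULT.
 */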
/**
 * __clear_user - Zero a block of memory in user space, with less checking.
 * @to:   Destination address, in user space.
 * @n:    Number of bytes to zero.
 *
 * Zero a block of memory in user space.  Caller must check
 * the specified block with access_ok() before calling this function.
 *
 * Return: number of bytes that could not be cleared.
 * On success, this will be zero.
 */
unsigned long
__clear_user(void __user *to, unsigned long n)
{
	__do_clear_user(to, n);
	return n;
}
EXPORT_SYMBOL(__clear_user);

#ifdef CONFIG_X86_INTEL_USERCOPY
/*
 * Unrolled copy loop: moves 64 bytes per iteration with paired 32-bit
 * loads and stores; the reads at 1: and 2: pre-touch the upcoming
 * cache lines.  Every numbered access has an exception-table entry, so
 * a fault lands at 100: and the residual count is returned in %0.
 */
static unsigned long
__copy_user_intel(void __user *to, const void *from, unsigned long size)
{
	int d0, d1;
	__asm__ __volatile__(
		       "       .align 2,0x90\n"
		       "1:     movl 32(%4), %%eax\n"
		       "       cmpl $67, %0\n"
		       "       jbe 3f\n"
		       "2:     movl 64(%4), %%eax\n"
		       "       .align 2,0x90\n"
		       "3:     movl 0(%4), %%eax\n"
		       "4:     movl 4(%4), %%edx\n"
		       "5:     movl %%eax, 0(%3)\n"
		       "6:     movl %%edx, 4(%3)\n"
		       "7:     movl 8(%4), %%eax\n"
		       "8:     movl 12(%4),%%edx\n"
		       "9:     movl %%eax, 8(%3)\n"
		       "10:    movl %%edx, 12(%3)\n"
		       "11:    movl 16(%4), %%eax\n"
		       "12:    movl 20(%4), %%edx\n"
		       "13:    movl %%eax, 16(%3)\n"
		       "14:    movl %%edx, 20(%3)\n"
		       "15:    movl 24(%4), %%eax\n"
		       "16:    movl 28(%4), %%edx\n"
		       "17:    movl %%eax, 24(%3)\n"
		       "18:    movl %%edx, 28(%3)\n"
		       "19:    movl 32(%4), %%eax\n"
		       "20:    movl 36(%4), %%edx\n"
		       "21:    movl %%eax, 32(%3)\n"
		       "22:    movl %%edx, 36(%3)\n"
		       "23:    movl 40(%4), %%eax\n"
		       "24:    movl 44(%4), %%edx\n"
		       "25:    movl %%eax, 40(%3)\n"
		       "26:    movl %%edx, 44(%3)\n"
		       "27:    movl 48(%4), %%eax\n"
		       "28:    movl 52(%4), %%edx\n"
		       "29:    movl %%eax, 48(%3)\n"
		       "30:    movl %%edx, 52(%3)\n"
		       "31:    movl 56(%4), %%eax\n"
		       "32:    movl 60(%4), %%edx\n"
		       "33:    movl %%eax, 56(%3)\n"
		       "34:    movl %%edx, 60(%3)\n"
		       "       addl $-64, %0\n"
		       "       addl $64, %4\n"
		       "       addl $64, %3\n"
		       "       cmpl $63, %0\n"
		       "       ja  1b\n"
		       "35:    movl  %0, %%eax\n"
		       "       shrl  $2, %0\n"
		       "       andl  $3, %%eax\n"
		       "       cld\n"
		       "99:    rep; movsl\n"
		       "36:    movl %%eax, %0\n"
		       "37:    rep; movsb\n"
		       "100:\n"
		       _ASM_EXTABLE_UA(1b, 100b)
		       _ASM_EXTABLE_UA(2b, 100b)
		       _ASM_EXTABLE_UA(3b, 100b)
		       _ASM_EXTABLE_UA(4b, 100b)
		       _ASM_EXTABLE_UA(5b, 100b)
		       _ASM_EXTABLE_UA(6b, 100b)
		       _ASM_EXTABLE_UA(7b, 100b)
		       _ASM_EXTABLE_UA(8b, 100b)
		       _ASM_EXTABLE_UA(9b, 100b)
		       _ASM_EXTABLE_UA(10b, 100b)
		       _ASM_EXTABLE_UA(11b, 100b)
		       _ASM_EXTABLE_UA(12b, 100b)
		       _ASM_EXTABLE_UA(13b, 100b)
		       _ASM_EXTABLE_UA(14b, 100b)
		       _ASM_EXTABLE_UA(15b, 100b)
		       _ASM_EXTABLE_UA(16b, 100b)
		       _ASM_EXTABLE_UA(17b, 100b)
		       _ASM_EXTABLE_UA(18b, 100b)
		       _ASM_EXTABLE_UA(19b, 100b)
		       _ASM_EXTABLE_UA(20b, 100b)
		       _ASM_EXTABLE_UA(21b, 100b)
		       _ASM_EXTABLE_UA(22b, 100b)
		       _ASM_EXTABLE_UA(23b, 100b)
		       _ASM_EXTABLE_UA(24b, 100b)
		       _ASM_EXTABLE_UA(25b, 100b)
		       _ASM_EXTABLE_UA(26b, 100b)
		       _ASM_EXTABLE_UA(27b, 100b)
		       _ASM_EXTABLE_UA(28b, 100b)
		       _ASM_EXTABLE_UA(29b, 100b)
		       _ASM_EXTABLE_UA(30b, 100b)
		       _ASM_EXTABLE_UA(31b, 100b)
		       _ASM_EXTABLE_UA(32b, 100b)
		       _ASM_EXTABLE_UA(33b, 100b)
		       _ASM_EXTABLE_UA(34b, 100b)
		       _ASM_EXTABLE_UA(35b, 100b)
		       _ASM_EXTABLE_UA(36b, 100b)
		       _ASM_EXTABLE_UA(37b, 100b)
		       _ASM_EXTABLE_TYPE_REG(99b, 100b, EX_TYPE_UCOPY_LEN4, %%eax)
		       : "=&c"(size), "=&D" (d0), "=&S" (d1)
		       :  "1"(to), "2"(from), "0"(size)
		       : "eax", "edx", "memory");
	return size;
}

/*
 * Same structure as __copy_user_intel(), but the stores go through
 * movnti (non-temporal hint) to avoid polluting the cache, with a
 * trailing sfence to order the weakly-ordered stores.
 */
static unsigned long __copy_user_intel_nocache(void *to,
				const void __user *from, unsigned long size)
{
	int d0, d1;

	__asm__ __volatile__(
	       "        .align 2,0x90\n"
	       "0:      movl 32(%4), %%eax\n"
	       "        cmpl $67, %0\n"
	       "        jbe  2f\n"
	       "1:      movl 64(%4), %%eax\n"
	       "        .align 2,0x90\n"
	       "2:      movl 0(%4), %%eax\n"
	       "21:     movl 4(%4), %%edx\n"
	       "        movnti %%eax, 0(%3)\n"
	       "        movnti %%edx, 4(%3)\n"
	       "3:      movl 8(%4), %%eax\n"
	       "31:     movl 12(%4),%%edx\n"
	       "        movnti %%eax, 8(%3)\n"
	       "        movnti %%edx, 12(%3)\n"
	       "4:      movl 16(%4), %%eax\n"
	       "41:     movl 20(%4), %%edx\n"
	       "        movnti %%eax, 16(%3)\n"
	       "        movnti %%edx, 20(%3)\n"
	       "10:     movl 24(%4), %%eax\n"
	       "51:     movl 28(%4), %%edx\n"
	       "        movnti %%eax, 24(%3)\n"
	       "        movnti %%edx, 28(%3)\n"
	       "11:     movl 32(%4), %%eax\n"
	       "61:     movl 36(%4), %%edx\n"
	       "        movnti %%eax, 32(%3)\n"
	       "        movnti %%edx, 36(%3)\n"
	       "12:     movl 40(%4), %%eax\n"
	       "71:     movl 44(%4), %%edx\n"
	       "        movnti %%eax, 40(%3)\n"
	       "        movnti %%edx, 44(%3)\n"
	       "13:     movl 48(%4), %%eax\n"
	       "81:     movl 52(%4), %%edx\n"
	       "        movnti %%eax, 48(%3)\n"
	       "        movnti %%edx, 52(%3)\n"
	       "14:     movl 56(%4), %%eax\n"
	       "91:     movl 60(%4), %%edx\n"
	       "        movnti %%eax, 56(%3)\n"
	       "        movnti %%edx, 60(%3)\n"
	       "        addl $-64, %0\n"
	       "        addl $64, %4\n"
	       "        addl $64, %3\n"
	       "        cmpl $63, %0\n"
	       "        ja  0b\n"
	       "        sfence \n"
	       "5:      movl  %0, %%eax\n"
	       "        shrl  $2, %0\n"
	       "        andl $3, %%eax\n"
	       "        cld\n"
	       "6:      rep; movsl\n"
	       "        movl %%eax,%0\n"
	       "7:      rep; movsb\n"
	       "8:\n"
	       _ASM_EXTABLE_UA(0b, 8b)
	       _ASM_EXTABLE_UA(1b, 8b)
	       _ASM_EXTABLE_UA(2b, 8b)
	       _ASM_EXTABLE_UA(21b, 8b)
	       _ASM_EXTABLE_UA(3b, 8b)
	       _ASM_EXTABLE_UA(31b, 8b)
	       _ASM_EXTABLE_UA(4b, 8b)
	       _ASM_EXTABLE_UA(41b, 8b)
	       _ASM_EXTABLE_UA(10b, 8b)
	       _ASM_EXTABLE_UA(51b, 8b)
	       _ASM_EXTABLE_UA(11b, 8b)
	       _ASM_EXTABLE_UA(61b, 8b)
	       _ASM_EXTABLE_UA(12b, 8b)
	       _ASM_EXTABLE_UA(71b, 8b)
	       _ASM_EXTABLE_UA(13b, 8b)
	       _ASM_EXTABLE_UA(81b, 8b)
	       _ASM_EXTABLE_UA(14b, 8b)
	       _ASM_EXTABLE_UA(91b, 8b)
	       _ASM_EXTABLE_TYPE_REG(6b, 8b, EX_TYPE_UCOPY_LEN4, %%eax)
	       _ASM_EXTABLE_UA(7b, 8b)
	       : "=&c"(size), "=&D" (d0), "=&S" (d1)
	       :  "1"(to), "2"(from), "0"(size)
	       : "eax", "edx", "memory");
	return size;
}

#else

/*
 * Leave these declared but undefined.  There should not be any
 * references to them.
 */
unsigned long __copy_user_intel(void __user *to, const void *from,
					unsigned long size);
#endif /* CONFIG_X86_INTEL_USERCOPY */
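/*
 * Rough C equivalent of the unrolled Intel loops above (illustrative
 * sketch only; copy_64_bytes() is a hypothetical stand-in, and the
 * real work must stay in assembly so the exception table can account
 * for bytes left uncopied when a fault hits mid-block):
 *
 *	while (size > 63) {
 *		copy_64_bytes(to, from);	// 16 movl or 16 movnti stores
 *		to += 64; from += 64; size -= 64;
 *	}
 *	// tail: rep movsl moves size/4 dwords, rep movsb the last size%4 bytes
 */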
%%eax\n" 224 "71: movl 44(%4), %%edx\n" 225 " movnti %%eax, 40(%3)\n 226 " movnti %%edx, 44(%3)\n 227 "13: movl 48(%4), %%eax\n" 228 "81: movl 52(%4), %%edx\n" 229 " movnti %%eax, 48(%3)\n 230 " movnti %%edx, 52(%3)\n 231 "14: movl 56(%4), %%eax\n" 232 "91: movl 60(%4), %%edx\n" 233 " movnti %%eax, 56(%3)\n 234 " movnti %%edx, 60(%3)\n 235 " addl $-64, %0\n" 236 " addl $64, %4\n" 237 " addl $64, %3\n" 238 " cmpl $63, %0\n" 239 " ja 0b\n" 240 " sfence \n" 241 "5: movl %0, %%eax\n" 242 " shrl $2, %0\n" 243 " andl $3, %%eax\n" 244 " cld\n" 245 "6: rep; movsl\n" 246 " movl %%eax,%0\n" 247 "7: rep; movsb\n" 248 "8:\n" 249 _ASM_EXTABLE_UA(0b, 8b) 250 _ASM_EXTABLE_UA(1b, 8b) 251 _ASM_EXTABLE_UA(2b, 8b) 252 _ASM_EXTABLE_UA(21b, 8b) 253 _ASM_EXTABLE_UA(3b, 8b) 254 _ASM_EXTABLE_UA(31b, 8b) 255 _ASM_EXTABLE_UA(4b, 8b) 256 _ASM_EXTABLE_UA(41b, 8b) 257 _ASM_EXTABLE_UA(10b, 8b) 258 _ASM_EXTABLE_UA(51b, 8b) 259 _ASM_EXTABLE_UA(11b, 8b) 260 _ASM_EXTABLE_UA(61b, 8b) 261 _ASM_EXTABLE_UA(12b, 8b) 262 _ASM_EXTABLE_UA(71b, 8b) 263 _ASM_EXTABLE_UA(13b, 8b) 264 _ASM_EXTABLE_UA(81b, 8b) 265 _ASM_EXTABLE_UA(14b, 8b) 266 _ASM_EXTABLE_UA(91b, 8b) 267 _ASM_EXTABLE_TYPE_REG(6b, 8b, E 268 _ASM_EXTABLE_UA(7b, 8b) 269 : "=&c"(size), "=&D" (d0), "=&S 270 : "1"(to), "2"(from), ""(size) 271 : "eax", "edx", "memory"); 272 return size; 273 } 274 275 #else 276 277 /* 278 * Leave these declared but undefined. They s 279 * them 280 */ 281 unsigned long __copy_user_intel(void __user *t 282 unsign 283 #endif /* CONFIG_X86_INTEL_USERCOPY */ 284 285 /* Generic arbitrary sized copy. */ 286 #define __copy_user(to, from, size) 287 do { 288 int __d0, __d1, __d2; 289 __asm__ __volatile__( 290 " cmp $7,%0\n" 291 " jbe 1f\n" 292 " movl %1,%0\n" 293 " negl %0\n" 294 " andl $7,%0\n" 295 " subl %0,%3\n" 296 "4: rep; movsb\n" 297 " movl %3,%0\n" 298 " shrl $2,%0\n" 299 " andl $3,%3\n" 300 " .align 2,0x90\n" 301 "0: rep; movsl\n" 302 " movl %3,%0\n" 303 "1: rep; movsb\n" 304 "2:\n" 305 _ASM_EXTABLE_TYPE_REG(4b, 2b, 306 _ASM_EXTABLE_TYPE_REG(0b, 2b, 307 _ASM_EXTABLE_UA(1b, 2b) 308 : "=&c"(size), "=&D" (__d0), " 309 : "3"(size), ""(size), "1"(to) 310 : "memory"); 311 } while (0) 312 313 unsigned long __copy_user_ll(void *to, const v 314 { 315 __uaccess_begin_nospec(); 316 if (movsl_is_ok(to, from, n)) 317 __copy_user(to, from, n); 318 else 319 n = __copy_user_intel(to, from 320 __uaccess_end(); 321 return n; 322 } 323 EXPORT_SYMBOL(__copy_user_ll); 324 325 unsigned long __copy_from_user_ll_nocache_noze 326 unsign 327 { 328 __uaccess_begin_nospec(); 329 #ifdef CONFIG_X86_INTEL_USERCOPY 330 if (n > 64 && static_cpu_has(X86_FEATU 331 n = __copy_user_intel_nocache( 332 else 333 __copy_user(to, from, n); 334 #else 335 __copy_user(to, from, n); 336 #endif 337 __uaccess_end(); 338 return n; 339 } 340 EXPORT_SYMBOL(__copy_from_user_ll_nocache_noze 341