usercopy_32.c

/*
 * User address space access functions.
 * The non-inlined parts of asm-i386/uaccess.h are here.
 *
 * Copyright 1997 Andi Kleen <ak@muc.de>
 * Copyright 1997 Linus Torvalds
 */

#include <linux/export.h>
#include <linux/uaccess.h>
#include <asm/mmx.h>
#include <asm/asm.h>

#ifdef CONFIG_X86_INTEL_USERCOPY
/*
 * Alignment at which movsl is preferred for bulk memory copies.
 */
struct movsl_mask movsl_mask __read_mostly;
#endif
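
/*
 * Heuristic used by __copy_user_ll(): for copies of 64 bytes or more,
 * "rep movsl" is only used when source and destination agree in the
 * address bits selected by movsl_mask (initialized elsewhere, depending
 * on the CPU); otherwise the unrolled Intel loop below is preferred.
 * Smaller copies always take the movsl path.
 */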
static inline int __movsl_is_ok(unsigned long a1, unsigned long a2, unsigned long n)
{
#ifdef CONFIG_X86_INTEL_USERCOPY
	if (n >= 64 && ((a1 ^ a2) & movsl_mask.mask))
		return 0;
#endif
	return 1;
}

#define movsl_is_ok(a1, a2, n) \
	__movsl_is_ok((unsigned long)(a1), (unsigned long)(a2), (n))

/*
 * Zero Userspace
 */
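/*
 * __do_clear_user(): zero "size" bytes at user address "addr". Whole
 * longs are cleared with "rep; stosl", the remaining 0-3 bytes with
 * "rep; stosb". On a fault, the fixup at label 3 converts the
 * outstanding long count back into a byte count, so "size" ends up
 * holding the number of bytes that could not be cleared.
 */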
#define __do_clear_user(addr,size) \
do { \
	int __d0; \
	might_fault(); \
	__asm__ __volatile__( \
		ASM_STAC "\n" \
		"0: rep; stosl\n" \
		" movl %2,%0\n" \
		"1: rep; stosb\n" \
		"2: " ASM_CLAC "\n" \
		".section .fixup,\"ax\"\n" \
		"3: lea 0(%2,%0,4),%0\n" \
		" jmp 2b\n" \
		".previous\n" \
		_ASM_EXTABLE(0b,3b) \
		_ASM_EXTABLE(1b,2b) \
		: "=&c"(size), "=&D" (__d0) \
		: "r"(size & 3), "0"(size / 4), "1"(addr), "a"(0)); \
} while (0)

/**
 * clear_user: - Zero a block of memory in user space.
 * @to: Destination address, in user space.
 * @n: Number of bytes to zero.
 *
 * Zero a block of memory in user space.
 *
 * Returns number of bytes that could not be cleared.
 * On success, this will be zero.
 */
unsigned long
clear_user(void __user *to, unsigned long n)
{
	might_fault();
	if (access_ok(VERIFY_WRITE, to, n))
		__do_clear_user(to, n);
	return n;
}
EXPORT_SYMBOL(clear_user);

/**
 * __clear_user: - Zero a block of memory in user space, with less checking.
 * @to: Destination address, in user space.
 * @n: Number of bytes to zero.
 *
 * Zero a block of memory in user space. Caller must check
 * the specified block with access_ok() before calling this function.
 *
 * Returns number of bytes that could not be cleared.
 * On success, this will be zero.
 */
unsigned long
__clear_user(void __user *to, unsigned long n)
{
	__do_clear_user(to, n);
	return n;
}
EXPORT_SYMBOL(__clear_user);

#ifdef CONFIG_X86_INTEL_USERCOPY
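/*
 * Unrolled copy for Intel CPUs on which a misaligned "rep movsl" is
 * slow: each loop iteration moves 64 bytes with paired 32-bit loads
 * and stores (the loads at labels 1 and 2 also touch the data ahead of
 * the current block), and the tail is finished with rep movsl /
 * rep movsb. Every access has an exception-table entry, so on a fault
 * the remaining byte count is left in "size" and returned.
 */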
static unsigned long
__copy_user_intel(void __user *to, const void *from, unsigned long size)
{
	int d0, d1;
	__asm__ __volatile__(
		" .align 2,0x90\n"
		"1: movl 32(%4), %%eax\n"
		" cmpl $67, %0\n"
		" jbe 3f\n"
		"2: movl 64(%4), %%eax\n"
		" .align 2,0x90\n"
		"3: movl 0(%4), %%eax\n"
		"4: movl 4(%4), %%edx\n"
		"5: movl %%eax, 0(%3)\n"
		"6: movl %%edx, 4(%3)\n"
		"7: movl 8(%4), %%eax\n"
		"8: movl 12(%4),%%edx\n"
		"9: movl %%eax, 8(%3)\n"
		"10: movl %%edx, 12(%3)\n"
		"11: movl 16(%4), %%eax\n"
		"12: movl 20(%4), %%edx\n"
		"13: movl %%eax, 16(%3)\n"
		"14: movl %%edx, 20(%3)\n"
		"15: movl 24(%4), %%eax\n"
		"16: movl 28(%4), %%edx\n"
		"17: movl %%eax, 24(%3)\n"
		"18: movl %%edx, 28(%3)\n"
		"19: movl 32(%4), %%eax\n"
		"20: movl 36(%4), %%edx\n"
		"21: movl %%eax, 32(%3)\n"
		"22: movl %%edx, 36(%3)\n"
		"23: movl 40(%4), %%eax\n"
		"24: movl 44(%4), %%edx\n"
		"25: movl %%eax, 40(%3)\n"
		"26: movl %%edx, 44(%3)\n"
		"27: movl 48(%4), %%eax\n"
		"28: movl 52(%4), %%edx\n"
		"29: movl %%eax, 48(%3)\n"
		"30: movl %%edx, 52(%3)\n"
		"31: movl 56(%4), %%eax\n"
		"32: movl 60(%4), %%edx\n"
		"33: movl %%eax, 56(%3)\n"
		"34: movl %%edx, 60(%3)\n"
		" addl $-64, %0\n"
		" addl $64, %4\n"
		" addl $64, %3\n"
		" cmpl $63, %0\n"
		" ja 1b\n"
		"35: movl %0, %%eax\n"
		" shrl $2, %0\n"
		" andl $3, %%eax\n"
		" cld\n"
		"99: rep; movsl\n"
		"36: movl %%eax, %0\n"
		"37: rep; movsb\n"
		"100:\n"
		".section .fixup,\"ax\"\n"
		"101: lea 0(%%eax,%0,4),%0\n"
		" jmp 100b\n"
		".previous\n"
		_ASM_EXTABLE(1b,100b)
		_ASM_EXTABLE(2b,100b)
		_ASM_EXTABLE(3b,100b)
		_ASM_EXTABLE(4b,100b)
		_ASM_EXTABLE(5b,100b)
		_ASM_EXTABLE(6b,100b)
		_ASM_EXTABLE(7b,100b)
		_ASM_EXTABLE(8b,100b)
		_ASM_EXTABLE(9b,100b)
		_ASM_EXTABLE(10b,100b)
		_ASM_EXTABLE(11b,100b)
		_ASM_EXTABLE(12b,100b)
		_ASM_EXTABLE(13b,100b)
		_ASM_EXTABLE(14b,100b)
		_ASM_EXTABLE(15b,100b)
		_ASM_EXTABLE(16b,100b)
		_ASM_EXTABLE(17b,100b)
		_ASM_EXTABLE(18b,100b)
		_ASM_EXTABLE(19b,100b)
		_ASM_EXTABLE(20b,100b)
		_ASM_EXTABLE(21b,100b)
		_ASM_EXTABLE(22b,100b)
		_ASM_EXTABLE(23b,100b)
		_ASM_EXTABLE(24b,100b)
		_ASM_EXTABLE(25b,100b)
		_ASM_EXTABLE(26b,100b)
		_ASM_EXTABLE(27b,100b)
		_ASM_EXTABLE(28b,100b)
		_ASM_EXTABLE(29b,100b)
		_ASM_EXTABLE(30b,100b)
		_ASM_EXTABLE(31b,100b)
		_ASM_EXTABLE(32b,100b)
		_ASM_EXTABLE(33b,100b)
		_ASM_EXTABLE(34b,100b)
		_ASM_EXTABLE(35b,100b)
		_ASM_EXTABLE(36b,100b)
		_ASM_EXTABLE(37b,100b)
		_ASM_EXTABLE(99b,101b)
		: "=&c"(size), "=&D" (d0), "=&S" (d1)
		: "1"(to), "2"(from), "0"(size)
		: "eax", "edx", "memory");
	return size;
}
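
/*
 * Same structure as __copy_user_intel(), but the stores use movnti
 * (non-temporal hints) so that a large copy does not push the current
 * working set out of the CPU caches; the sfence after the loop makes
 * the weakly-ordered stores globally visible before returning.
 */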
static unsigned long __copy_user_intel_nocache(void *to,
				const void __user *from, unsigned long size)
{
	int d0, d1;
	__asm__ __volatile__(
		" .align 2,0x90\n"
		"0: movl 32(%4), %%eax\n"
		" cmpl $67, %0\n"
		" jbe 2f\n"
		"1: movl 64(%4), %%eax\n"
		" .align 2,0x90\n"
		"2: movl 0(%4), %%eax\n"
		"21: movl 4(%4), %%edx\n"
		" movnti %%eax, 0(%3)\n"
		" movnti %%edx, 4(%3)\n"
		"3: movl 8(%4), %%eax\n"
		"31: movl 12(%4),%%edx\n"
		" movnti %%eax, 8(%3)\n"
		" movnti %%edx, 12(%3)\n"
		"4: movl 16(%4), %%eax\n"
		"41: movl 20(%4), %%edx\n"
		" movnti %%eax, 16(%3)\n"
		" movnti %%edx, 20(%3)\n"
		"10: movl 24(%4), %%eax\n"
		"51: movl 28(%4), %%edx\n"
		" movnti %%eax, 24(%3)\n"
		" movnti %%edx, 28(%3)\n"
		"11: movl 32(%4), %%eax\n"
		"61: movl 36(%4), %%edx\n"
		" movnti %%eax, 32(%3)\n"
		" movnti %%edx, 36(%3)\n"
		"12: movl 40(%4), %%eax\n"
		"71: movl 44(%4), %%edx\n"
		" movnti %%eax, 40(%3)\n"
		" movnti %%edx, 44(%3)\n"
		"13: movl 48(%4), %%eax\n"
		"81: movl 52(%4), %%edx\n"
		" movnti %%eax, 48(%3)\n"
		" movnti %%edx, 52(%3)\n"
		"14: movl 56(%4), %%eax\n"
		"91: movl 60(%4), %%edx\n"
		" movnti %%eax, 56(%3)\n"
		" movnti %%edx, 60(%3)\n"
		" addl $-64, %0\n"
		" addl $64, %4\n"
		" addl $64, %3\n"
		" cmpl $63, %0\n"
		" ja 0b\n"
		" sfence \n"
		"5: movl %0, %%eax\n"
		" shrl $2, %0\n"
		" andl $3, %%eax\n"
		" cld\n"
		"6: rep; movsl\n"
		" movl %%eax,%0\n"
		"7: rep; movsb\n"
		"8:\n"
		".section .fixup,\"ax\"\n"
		"9: lea 0(%%eax,%0,4),%0\n"
		"16: jmp 8b\n"
		".previous\n"
		_ASM_EXTABLE(0b,16b)
		_ASM_EXTABLE(1b,16b)
		_ASM_EXTABLE(2b,16b)
		_ASM_EXTABLE(21b,16b)
		_ASM_EXTABLE(3b,16b)
		_ASM_EXTABLE(31b,16b)
		_ASM_EXTABLE(4b,16b)
		_ASM_EXTABLE(41b,16b)
		_ASM_EXTABLE(10b,16b)
		_ASM_EXTABLE(51b,16b)
		_ASM_EXTABLE(11b,16b)
		_ASM_EXTABLE(61b,16b)
		_ASM_EXTABLE(12b,16b)
		_ASM_EXTABLE(71b,16b)
		_ASM_EXTABLE(13b,16b)
		_ASM_EXTABLE(81b,16b)
		_ASM_EXTABLE(14b,16b)
		_ASM_EXTABLE(91b,16b)
		_ASM_EXTABLE(6b,9b)
		_ASM_EXTABLE(7b,16b)
		: "=&c"(size), "=&D" (d0), "=&S" (d1)
		: "1"(to), "2"(from), "0"(size)
		: "eax", "edx", "memory");
	return size;
}

#else
/*
 * Leave these declared but undefined. There should not be any
 * references to them.
 */
unsigned long __copy_user_intel(void __user *to, const void *from,
				unsigned long size);
#endif /* CONFIG_X86_INTEL_USERCOPY */

/* Generic arbitrary sized copy. */
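/*
 * __copy_user(): if more than 7 bytes are requested, copy single bytes
 * until the destination is 8-byte aligned, then copy whole longs with
 * "rep; movsl" and the last 0-3 bytes with "rep; movsb". The fixup
 * entries turn a fault in any of the three phases back into the number
 * of bytes that were not copied, which is what the macro leaves in
 * "size".
 */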
#define __copy_user(to, from, size) \
do { \
	int __d0, __d1, __d2; \
	__asm__ __volatile__( \
		" cmp $7,%0\n" \
		" jbe 1f\n" \
		" movl %1,%0\n" \
		" negl %0\n" \
		" andl $7,%0\n" \
		" subl %0,%3\n" \
		"4: rep; movsb\n" \
		" movl %3,%0\n" \
		" shrl $2,%0\n" \
		" andl $3,%3\n" \
		" .align 2,0x90\n" \
		"0: rep; movsl\n" \
		" movl %3,%0\n" \
		"1: rep; movsb\n" \
		"2:\n" \
		".section .fixup,\"ax\"\n" \
		"5: addl %3,%0\n" \
		" jmp 2b\n" \
		"3: lea 0(%3,%0,4),%0\n" \
		" jmp 2b\n" \
		".previous\n" \
		_ASM_EXTABLE(4b,5b) \
		_ASM_EXTABLE(0b,3b) \
		_ASM_EXTABLE(1b,2b) \
		: "=&c"(size), "=&D" (__d0), "=&S" (__d1), "=r"(__d2) \
		: "3"(size), "0"(size), "1"(to), "2"(from) \
		: "memory"); \
} while (0)
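
/*
 * Low-level bulk copy behind the 32-bit user-copy helpers: use the
 * plain rep-movsl based __copy_user() when movsl_is_ok() says the
 * operands are suitably aligned (or the copy is small), otherwise fall
 * back to the unrolled Intel loop. Returns the number of bytes that
 * could not be copied.
 */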
unsigned long __copy_user_ll(void *to, const void *from, unsigned long n)
{
	stac();
	if (movsl_is_ok(to, from, n))
		__copy_user(to, from, n);
	else
		n = __copy_user_intel(to, from, n);
	clac();
	return n;
}
EXPORT_SYMBOL(__copy_user_ll);
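
/*
 * Cache-avoiding copy from user space: for copies larger than 64 bytes
 * on CPUs with SSE2 (X86_FEATURE_XMM2, which provides movnti/sfence),
 * use the non-temporal Intel loop; otherwise fall back to the ordinary
 * __copy_user(). The "nozero" in the name reflects that the tail of the
 * destination buffer is left untouched when a fault cuts the copy short.
 */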
unsigned long __copy_from_user_ll_nocache_nozero(void *to, const void __user *from,
					unsigned long n)
{
	stac();
#ifdef CONFIG_X86_INTEL_USERCOPY
	if (n > 64 && static_cpu_has(X86_FEATURE_XMM2))
		n = __copy_user_intel_nocache(to, from, n);
	else
		__copy_user(to, from, n);
#else
	__copy_user(to, from, n);
#endif
	clac();
	return n;
}
EXPORT_SYMBOL(__copy_from_user_ll_nocache_nozero);