getuser.S 2.4 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127
  1. /*
  2. * __get_user functions.
  3. *
  4. * (C) Copyright 1998 Linus Torvalds
  5. * (C) Copyright 2005 Andi Kleen
  6. * (C) Copyright 2008 Glauber Costa
  7. *
  8. * These functions have a non-standard call interface
  9. * to make them more efficient, especially as they
  10. * return an error value in addition to the "real"
  11. * return value.
  12. */
  13. /*
  14. * __get_user_X
  15. *
  16. * Inputs: %[r|e]ax contains the address.
  17. *
  18. * Outputs: %[r|e]ax is error code (0 or -EFAULT)
  19. * %[r|e]dx contains zero-extended value
  20. * %ecx contains the high half for 32-bit __get_user_8
  21. *
  22. *
  23. * These functions should not modify any other registers,
  24. * as they get called from within inline assembly.
  25. */
  26. #include <linux/linkage.h>
  27. #include <asm/page_types.h>
  28. #include <asm/errno.h>
  29. #include <asm/asm-offsets.h>
  30. #include <asm/thread_info.h>
  31. #include <asm/asm.h>
  32. #include <asm/smap.h>
  33. .text
  34. ENTRY(__get_user_1)
  35. mov PER_CPU_VAR(current_task), %_ASM_DX
  36. cmp TASK_addr_limit(%_ASM_DX),%_ASM_AX
  37. jae bad_get_user
  38. ASM_STAC
  39. 1: movzbl (%_ASM_AX),%edx
  40. xor %eax,%eax
  41. ASM_CLAC
  42. ret
  43. ENDPROC(__get_user_1)
  44. ENTRY(__get_user_2)
  45. add $1,%_ASM_AX
  46. jc bad_get_user
  47. mov PER_CPU_VAR(current_task), %_ASM_DX
  48. cmp TASK_addr_limit(%_ASM_DX),%_ASM_AX
  49. jae bad_get_user
  50. ASM_STAC
  51. 2: movzwl -1(%_ASM_AX),%edx
  52. xor %eax,%eax
  53. ASM_CLAC
  54. ret
  55. ENDPROC(__get_user_2)
  56. ENTRY(__get_user_4)
  57. add $3,%_ASM_AX
  58. jc bad_get_user
  59. mov PER_CPU_VAR(current_task), %_ASM_DX
  60. cmp TASK_addr_limit(%_ASM_DX),%_ASM_AX
  61. jae bad_get_user
  62. ASM_STAC
  63. 3: movl -3(%_ASM_AX),%edx
  64. xor %eax,%eax
  65. ASM_CLAC
  66. ret
  67. ENDPROC(__get_user_4)
  68. ENTRY(__get_user_8)
  69. #ifdef CONFIG_X86_64
  70. add $7,%_ASM_AX
  71. jc bad_get_user
  72. mov PER_CPU_VAR(current_task), %_ASM_DX
  73. cmp TASK_addr_limit(%_ASM_DX),%_ASM_AX
  74. jae bad_get_user
  75. ASM_STAC
  76. 4: movq -7(%_ASM_AX),%rdx
  77. xor %eax,%eax
  78. ASM_CLAC
  79. ret
  80. #else
  81. add $7,%_ASM_AX
  82. jc bad_get_user_8
  83. mov PER_CPU_VAR(current_task), %_ASM_DX
  84. cmp TASK_addr_limit(%_ASM_DX),%_ASM_AX
  85. jae bad_get_user_8
  86. ASM_STAC
  87. 4: movl -7(%_ASM_AX),%edx
  88. 5: movl -3(%_ASM_AX),%ecx
  89. xor %eax,%eax
  90. ASM_CLAC
  91. ret
  92. #endif
  93. ENDPROC(__get_user_8)
  94. bad_get_user:
  95. xor %edx,%edx
  96. mov $(-EFAULT),%_ASM_AX
  97. ASM_CLAC
  98. ret
  99. END(bad_get_user)
  100. #ifdef CONFIG_X86_32
  101. bad_get_user_8:
  102. xor %edx,%edx
  103. xor %ecx,%ecx
  104. mov $(-EFAULT),%_ASM_AX
  105. ASM_CLAC
  106. ret
  107. END(bad_get_user_8)
  108. #endif
/*
 * Exception-table entries: if one of the tagged user-space loads
 * (labels 1:-5: above) takes a page fault, the fault handler
 * redirects execution to the listed fixup label instead of oopsing.
 */
_ASM_EXTABLE(1b,bad_get_user)
_ASM_EXTABLE(2b,bad_get_user)
_ASM_EXTABLE(3b,bad_get_user)
#ifdef CONFIG_X86_64
_ASM_EXTABLE(4b,bad_get_user)
#else
/* 32-bit __get_user_8: either half faulting must also clear %ecx */
_ASM_EXTABLE(4b,bad_get_user_8)
_ASM_EXTABLE(5b,bad_get_user_8)
#endif