/* arch/x86/lib/getuser.S */
  1. /*
  2. * __get_user functions.
  3. *
  4. * (C) Copyright 1998 Linus Torvalds
  5. * (C) Copyright 2005 Andi Kleen
  6. * (C) Copyright 2008 Glauber Costa
  7. *
  8. * These functions have a non-standard call interface
  9. * to make them more efficient, especially as they
  10. * return an error value in addition to the "real"
  11. * return value.
  12. */
  13. /*
  14. * __get_user_X
  15. *
  16. * Inputs: %[r|e]ax contains the address.
  17. *
  18. * Outputs: %[r|e]ax is error code (0 or -EFAULT)
  19. * %[r|e]dx contains zero-extended value
  20. * %ecx contains the high half for 32-bit __get_user_8
  21. *
  22. *
  23. * These functions should not modify any other registers,
  24. * as they get called from within inline assembly.
  25. */
  26. #include <linux/linkage.h>
  27. #include <asm/page_types.h>
  28. #include <asm/errno.h>
  29. #include <asm/asm-offsets.h>
  30. #include <asm/thread_info.h>
  31. #include <asm/asm.h>
  32. #include <asm/smap.h>
  33. #include <asm/export.h>
  34. .text
/*
 * __get_user_1: fetch one byte from user space.
 * In:  %[r|e]ax = user address
 * Out: %[r|e]ax = 0 on success, -EFAULT on fault/bad address
 *      %edx     = zero-extended byte value
 */
ENTRY(__get_user_1)
	mov PER_CPU_VAR(current_task), %_ASM_DX
	/* Reject addresses at or above the task's user/kernel limit. */
	cmp TASK_addr_limit(%_ASM_DX),%_ASM_AX
	jae bad_get_user
	/*
	 * The cmp above left CF set when the address is below the limit;
	 * sbb turns that into an all-ones mask (or all-zeroes if the CPU
	 * speculates past the jae), clamping the address to NULL under
	 * mis-speculation (Spectre v1 mitigation).
	 */
	sbb %_ASM_DX, %_ASM_DX /* array_index_mask_nospec() */
	and %_ASM_DX, %_ASM_AX
	ASM_STAC			/* open SMAP window for the user access */
1:	movzbl (%_ASM_AX),%edx		/* may fault; fixed up via extable */
	xor %eax,%eax			/* success */
	ASM_CLAC
	ret
ENDPROC(__get_user_1)
EXPORT_SYMBOL(__get_user_1)
/*
 * __get_user_2: fetch a 16-bit value from user space.
 * In:  %[r|e]ax = user address
 * Out: %[r|e]ax = 0 on success, -EFAULT on fault/bad address
 *      %edx     = zero-extended value
 */
ENTRY(__get_user_2)
	/*
	 * Point at the LAST byte of the access so one limit check covers
	 * both bytes; jc catches address-space wraparound.
	 */
	add $1,%_ASM_AX
	jc bad_get_user
	mov PER_CPU_VAR(current_task), %_ASM_DX
	cmp TASK_addr_limit(%_ASM_DX),%_ASM_AX
	jae bad_get_user
	/* CF from cmp -> all-ones mask; zero under mis-speculation. */
	sbb %_ASM_DX, %_ASM_DX /* array_index_mask_nospec() */
	and %_ASM_DX, %_ASM_AX
	ASM_STAC
2:	movzwl -1(%_ASM_AX),%edx	/* -1 undoes the add; may fault */
	xor %eax,%eax
	ASM_CLAC
	ret
ENDPROC(__get_user_2)
EXPORT_SYMBOL(__get_user_2)
/*
 * __get_user_4: fetch a 32-bit value from user space.
 * In:  %[r|e]ax = user address
 * Out: %[r|e]ax = 0 on success, -EFAULT on fault/bad address
 *      %edx     = zero-extended value
 */
ENTRY(__get_user_4)
	/* Check the last byte of the 4-byte access; jc catches wrap. */
	add $3,%_ASM_AX
	jc bad_get_user
	mov PER_CPU_VAR(current_task), %_ASM_DX
	cmp TASK_addr_limit(%_ASM_DX),%_ASM_AX
	jae bad_get_user
	/* CF from cmp -> all-ones mask; zero under mis-speculation. */
	sbb %_ASM_DX, %_ASM_DX /* array_index_mask_nospec() */
	and %_ASM_DX, %_ASM_AX
	ASM_STAC
3:	movl -3(%_ASM_AX),%edx		/* -3 undoes the add; may fault */
	xor %eax,%eax
	ASM_CLAC
	ret
ENDPROC(__get_user_4)
EXPORT_SYMBOL(__get_user_4)
/*
 * __get_user_8: fetch a 64-bit value from user space.
 * In:  %[r|e]ax = user address
 * Out: %[r|e]ax = 0 on success, -EFAULT on fault/bad address
 *      64-bit: %rdx = value
 *      32-bit: %edx = low half, %ecx = high half (two 4-byte loads)
 */
ENTRY(__get_user_8)
#ifdef CONFIG_X86_64
	/* Check the last byte of the 8-byte access; jc catches wrap. */
	add $7,%_ASM_AX
	jc bad_get_user
	mov PER_CPU_VAR(current_task), %_ASM_DX
	cmp TASK_addr_limit(%_ASM_DX),%_ASM_AX
	jae bad_get_user
	/* CF from cmp -> all-ones mask; zero under mis-speculation. */
	sbb %_ASM_DX, %_ASM_DX /* array_index_mask_nospec() */
	and %_ASM_DX, %_ASM_AX
	ASM_STAC
4:	movq -7(%_ASM_AX),%rdx		/* -7 undoes the add; may fault */
	xor %eax,%eax
	ASM_CLAC
	ret
#else
	/*
	 * 32-bit: same range check, but the 8-byte value is returned as
	 * two 32-bit halves, and a fault in EITHER load must zero both
	 * %edx and %ecx -- hence the dedicated bad_get_user_8 fixup.
	 */
	add $7,%_ASM_AX
	jc bad_get_user_8
	mov PER_CPU_VAR(current_task), %_ASM_DX
	cmp TASK_addr_limit(%_ASM_DX),%_ASM_AX
	jae bad_get_user_8
	sbb %_ASM_DX, %_ASM_DX /* array_index_mask_nospec() */
	and %_ASM_DX, %_ASM_AX
	ASM_STAC
4:	movl -7(%_ASM_AX),%edx		/* low 32 bits; may fault */
5:	movl -3(%_ASM_AX),%ecx		/* high 32 bits; may fault */
	xor %eax,%eax
	ASM_CLAC
	ret
#endif
ENDPROC(__get_user_8)
EXPORT_SYMBOL(__get_user_8)
/*
 * Common failure path (bad address or faulting load, via the exception
 * table): zero the value register and return -EFAULT in %[r|e]ax.
 */
bad_get_user:
	xor %edx,%edx
	mov $(-EFAULT),%_ASM_AX
	ASM_CLAC			/* close the SMAP window on the fault path too */
	ret
END(bad_get_user)
#ifdef CONFIG_X86_32
/*
 * 32-bit __get_user_8 failure path: the value lives in the %edx:%ecx
 * pair, so both halves must be zeroed before returning -EFAULT.
 */
bad_get_user_8:
	xor %edx,%edx
	xor %ecx,%ecx
	mov $(-EFAULT),%_ASM_AX
	ASM_CLAC
	ret
END(bad_get_user_8)
#endif
/*
 * Exception-table entries: route a page fault at each numbered user
 * load to the matching fixup, which returns -EFAULT to the caller.
 */
	_ASM_EXTABLE(1b,bad_get_user)
	_ASM_EXTABLE(2b,bad_get_user)
	_ASM_EXTABLE(3b,bad_get_user)
#ifdef CONFIG_X86_64
	_ASM_EXTABLE(4b,bad_get_user)
#else
	/* 32-bit __get_user_8 loads need the fixup that also zeroes %ecx. */
	_ASM_EXTABLE(4b,bad_get_user_8)
	_ASM_EXTABLE(5b,bad_get_user_8)
#endif