futex.h

/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (c) 2006 Ralf Baechle (ralf@linux-mips.org)
 */
#ifndef _ASM_FUTEX_H
#define _ASM_FUTEX_H

#ifdef __KERNEL__

#include <linux/futex.h>
#include <linux/uaccess.h>
#include <asm/asm-eva.h>
#include <asm/barrier.h>
#include <asm/compiler.h>
#include <asm/errno.h>
#include <asm/war.h>
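
/*
 * __futex_atomic_op() emits an LL/SC retry loop that applies "insn" to the
 * user-space futex word at *uaddr, leaving the old value in "oldval" and
 * 0 or -EFAULT in "ret".  Three variants are generated: the R10000_LLSC_WAR
 * form (branch-likely beqzl), the plain LL/SC form using the EVA-aware
 * user_ll()/user_sc() accessors, and a -ENOSYS fallback for CPUs without
 * LL/SC.  Faults on the ll/sc are routed via the __ex_table entries to the
 * fixup at label 4, which loads -EFAULT into the result.
 */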
#define __futex_atomic_op(insn, ret, oldval, uaddr, oparg) \
{ \
        if (cpu_has_llsc && R10000_LLSC_WAR) { \
                __asm__ __volatile__( \
                " .set push \n" \
                " .set noat \n" \
                " .set arch=r4000 \n" \
                "1: ll %1, %4 # __futex_atomic_op \n" \
                " .set mips0 \n" \
                " " insn " \n" \
                " .set arch=r4000 \n" \
                "2: sc $1, %2 \n" \
                " beqzl $1, 1b \n" \
                __WEAK_LLSC_MB \
                "3: \n" \
                " .insn \n" \
                " .set pop \n" \
                " .set mips0 \n" \
                " .section .fixup,\"ax\" \n" \
                "4: li %0, %6 \n" \
                " j 3b \n" \
                " .previous \n" \
                " .section __ex_table,\"a\" \n" \
                " "__UA_ADDR "\t1b, 4b \n" \
                " "__UA_ADDR "\t2b, 4b \n" \
                " .previous \n" \
                : "=r" (ret), "=&r" (oldval), \
                  "=" GCC_OFF_SMALL_ASM() (*uaddr) \
                : "0" (0), GCC_OFF_SMALL_ASM() (*uaddr), "Jr" (oparg), \
                  "i" (-EFAULT) \
                : "memory"); \
        } else if (cpu_has_llsc) { \
                __asm__ __volatile__( \
                " .set push \n" \
                " .set noat \n" \
                " .set "MIPS_ISA_ARCH_LEVEL" \n" \
                "1: "user_ll("%1", "%4")" # __futex_atomic_op\n" \
                " .set mips0 \n" \
                " " insn " \n" \
                " .set "MIPS_ISA_ARCH_LEVEL" \n" \
                "2: "user_sc("$1", "%2")" \n" \
                " beqz $1, 1b \n" \
                __WEAK_LLSC_MB \
                "3: \n" \
                " .insn \n" \
                " .set pop \n" \
                " .set mips0 \n" \
                " .section .fixup,\"ax\" \n" \
                "4: li %0, %6 \n" \
                " j 3b \n" \
                " .previous \n" \
                " .section __ex_table,\"a\" \n" \
                " "__UA_ADDR "\t1b, 4b \n" \
                " "__UA_ADDR "\t2b, 4b \n" \
                " .previous \n" \
                : "=r" (ret), "=&r" (oldval), \
                  "=" GCC_OFF_SMALL_ASM() (*uaddr) \
                : "0" (0), GCC_OFF_SMALL_ASM() (*uaddr), "Jr" (oparg), \
                  "i" (-EFAULT) \
                : "memory"); \
        } else \
                ret = -ENOSYS; \
}
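
/*
 * arch_futex_atomic_op_inuser() - perform futex operation "op" with operand
 * "oparg" atomically on the user word at "uaddr" and, on success, store the
 * word's previous value through "oval".  Page faults are disabled around the
 * access, so a faulting ll/sc reports -EFAULT through the macro's fixup path
 * instead of sleeping; unrecognised ops return -ENOSYS.
 */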
static inline int
arch_futex_atomic_op_inuser(int op, int oparg, int *oval, u32 __user *uaddr)
{
        int oldval = 0, ret;

        pagefault_disable();

        switch (op) {
        case FUTEX_OP_SET:
                __futex_atomic_op("move $1, %z5", ret, oldval, uaddr, oparg);
                break;
        case FUTEX_OP_ADD:
                __futex_atomic_op("addu $1, %1, %z5",
                                  ret, oldval, uaddr, oparg);
                break;
        case FUTEX_OP_OR:
                __futex_atomic_op("or $1, %1, %z5",
                                  ret, oldval, uaddr, oparg);
                break;
        case FUTEX_OP_ANDN:
                __futex_atomic_op("and $1, %1, %z5",
                                  ret, oldval, uaddr, ~oparg);
                break;
        case FUTEX_OP_XOR:
                __futex_atomic_op("xor $1, %1, %z5",
                                  ret, oldval, uaddr, oparg);
                break;
        default:
                ret = -ENOSYS;
        }

        pagefault_enable();

        if (!ret)
                *oval = oldval;

        return ret;
}
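
/*
 * futex_atomic_cmpxchg_inatomic() - compare the user word at "uaddr" with
 * "oldval" and, if they match, store "newval"; the value actually read is
 * returned through "uval".  As with __futex_atomic_op() there are R10000
 * branch-likely, plain LL/SC and -ENOSYS variants, with the __ex_table
 * fixup turning faults into -EFAULT.
 */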
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
                              u32 oldval, u32 newval)
{
        int ret = 0;
        u32 val;

        if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
                return -EFAULT;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                __asm__ __volatile__(
                "# futex_atomic_cmpxchg_inatomic \n"
                " .set push \n"
                " .set noat \n"
                " .set arch=r4000 \n"
                "1: ll %1, %3 \n"
                " bne %1, %z4, 3f \n"
                " .set mips0 \n"
                " move $1, %z5 \n"
                " .set arch=r4000 \n"
                "2: sc $1, %2 \n"
                " beqzl $1, 1b \n"
                __WEAK_LLSC_MB
                "3: \n"
                " .insn \n"
                " .set pop \n"
                " .section .fixup,\"ax\" \n"
                "4: li %0, %6 \n"
                " j 3b \n"
                " .previous \n"
                " .section __ex_table,\"a\" \n"
                " "__UA_ADDR "\t1b, 4b \n"
                " "__UA_ADDR "\t2b, 4b \n"
                " .previous \n"
                : "+r" (ret), "=&r" (val), "=" GCC_OFF_SMALL_ASM() (*uaddr)
                : GCC_OFF_SMALL_ASM() (*uaddr), "Jr" (oldval), "Jr" (newval),
                  "i" (-EFAULT)
                : "memory");
        } else if (cpu_has_llsc) {
                __asm__ __volatile__(
                "# futex_atomic_cmpxchg_inatomic \n"
                " .set push \n"
                " .set noat \n"
                " .set "MIPS_ISA_ARCH_LEVEL" \n"
                "1: "user_ll("%1", "%3")" \n"
                " bne %1, %z4, 3f \n"
                " .set mips0 \n"
                " move $1, %z5 \n"
                " .set "MIPS_ISA_ARCH_LEVEL" \n"
                "2: "user_sc("$1", "%2")" \n"
                " beqz $1, 1b \n"
                __WEAK_LLSC_MB
                "3: \n"
                " .insn \n"
                " .set pop \n"
                " .section .fixup,\"ax\" \n"
                "4: li %0, %6 \n"
                " j 3b \n"
                " .previous \n"
                " .section __ex_table,\"a\" \n"
                " "__UA_ADDR "\t1b, 4b \n"
                " "__UA_ADDR "\t2b, 4b \n"
                " .previous \n"
                : "+r" (ret), "=&r" (val), "=" GCC_OFF_SMALL_ASM() (*uaddr)
                : GCC_OFF_SMALL_ASM() (*uaddr), "Jr" (oldval), "Jr" (newval),
                  "i" (-EFAULT)
                : "memory");
        } else
                return -ENOSYS;

        *uval = val;
        return ret;
}

#endif /* __KERNEL__ */
#endif /* _ASM_FUTEX_H */