/*
 * futex.h -- Hexagon architecture futex operations.
 * (Original file header/license banner lost in extraction.)
 */
  1. #ifndef _ASM_HEXAGON_FUTEX_H
  2. #define _ASM_HEXAGON_FUTEX_H
  3. #ifdef __KERNEL__
  4. #include <linux/futex.h>
  5. #include <linux/uaccess.h>
  6. #include <asm/errno.h>
/* XXX TODO-- need to add sync barriers! */

/*
 * __futex_atomic_op() - load-locked/store-conditional read-modify-write
 * of a user-space futex word.
 *
 * "insn" is an asm fragment that computes the new value in %1 from the
 * loaded old value (%0) and the operand (%4).  The conditional store at
 * label 2: retries from 1: until the reservation holds; on success %1
 * (ret) is overwritten with 0.  Both the user load (1:) and the user
 * store (2:) have __ex_table entries pointing at the .fixup stub 4:,
 * which sets ret to -EFAULT when the access faults.
 *
 * Outputs: oldval = value previously in *uaddr, ret = 0 or -EFAULT.
 */
#define __futex_atomic_op(insn, ret, oldval, uaddr, oparg) \
	__asm__ __volatile( \
	"1: %0 = memw_locked(%3);\n" /* load-locked old value */ \
	    /* For example: %1 = %4 */ \
	    insn \
	"2: memw_locked(%3,p2) = %1;\n" /* store-conditional new value */ \
	"   if !p2 jump 1b;\n" /* reservation lost -- retry */ \
	"   %1 = #0;\n" /* success: ret = 0 */ \
	"3:\n" \
	".section .fixup,\"ax\"\n" \
	"4: %1 = #%5;\n" /* fault: ret = -EFAULT */ \
	"   jump 3b\n" \
	".previous\n" \
	".section __ex_table,\"a\"\n" \
	".long 1b,4b,2b,4b\n" /* fixup entries for labels 1: and 2: */ \
	".previous\n" \
	: "=&r" (oldval), "=&r" (ret), "+m" (*uaddr) \
	: "r" (uaddr), "r" (oparg), "i" (-EFAULT) \
	: "p2", "memory")
/*
 * futex_atomic_op_inuser() - run an encoded futex op on a user word.
 *
 * Decodes the op, comparison, operand and comparison argument from
 * @encoded_op (the FUTEX_OP() packing), atomically applies the op to
 * *uaddr via __futex_atomic_op(), then evaluates the comparison against
 * the value the word held before the op.
 *
 * Returns a negative errno on failure (-EFAULT for a bad/faulting user
 * address, -ENOSYS for an unknown op or comparison code), otherwise the
 * boolean result (0/1) of comparing the old value with cmparg.
 */
static inline int
futex_atomic_op_inuser(int encoded_op, int __user *uaddr)
{
	int op = (encoded_op >> 28) & 7;
	int cmp = (encoded_op >> 24) & 15;
	/*
	 * Arithmetic right shifts sign-extend the 12-bit oparg/cmparg
	 * fields.  NOTE(review): left-shifting a negative encoded_op is
	 * formally UB in ISO C, but this idiom is the long-standing
	 * kernel-wide decoding of FUTEX_OP() and relies on gcc's
	 * defined behavior for it.
	 */
	int oparg = (encoded_op << 8) >> 20;
	int cmparg = (encoded_op << 20) >> 20;
	int oldval = 0, ret;

	/* FUTEX_OP_OPARG_SHIFT: operand is a shift count, use 1 << oparg. */
	if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
		oparg = 1 << oparg;

	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(int)))
		return -EFAULT;

	/*
	 * Faults must not sleep inside the LL/SC sequence; a faulting
	 * access takes the .fixup path and yields -EFAULT instead.
	 */
	pagefault_disable();

	switch (op) {
	case FUTEX_OP_SET:
		__futex_atomic_op("%1 = %4\n", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ADD:
		__futex_atomic_op("%1 = add(%0,%4)\n", ret, oldval, uaddr,
				  oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op("%1 = or(%0,%4)\n", ret, oldval, uaddr,
				  oparg);
		break;
	case FUTEX_OP_ANDN:
		/* ANDN: old & ~oparg, done as two Hexagon insns. */
		__futex_atomic_op("%1 = not(%4); %1 = and(%0,%1)\n", ret,
				  oldval, uaddr, oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op("%1 = xor(%0,%4)\n", ret, oldval, uaddr,
				  oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	pagefault_enable();

	/* Only evaluate the comparison if the atomic op itself succeeded. */
	if (!ret) {
		switch (cmp) {
		case FUTEX_OP_CMP_EQ:
			ret = (oldval == cmparg);
			break;
		case FUTEX_OP_CMP_NE:
			ret = (oldval != cmparg);
			break;
		case FUTEX_OP_CMP_LT:
			ret = (oldval < cmparg);
			break;
		case FUTEX_OP_CMP_GE:
			ret = (oldval >= cmparg);
			break;
		case FUTEX_OP_CMP_LE:
			ret = (oldval <= cmparg);
			break;
		case FUTEX_OP_CMP_GT:
			ret = (oldval > cmparg);
			break;
		default:
			ret = -ENOSYS;
		}
	}
	return ret;
}
  90. static inline int
  91. futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr, u32 oldval,
  92. u32 newval)
  93. {
  94. int prev;
  95. int ret;
  96. if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
  97. return -EFAULT;
  98. __asm__ __volatile__ (
  99. "1: %1 = memw_locked(%3)\n"
  100. " {\n"
  101. " p2 = cmp.eq(%1,%4)\n"
  102. " if !p2.new jump:NT 3f\n"
  103. " }\n"
  104. "2: memw_locked(%3,p2) = %5\n"
  105. " if !p2 jump 1b\n"
  106. "3:\n"
  107. ".section .fixup,\"ax\"\n"
  108. "4: %0 = #%6\n"
  109. " jump 3b\n"
  110. ".previous\n"
  111. ".section __ex_table,\"a\"\n"
  112. ".long 1b,4b,2b,4b\n"
  113. ".previous\n"
  114. : "+r" (ret), "=&r" (prev), "+m" (*uaddr)
  115. : "r" (uaddr), "r" (oldval), "r" (newval), "i"(-EFAULT)
  116. : "p2", "memory");
  117. *uval = prev;
  118. return ret;
  119. }
  120. #endif /* __KERNEL__ */
  121. #endif /* _ASM_HEXAGON_FUTEX_H */