- #ifndef _ASM_ARM_FUTEX_H
- #define _ASM_ARM_FUTEX_H
- #ifdef __KERNEL__
- #include <linux/futex.h>
- #include <linux/uaccess.h>
- #include <asm/errno.h>
/*
 * Shared exception-table fixup for the futex helpers below.
 *
 * Registers the user-space accesses at asm labels 1: and 2: in
 * __ex_table, mapping both to the local fixup at 4:.  If either access
 * faults, the fixup loads err_reg (-EFAULT at every call site) into
 * output operand %0 and branches back to the common exit at 3:.
 */
#define __futex_atomic_ex_table(err_reg)			\
	"3:\n"							\
	"	.pushsection __ex_table,\"a\"\n"		\
	"	.align	3\n"					\
	"	.long	1b, 4f, 2b, 4f\n"			\
	"	.popsection\n"					\
	"	.pushsection .text.fixup,\"ax\"\n"		\
	"	.align	2\n"					\
	"4:	mov	%0, " err_reg "\n"			\
	"	b	3b\n"					\
	"	.popsection"
- #ifdef CONFIG_SMP
- #define __futex_atomic_op(insn, ret, oldval, tmp, uaddr, oparg) \
- ({ \
- unsigned int __ua_flags; \
- smp_mb(); \
- prefetchw(uaddr); \
- __ua_flags = uaccess_save_and_enable(); \
- __asm__ __volatile__( \
- "1: ldrex %1, [%3]\n" \
- " " insn "\n" \
- "2: strex %2, %0, [%3]\n" \
- " teq %2, #0\n" \
- " bne 1b\n" \
- " mov %0, #0\n" \
- __futex_atomic_ex_table("%5") \
- : "=&r" (ret), "=&r" (oldval), "=&r" (tmp) \
- : "r" (uaddr), "r" (oparg), "Ir" (-EFAULT) \
- : "cc", "memory"); \
- uaccess_restore(__ua_flags); \
- })
- static inline int
- futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
- u32 oldval, u32 newval)
- {
- unsigned int __ua_flags;
- int ret;
- u32 val;
- if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
- return -EFAULT;
- smp_mb();
- /* Prefetching cannot fault */
- prefetchw(uaddr);
- __ua_flags = uaccess_save_and_enable();
- __asm__ __volatile__("@futex_atomic_cmpxchg_inatomic\n"
- "1: ldrex %1, [%4]\n"
- " teq %1, %2\n"
- " ite eq @ explicit IT needed for the 2b label\n"
- "2: strexeq %0, %3, [%4]\n"
- " movne %0, #0\n"
- " teq %0, #0\n"
- " bne 1b\n"
- __futex_atomic_ex_table("%5")
- : "=&r" (ret), "=&r" (val)
- : "r" (oldval), "r" (newval), "r" (uaddr), "Ir" (-EFAULT)
- : "cc", "memory");
- uaccess_restore(__ua_flags);
- smp_mb();
- *uval = val;
- return ret;
- }
- #else /* !SMP, we can work around lack of atomic ops by disabling preemption */
- #include <linux/preempt.h>
- #include <asm/domain.h>
- #define __futex_atomic_op(insn, ret, oldval, tmp, uaddr, oparg) \
- ({ \
- unsigned int __ua_flags = uaccess_save_and_enable(); \
- __asm__ __volatile__( \
- "1: " TUSER(ldr) " %1, [%3]\n" \
- " " insn "\n" \
- "2: " TUSER(str) " %0, [%3]\n" \
- " mov %0, #0\n" \
- __futex_atomic_ex_table("%5") \
- : "=&r" (ret), "=&r" (oldval), "=&r" (tmp) \
- : "r" (uaddr), "r" (oparg), "Ir" (-EFAULT) \
- : "cc", "memory"); \
- uaccess_restore(__ua_flags); \
- })
- static inline int
- futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
- u32 oldval, u32 newval)
- {
- unsigned int __ua_flags;
- int ret = 0;
- u32 val;
- if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
- return -EFAULT;
- preempt_disable();
- __ua_flags = uaccess_save_and_enable();
- __asm__ __volatile__("@futex_atomic_cmpxchg_inatomic\n"
- "1: " TUSER(ldr) " %1, [%4]\n"
- " teq %1, %2\n"
- " it eq @ explicit IT needed for the 2b label\n"
- "2: " TUSER(streq) " %3, [%4]\n"
- __futex_atomic_ex_table("%5")
- : "+r" (ret), "=&r" (val)
- : "r" (oldval), "r" (newval), "r" (uaddr), "Ir" (-EFAULT)
- : "cc", "memory");
- uaccess_restore(__ua_flags);
- *uval = val;
- preempt_enable();
- return ret;
- }
- #endif /* !SMP */
- static inline int
- arch_futex_atomic_op_inuser(int op, int oparg, int *oval, u32 __user *uaddr)
- {
- int oldval = 0, ret, tmp;
- #ifndef CONFIG_SMP
- preempt_disable();
- #endif
- pagefault_disable();
- switch (op) {
- case FUTEX_OP_SET:
- __futex_atomic_op("mov %0, %4", ret, oldval, tmp, uaddr, oparg);
- break;
- case FUTEX_OP_ADD:
- __futex_atomic_op("add %0, %1, %4", ret, oldval, tmp, uaddr, oparg);
- break;
- case FUTEX_OP_OR:
- __futex_atomic_op("orr %0, %1, %4", ret, oldval, tmp, uaddr, oparg);
- break;
- case FUTEX_OP_ANDN:
- __futex_atomic_op("and %0, %1, %4", ret, oldval, tmp, uaddr, ~oparg);
- break;
- case FUTEX_OP_XOR:
- __futex_atomic_op("eor %0, %1, %4", ret, oldval, tmp, uaddr, oparg);
- break;
- default:
- ret = -ENOSYS;
- }
- pagefault_enable();
- #ifndef CONFIG_SMP
- preempt_enable();
- #endif
- if (!ret)
- *oval = oldval;
- return ret;
- }
- #endif /* __KERNEL__ */
- #endif /* _ASM_ARM_FUTEX_H */
|