/*
 * MIPS local_t operations — per-CPU atomic long counters.
 * (arch/mips/include/asm/local.h)
 */
  1. #ifndef _ARCH_MIPS_LOCAL_H
  2. #define _ARCH_MIPS_LOCAL_H
  3. #include <linux/percpu.h>
  4. #include <linux/bitops.h>
  5. #include <linux/atomic.h>
  6. #include <asm/cmpxchg.h>
  7. #include <asm/compiler.h>
  8. #include <asm/war.h>
/*
 * local_t: a signed long counter that is only ever updated by its
 * owning CPU.  It wraps atomic_long_t so the generic atomic_long_*
 * helpers can be reused for the simple operations below.
 */
typedef struct
{
	atomic_long_t a;
} local_t;
/* Static initializer: local_t v = LOCAL_INIT(0); */
#define LOCAL_INIT(i)	{ ATOMIC_LONG_INIT(i) }

/* Read/write the counter via the atomic_long_* accessors. */
#define local_read(l)	atomic_long_read(&(l)->a)
#define local_set(l, i)	atomic_long_set(&(l)->a, (i))

/* Arithmetic updates that do not return the new value. */
#define local_add(i, l)	atomic_long_add((i), (&(l)->a))
#define local_sub(i, l)	atomic_long_sub((i), (&(l)->a))
#define local_inc(l)	atomic_long_inc(&(l)->a)
#define local_dec(l)	atomic_long_dec(&(l)->a)
/*
 * Same as above, but return the result value
 */
static __inline__ long local_add_return(long i, local_t * l)
{
	unsigned long result;

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		/*
		 * LL/SC retry loop using the branch-likely form (beqzl) as
		 * the R10000 LL/SC workaround.  SC overwrites %0 with its
		 * success flag, so the addu in the branch delay slot
		 * recomputes the sum once the store has succeeded.
		 */
		unsigned long temp;

		__asm__ __volatile__(
		" .set arch=r4000 \n"
		"1:" __LL "%1, %2 # local_add_return \n"
		" addu %0, %1, %3 \n"
		__SC "%0, %2 \n"
		" beqzl %0, 1b \n"
		" addu %0, %1, %3 \n"
		" .set mips0 \n"
		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
		: "Ir" (i), "m" (l->a.counter)
		: "memory");
	} else if (kernel_uses_llsc) {
		/*
		 * Same LL/SC loop with a plain beqz retry branch for CPUs
		 * that do not need the R10000 workaround.
		 */
		unsigned long temp;

		__asm__ __volatile__(
		" .set "MIPS_ISA_ARCH_LEVEL" \n"
		"1:" __LL "%1, %2 # local_add_return \n"
		" addu %0, %1, %3 \n"
		__SC "%0, %2 \n"
		" beqz %0, 1b \n"
		" addu %0, %1, %3 \n"
		" .set mips0 \n"
		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
		: "Ir" (i), "m" (l->a.counter)
		: "memory");
	} else {
		/*
		 * No LL/SC available.  A local_t is only modified by its
		 * owning CPU, so masking interrupts is enough to make the
		 * read-modify-write sequence atomic here.
		 */
		unsigned long flags;

		local_irq_save(flags);
		result = l->a.counter;
		result += i;
		l->a.counter = result;
		local_irq_restore(flags);
	}

	return result;
}
/* Subtract @i from @l and return the new value; mirrors local_add_return. */
static __inline__ long local_sub_return(long i, local_t * l)
{
	unsigned long result;

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		/*
		 * LL/SC retry loop with branch-likely (beqzl) as the R10000
		 * LL/SC workaround; the delay-slot subu recomputes the
		 * difference after SC clobbers %0 with the success flag.
		 */
		unsigned long temp;

		__asm__ __volatile__(
		" .set arch=r4000 \n"
		"1:" __LL "%1, %2 # local_sub_return \n"
		" subu %0, %1, %3 \n"
		__SC "%0, %2 \n"
		" beqzl %0, 1b \n"
		" subu %0, %1, %3 \n"
		" .set mips0 \n"
		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
		: "Ir" (i), "m" (l->a.counter)
		: "memory");
	} else if (kernel_uses_llsc) {
		/* Plain beqz retry loop for CPUs without the R10000 erratum. */
		unsigned long temp;

		__asm__ __volatile__(
		" .set "MIPS_ISA_ARCH_LEVEL" \n"
		"1:" __LL "%1, %2 # local_sub_return \n"
		" subu %0, %1, %3 \n"
		__SC "%0, %2 \n"
		" beqz %0, 1b \n"
		" subu %0, %1, %3 \n"
		" .set mips0 \n"
		: "=&r" (result), "=&r" (temp), "=m" (l->a.counter)
		: "Ir" (i), "m" (l->a.counter)
		: "memory");
	} else {
		/*
		 * Fallback: interrupts-off RMW.  Sufficient because only
		 * the owning CPU updates a local_t.
		 */
		unsigned long flags;

		local_irq_save(flags);
		result = l->a.counter;
		result -= i;
		l->a.counter = result;
		local_irq_restore(flags);
	}

	return result;
}
/*
 * Compare-and-exchange on the counter.  Uses cmpxchg_local, the
 * this-CPU-only variant, which is all a local_t requires.
 * Returns the value that was in the counter before the operation.
 */
#define local_cmpxchg(l, o, n) \
	((long)cmpxchg_local(&((l)->a.counter), (o), (n)))

/* Unconditionally swap in @n, returning the previous counter value. */
#define local_xchg(l, n) (atomic_long_xchg((&(l)->a), (n)))
/**
 * local_add_unless - add unless the number is a given value
 * @l: pointer of type local_t
 * @a: the amount to add to l...
 * @u: ...unless l is equal to u.
 *
 * Atomically adds @a to @l, so long as it was not @u.
 * Returns non-zero if @l was not @u, and zero otherwise.
 *
 * Implementation: classic cmpxchg loop — reread the counter and retry
 * whenever local_cmpxchg reports that it changed under us.
 */
#define local_add_unless(l, a, u) \
({ \
	long c, old; \
	c = local_read(l); \
	while (c != (u) && (old = local_cmpxchg((l), c, c + (a))) != c) \
		c = old; \
	c != (u); \
})

/* Increment @l by 1 unless it is zero; returns non-zero on success. */
#define local_inc_not_zero(l) local_add_unless((l), 1, 0)

/* Decrement/increment by 1 and return the new value. */
#define local_dec_return(l) local_sub_return(1, (l))
#define local_inc_return(l) local_add_return(1, (l))
/**
 * local_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @l: pointer of type local_t
 *
 * Atomically subtracts @i from @l and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define local_sub_and_test(i, l) (local_sub_return((i), (l)) == 0)

/**
 * local_inc_and_test - increment and test
 * @l: pointer of type local_t
 *
 * Atomically increments @l by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define local_inc_and_test(l) (local_inc_return(l) == 0)

/**
 * local_dec_and_test - decrement by 1 and test
 * @l: pointer of type local_t
 *
 * Atomically decrements @l by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define local_dec_and_test(l) (local_sub_return(1, (l)) == 0)

/**
 * local_add_negative - add and test if negative
 * @l: pointer of type local_t
 * @i: integer value to add
 *
 * Atomically adds @i to @l and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define local_add_negative(i, l) (local_add_return(i, (l)) < 0)
  162. /* Use these for per-cpu local_t variables: on some archs they are
  163. * much more efficient than these naive implementations. Note they take
  164. * a variable, not an address.
  165. */
  166. #define __local_inc(l) ((l)->a.counter++)
  167. #define __local_dec(l) ((l)->a.counter++)
  168. #define __local_add(i, l) ((l)->a.counter+=(i))
  169. #define __local_sub(i, l) ((l)->a.counter-=(i))
  170. #endif /* _ARCH_MIPS_LOCAL_H */