  1. #ifndef _ASM_M32R_ATOMIC_H
  2. #define _ASM_M32R_ATOMIC_H
  3. /*
  4. * linux/include/asm-m32r/atomic.h
  5. *
  6. * M32R version:
  7. * Copyright (C) 2001, 2002 Hitoshi Yamamoto
  8. * Copyright (C) 2004 Hirokazu Takata <takata at linux-m32r.org>
  9. */
  10. #include <linux/types.h>
  11. #include <asm/assembler.h>
  12. #include <asm/cmpxchg.h>
  13. #include <asm/dcache_clear.h>
  14. #include <asm/barrier.h>
  15. /*
  16. * Atomic operations that C can't guarantee us. Useful for
  17. * resource counting etc..
  18. */
/* Static initializer for an atomic_t, e.g.: static atomic_t x = ATOMIC_INIT(0); */
#define ATOMIC_INIT(i)	{ (i) }

/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)	READ_ONCE((v)->counter)
/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v,i)	WRITE_ONCE(((v)->counter), (i))

/*
 * On CONFIG_CHIP_M32700_TS1 the DCACHE_CLEAR() sequence used in the asm
 * bodies below is passed "r4" as its scratch register, so the asm
 * statements must additionally clobber r4 on that chip.
 * NOTE(review): presumably a TS1 errata workaround — confirm against
 * <asm/dcache_clear.h>.
 */
#ifdef CONFIG_CHIP_M32700_TS1
#define __ATOMIC_CLOBBER	, "r4"
#else
#define __ATOMIC_CLOBBER
#endif
  40. #define ATOMIC_OP(op) \
  41. static __inline__ void atomic_##op(int i, atomic_t *v) \
  42. { \
  43. unsigned long flags; \
  44. int result; \
  45. \
  46. local_irq_save(flags); \
  47. __asm__ __volatile__ ( \
  48. "# atomic_" #op " \n\t" \
  49. DCACHE_CLEAR("%0", "r4", "%1") \
  50. M32R_LOCK" %0, @%1; \n\t" \
  51. #op " %0, %2; \n\t" \
  52. M32R_UNLOCK" %0, @%1; \n\t" \
  53. : "=&r" (result) \
  54. : "r" (&v->counter), "r" (i) \
  55. : "memory" \
  56. __ATOMIC_CLOBBER \
  57. ); \
  58. local_irq_restore(flags); \
  59. } \
/*
 * ATOMIC_OP_RETURN(op) - generate "static __inline__ int
 * atomic_<op>_return(int i, atomic_t *v)", performing the atomic <op> and
 * returning the NEW value of v->counter (the post-operation result held in
 * %0 when it is stored back by M32R_UNLOCK).
 */
#define ATOMIC_OP_RETURN(op)						\
static __inline__ int atomic_##op##_return(int i, atomic_t *v)		\
{									\
	unsigned long flags;						\
	int result;							\
									\
	local_irq_save(flags);						\
	__asm__ __volatile__ (						\
		"# atomic_" #op "_return	\n\t"			\
		DCACHE_CLEAR("%0", "r4", "%1")				\
		M32R_LOCK" %0, @%1;		\n\t"			\
		#op " %0, %2;			\n\t"			\
		M32R_UNLOCK" %0, @%1;		\n\t"			\
		: "=&r" (result)					\
		: "r" (&v->counter), "r" (i)				\
		: "memory"						\
		__ATOMIC_CLOBBER					\
	);								\
	local_irq_restore(flags);					\
									\
	return result;							\
}
/*
 * ATOMIC_FETCH_OP(op) - generate "static __inline__ int
 * atomic_fetch_<op>(int i, atomic_t *v)", performing the atomic <op> and
 * returning the OLD value of v->counter: the "mv %0, %1" copies the loaded
 * value into the result register before <op> modifies %1 for the store.
 */
#define ATOMIC_FETCH_OP(op)						\
static __inline__ int atomic_fetch_##op(int i, atomic_t *v)		\
{									\
	unsigned long flags;						\
	int result, val;						\
									\
	local_irq_save(flags);						\
	__asm__ __volatile__ (						\
		"# atomic_fetch_" #op "		\n\t"			\
		DCACHE_CLEAR("%0", "r4", "%2")				\
		M32R_LOCK" %1, @%2;		\n\t"			\
		"mv %0, %1			\n\t"			\
		#op " %1, %3;			\n\t"			\
		M32R_UNLOCK" %1, @%2;		\n\t"			\
		: "=&r" (result), "=&r" (val)				\
		: "r" (&v->counter), "r" (i)				\
		: "memory"						\
		__ATOMIC_CLOBBER					\
	);								\
	local_irq_restore(flags);					\
									\
	return result;							\
}
#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_OP_RETURN(op) ATOMIC_FETCH_OP(op)

/* add/sub get all three variants: void, *_return, and fetch_*. */
ATOMIC_OPS(add)
ATOMIC_OPS(sub)

#undef ATOMIC_OPS
#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_FETCH_OP(op)

/* Bitwise ops get only the void and fetch_* variants (no *_return). */
ATOMIC_OPS(and)
ATOMIC_OPS(or)
ATOMIC_OPS(xor)

/* Generator macros are local to this header; drop them after expansion. */
#undef ATOMIC_OPS
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP
/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i,v)	(atomic_sub_return((i), (v)) == 0)
/**
 * atomic_inc_return - increment atomic variable and return it
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1 and returns the result.
 */
static __inline__ int atomic_inc_return(atomic_t *v)
{
	unsigned long flags;
	int result;

	/* Load-locked / add / store-unlocked, with interrupts off. */
	local_irq_save(flags);
	__asm__ __volatile__ (
		"# atomic_inc_return		\n\t"
		DCACHE_CLEAR("%0", "r4", "%1")
		M32R_LOCK" %0, @%1;		\n\t"
		"addi	%0, #1;			\n\t"
		M32R_UNLOCK" %0, @%1;		\n\t"
		: "=&r" (result)
		: "r" (&v->counter)
		: "memory"
		__ATOMIC_CLOBBER
	);
	local_irq_restore(flags);

	return result;
}
/**
 * atomic_dec_return - decrement atomic variable and return it
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and returns the result.
 */
static __inline__ int atomic_dec_return(atomic_t *v)
{
	unsigned long flags;
	int result;

	/* Same shape as atomic_inc_return, adding -1 instead of +1. */
	local_irq_save(flags);
	__asm__ __volatile__ (
		"# atomic_dec_return		\n\t"
		DCACHE_CLEAR("%0", "r4", "%1")
		M32R_LOCK" %0, @%1;		\n\t"
		"addi	%0, #-1;		\n\t"
		M32R_UNLOCK" %0, @%1;		\n\t"
		: "=&r" (result)
		: "r" (&v->counter)
		: "memory"
		__ATOMIC_CLOBBER
	);
	local_irq_restore(flags);

	return result;
}
/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v)			((void)atomic_inc_return(v))

/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v)			((void)atomic_dec_return(v))

/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v)		(atomic_inc_return(v) == 0)

/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all
 * other cases.
 */
#define atomic_dec_and_test(v)		(atomic_dec_return(v) == 0)

/**
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i,v)	(atomic_add_return((i), (v)) < 0)

/* Thin wrappers around the generic cmpxchg/xchg on v->counter. */
#define atomic_cmpxchg(v, o, n)		((int)cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new)		(xchg(&((v)->counter), new))
  221. /**
  222. * __atomic_add_unless - add unless the number is a given value
  223. * @v: pointer of type atomic_t
  224. * @a: the amount to add to v...
  225. * @u: ...unless v is equal to u.
  226. *
  227. * Atomically adds @a to @v, so long as it was not @u.
  228. * Returns the old value of @v.
  229. */
  230. static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
  231. {
  232. int c, old;
  233. c = atomic_read(v);
  234. for (;;) {
  235. if (unlikely(c == (u)))
  236. break;
  237. old = atomic_cmpxchg((v), c, c + (a));
  238. if (likely(old == c))
  239. break;
  240. c = old;
  241. }
  242. return c;
  243. }
  244. #endif /* _ASM_M32R_ATOMIC_H */