atomic.h

/*
 * Generic C implementation of atomic counter operations. Usable on
 * UP systems only. Do not include in machine independent code.
 *
 * Originally implemented for MN10300.
 *
 * Copyright (C) 2007 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public Licence
 * as published by the Free Software Foundation; either version
 * 2 of the Licence, or (at your option) any later version.
 */
#ifndef __ASM_GENERIC_ATOMIC_H
#define __ASM_GENERIC_ATOMIC_H

#include <asm/cmpxchg.h>
#include <asm/barrier.h>
/*
 * atomic_$op() - $op integer to atomic variable
 * @i: integer value to $op
 * @v: pointer to the atomic variable
 *
 * Atomically $ops @i to @v. Does not strictly guarantee a memory barrier;
 * use smp_mb__{before,after}_atomic() where ordering is required.
 */

/*
 * atomic_$op_return() - $op integer to atomic variable and return the result
 * @i: integer value to $op
 * @v: pointer to the atomic variable
 *
 * Atomically $ops @i to @v. Does imply a full memory barrier.
 */
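/*
 * As a concrete reading of the macros below: ATOMIC_OP(add, +) would
 * generate atomic_add(i, v), which returns nothing; ATOMIC_OP_RETURN(add, +)
 * generates atomic_add_return(i, v), which returns the new counter value;
 * and ATOMIC_FETCH_OP(add, +) generates atomic_fetch_add(i, v), which
 * returns the value the counter held before the addition.
 */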
#ifdef CONFIG_SMP

/* we can build all atomic primitives from cmpxchg */

#define ATOMIC_OP(op, c_op)						\
static inline void atomic_##op(int i, atomic_t *v)			\
{									\
	int c, old;							\
									\
	c = v->counter;							\
	while ((old = cmpxchg(&v->counter, c, c c_op i)) != c)		\
		c = old;						\
}

#define ATOMIC_OP_RETURN(op, c_op)					\
static inline int atomic_##op##_return(int i, atomic_t *v)		\
{									\
	int c, old;							\
									\
	c = v->counter;							\
	while ((old = cmpxchg(&v->counter, c, c c_op i)) != c)		\
		c = old;						\
									\
	return c c_op i;						\
}

#define ATOMIC_FETCH_OP(op, c_op)					\
static inline int atomic_fetch_##op(int i, atomic_t *v)		\
{									\
	int c, old;							\
									\
	c = v->counter;							\
	while ((old = cmpxchg(&v->counter, c, c c_op i)) != c)		\
		c = old;						\
									\
	return c;							\
}
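/*
 * As a rough sketch, ATOMIC_OP_RETURN(add, +) above expands to something
 * like:
 *
 *	static inline int atomic_add_return(int i, atomic_t *v)
 *	{
 *		int c, old;
 *
 *		c = v->counter;
 *		while ((old = cmpxchg(&v->counter, c, c + i)) != c)
 *			c = old;
 *
 *		return c + i;
 *	}
 *
 * i.e. the update is retried with the latest observed value until the
 * cmpxchg succeeds.
 */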
#else

#include <linux/irqflags.h>

#define ATOMIC_OP(op, c_op)						\
static inline void atomic_##op(int i, atomic_t *v)			\
{									\
	unsigned long flags;						\
									\
	raw_local_irq_save(flags);					\
	v->counter = v->counter c_op i;					\
	raw_local_irq_restore(flags);					\
}

#define ATOMIC_OP_RETURN(op, c_op)					\
static inline int atomic_##op##_return(int i, atomic_t *v)		\
{									\
	unsigned long flags;						\
	int ret;							\
									\
	raw_local_irq_save(flags);					\
	ret = (v->counter = v->counter c_op i);				\
	raw_local_irq_restore(flags);					\
									\
	return ret;							\
}

#define ATOMIC_FETCH_OP(op, c_op)					\
static inline int atomic_fetch_##op(int i, atomic_t *v)		\
{									\
	unsigned long flags;						\
	int ret;							\
									\
	raw_local_irq_save(flags);					\
	ret = v->counter;						\
	v->counter = v->counter c_op i;					\
	raw_local_irq_restore(flags);					\
									\
	return ret;							\
}

#endif /* CONFIG_SMP */
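/*
 * On UP (the !CONFIG_SMP branch above), the same macros expand to plain
 * read-modify-write sequences bracketed by raw_local_irq_save()/restore();
 * for instance, ATOMIC_FETCH_OP(or, |) yields roughly:
 *
 *	static inline int atomic_fetch_or(int i, atomic_t *v)
 *	{
 *		unsigned long flags;
 *		int ret;
 *
 *		raw_local_irq_save(flags);
 *		ret = v->counter;
 *		v->counter = v->counter | i;
 *		raw_local_irq_restore(flags);
 *
 *		return ret;
 *	}
 *
 * With interrupts disabled and only one CPU, nothing can observe the
 * counter mid-update.
 */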
#ifndef atomic_add_return
ATOMIC_OP_RETURN(add, +)
#endif

#ifndef atomic_sub_return
ATOMIC_OP_RETURN(sub, -)
#endif

#ifndef atomic_fetch_add
ATOMIC_FETCH_OP(add, +)
#endif

#ifndef atomic_fetch_sub
ATOMIC_FETCH_OP(sub, -)
#endif

#ifndef atomic_fetch_and
ATOMIC_FETCH_OP(and, &)
#endif

#ifndef atomic_fetch_or
ATOMIC_FETCH_OP(or, |)
#endif

#ifndef atomic_fetch_xor
ATOMIC_FETCH_OP(xor, ^)
#endif

#ifndef atomic_and
ATOMIC_OP(and, &)
#endif

#ifndef atomic_or
ATOMIC_OP(or, |)
#endif

#ifndef atomic_xor
ATOMIC_OP(xor, ^)
#endif

#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP
/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 */

#define ATOMIC_INIT(i)	{ (i) }

/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#ifndef atomic_read
#define atomic_read(v)	READ_ONCE((v)->counter)
#endif

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v, i) WRITE_ONCE(((v)->counter), (i))
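/*
 * Minimal usage sketch (the counter name is hypothetical, for illustration
 * only):
 *
 *	static atomic_t nr_events = ATOMIC_INIT(0);
 *
 *	atomic_set(&nr_events, 5);
 *	pr_info("events: %d\n", atomic_read(&nr_events));
 */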
#include <linux/irqflags.h>

static inline int atomic_add_negative(int i, atomic_t *v)
{
	return atomic_add_return(i, v) < 0;
}

static inline void atomic_add(int i, atomic_t *v)
{
	atomic_add_return(i, v);
}

static inline void atomic_sub(int i, atomic_t *v)
{
	atomic_sub_return(i, v);
}

static inline void atomic_inc(atomic_t *v)
{
	atomic_add_return(1, v);
}

static inline void atomic_dec(atomic_t *v)
{
	atomic_sub_return(1, v);
}

#define atomic_dec_return(v)		atomic_sub_return(1, (v))
#define atomic_inc_return(v)		atomic_add_return(1, (v))

#define atomic_sub_and_test(i, v)	(atomic_sub_return((i), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_dec_return(v) == 0)
#define atomic_inc_and_test(v)		(atomic_inc_return(v) == 0)
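/*
 * The *_and_test() forms are typically used for reference counting, as the
 * "resource counting" comment above suggests; a hypothetical example
 * (obj, refcnt and free_obj are illustrative only):
 *
 *	atomic_inc(&obj->refcnt);		// take a reference
 *	...
 *	if (atomic_dec_and_test(&obj->refcnt))	// drop it; true when it hits 0
 *		free_obj(obj);			// last user frees the object
 */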
#define atomic_xchg(ptr, v)		(xchg(&(ptr)->counter, (v)))
#define atomic_cmpxchg(v, old, new)	(cmpxchg(&((v)->counter), (old), (new)))

#ifndef __atomic_add_unless
static inline int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;
	c = atomic_read(v);
	while (c != u && (old = atomic_cmpxchg(v, c, c + a)) != c)
		c = old;
	return c;
}
#endif
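/*
 * __atomic_add_unless() returns the value the counter held before the
 * (possibly skipped) addition.  An illustrative sketch, taking a reference
 * only while the count is still non-zero (obj, refcnt and use_obj are
 * hypothetical):
 *
 *	if (__atomic_add_unless(&obj->refcnt, 1, 0) != 0)
 *		use_obj(obj);		// counter was non-zero, so the
 *					// increment actually happened
 *
 * <linux/atomic.h> typically wraps this pattern as atomic_add_unless() and
 * atomic_inc_not_zero().
 */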
#endif /* __ASM_GENERIC_ATOMIC_H */