atomic.h

/* $Id: atomic.h,v 1.3 2001/07/25 16:15:19 bjornw Exp $ */

#ifndef __ASM_CRIS_ATOMIC__
#define __ASM_CRIS_ATOMIC__

#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/system.h>
#include <arch/atomic.h>

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 */
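
/*
 * A minimal usage sketch, assuming the reference-counting pattern the
 * comment above refers to (illustration only, not part of this header;
 * release_object() is a hypothetical callback):
 *
 *      static atomic_t refcnt = ATOMIC_INIT(1);
 *
 *      atomic_inc(&refcnt);
 *      if (atomic_dec_and_test(&refcnt))
 *              release_object();
 *
 * atomic_inc() takes a reference; atomic_dec_and_test() drops one and
 * returns true only for the caller that brings the count to zero.
 */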

#define ATOMIC_INIT(i)  { (i) }

#define atomic_read(v)  (*(volatile int *)&(v)->counter)
#define atomic_set(v,i) (((v)->counter) = (i))

/* These should be written in asm but we do it in C for now. */
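/*
 * cris_atomic_save()/cris_atomic_restore() are provided by <arch/atomic.h>;
 * the routines below rely on them to make these plain C read-modify-write
 * sequences atomic.  On CRIS that is typically done by disabling interrupts
 * around the critical section (or by taking a hashed per-counter spinlock
 * on SMP) -- an arch-specific assumption, not something defined in this file.
 */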
static inline void atomic_add(int i, volatile atomic_t *v)
{
        unsigned long flags;

        cris_atomic_save(v, flags);
        v->counter += i;
        cris_atomic_restore(v, flags);
}

static inline void atomic_sub(int i, volatile atomic_t *v)
{
        unsigned long flags;

        cris_atomic_save(v, flags);
        v->counter -= i;
        cris_atomic_restore(v, flags);
}

static inline int atomic_add_return(int i, volatile atomic_t *v)
{
        unsigned long flags;
        int retval;

        cris_atomic_save(v, flags);
        retval = (v->counter += i);
        cris_atomic_restore(v, flags);
        return retval;
}

#define atomic_add_negative(a, v)       (atomic_add_return((a), (v)) < 0)

static inline int atomic_sub_return(int i, volatile atomic_t *v)
{
        unsigned long flags;
        int retval;

        cris_atomic_save(v, flags);
        retval = (v->counter -= i);
        cris_atomic_restore(v, flags);
        return retval;
}

static inline int atomic_sub_and_test(int i, volatile atomic_t *v)
{
        int retval;
        unsigned long flags;

        cris_atomic_save(v, flags);
        retval = (v->counter -= i) == 0;
        cris_atomic_restore(v, flags);
        return retval;
}

static inline void atomic_inc(volatile atomic_t *v)
{
        unsigned long flags;

        cris_atomic_save(v, flags);
        (v->counter)++;
        cris_atomic_restore(v, flags);
}

static inline void atomic_dec(volatile atomic_t *v)
{
        unsigned long flags;

        cris_atomic_save(v, flags);
        (v->counter)--;
        cris_atomic_restore(v, flags);
}

static inline int atomic_inc_return(volatile atomic_t *v)
{
        unsigned long flags;
        int retval;

        cris_atomic_save(v, flags);
        retval = ++(v->counter);
        cris_atomic_restore(v, flags);
        return retval;
}

static inline int atomic_dec_return(volatile atomic_t *v)
{
        unsigned long flags;
        int retval;

        cris_atomic_save(v, flags);
        retval = --(v->counter);
        cris_atomic_restore(v, flags);
        return retval;
}

static inline int atomic_dec_and_test(volatile atomic_t *v)
{
        int retval;
        unsigned long flags;

        cris_atomic_save(v, flags);
        retval = --(v->counter) == 0;
        cris_atomic_restore(v, flags);
        return retval;
}

static inline int atomic_inc_and_test(volatile atomic_t *v)
{
        int retval;
        unsigned long flags;

        cris_atomic_save(v, flags);
        retval = ++(v->counter) == 0;
        cris_atomic_restore(v, flags);
        return retval;
}

static inline int atomic_cmpxchg(atomic_t *v, int old, int new)
{
        int ret;
        unsigned long flags;

        cris_atomic_save(v, flags);
        ret = v->counter;
        if (likely(ret == old))
                v->counter = new;
        cris_atomic_restore(v, flags);
        return ret;
}
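
/*
 * A sketch of how atomic_cmpxchg() is typically used (illustration only,
 * not part of this header; transform() is a hypothetical helper): retry
 * until the compare-and-swap sees an unchanged value.
 *
 *      int old, new;
 *
 *      do {
 *              old = atomic_read(&counter);
 *              new = transform(old);
 *      } while (atomic_cmpxchg(&counter, old, new) != old);
 *
 * atomic_cmpxchg() returns the value the counter held, so equality with
 * the expected "old" means the update was applied.
 */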

#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
        int ret;
        unsigned long flags;

        cris_atomic_save(v, flags);
        ret = v->counter;
        if (ret != u)
                v->counter += a;
        cris_atomic_restore(v, flags);
        return ret != u;
}
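
/*
 * atomic_add_unless() returns non-zero when the add was performed, i.e.
 * when the counter did not already equal u; atomic_inc_not_zero() below
 * uses this to take a reference only while the count is still non-zero.
 */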

#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)

/* Atomic operations are already serializing */
#define smp_mb__before_atomic_dec()     barrier()
#define smp_mb__after_atomic_dec()      barrier()
#define smp_mb__before_atomic_inc()     barrier()
#define smp_mb__after_atomic_inc()      barrier()

#include <asm-generic/atomic-long.h>

#endif