/* atomic-grb.h — 3.5 KB (web-viewer line-number residue removed) */
  1. #ifndef __ASM_SH_ATOMIC_GRB_H
  2. #define __ASM_SH_ATOMIC_GRB_H
/*
 * atomic_add - atomically add @i to @v->counter.
 *
 * Software (gUSA-style) atomic sequence: r0 is pointed at label "1:"
 * (the end of the critical section) and r15 (the stack pointer) is
 * loaded with the negative byte size of the section (-6 = three
 * instructions, presumably 2 bytes each on SH — TODO confirm).
 * NOTE(review): this relies on the kernel's exception path noticing
 * r15 < 0 and restarting the sequence from r0 + r15; that side is not
 * visible here — confirm against the SH gUSA documentation.
 */
static inline void atomic_add(int i, atomic_t *v)
{
	int tmp;
	__asm__ __volatile__ (
		" .align 2 \n\t"
		" mova 1f, r0 \n\t" /* r0 = end point */
		" mov r15, r1 \n\t" /* r1 = saved sp */
		" mov #-6, r15 \n\t" /* LOGIN: r15 = size */
		" mov.l @%1, %0 \n\t" /* load old value */
		" add %2, %0 \n\t" /* add */
		" mov.l %0, @%1 \n\t" /* store new value */
		"1: mov r1, r15 \n\t" /* LOGOUT: restore sp */
		: "=&r" (tmp), /* %0: scratch for the counter value */
		"+r" (v) /* %1: pointer to the atomic_t */
		: "r" (i) /* %2: addend */
		: "memory" , "r0", "r1");
}
/*
 * atomic_sub - atomically subtract @i from @v->counter.
 *
 * Same gUSA-style LOGIN/LOGOUT sequence as atomic_add: r0 marks the end
 * of the critical section, r15 holds its negative byte size (-6) while
 * the load/modify/store runs.  NOTE(review): atomicity presumably comes
 * from the kernel restarting an interrupted sequence (r15 < 0) — not
 * visible in this header; verify against the gUSA exception handling.
 */
static inline void atomic_sub(int i, atomic_t *v)
{
	int tmp;
	__asm__ __volatile__ (
		" .align 2 \n\t"
		" mova 1f, r0 \n\t" /* r0 = end point */
		" mov r15, r1 \n\t" /* r1 = saved sp */
		" mov #-6, r15 \n\t" /* LOGIN: r15 = size */
		" mov.l @%1, %0 \n\t" /* load old value */
		" sub %2, %0 \n\t" /* sub */
		" mov.l %0, @%1 \n\t" /* store new value */
		"1: mov r1, r15 \n\t" /* LOGOUT: restore sp */
		: "=&r" (tmp), /* %0: scratch for the counter value */
		"+r" (v) /* %1: pointer to the atomic_t */
		: "r" (i) /* %2: subtrahend */
		: "memory" , "r0", "r1");
}
/*
 * atomic_add_return - atomically add @i to @v->counter and return the
 * new value.
 *
 * Identical critical section to atomic_add (gUSA-style LOGIN at r15 =
 * -6 bytes, LOGOUT at label "1:"); the freshly stored sum is left in
 * %0 (tmp) and returned to the caller.
 */
static inline int atomic_add_return(int i, atomic_t *v)
{
	int tmp;
	__asm__ __volatile__ (
		" .align 2 \n\t"
		" mova 1f, r0 \n\t" /* r0 = end point */
		" mov r15, r1 \n\t" /* r1 = saved sp */
		" mov #-6, r15 \n\t" /* LOGIN: r15 = size */
		" mov.l @%1, %0 \n\t" /* load old value */
		" add %2, %0 \n\t" /* add */
		" mov.l %0, @%1 \n\t" /* store new value */
		"1: mov r1, r15 \n\t" /* LOGOUT: restore sp */
		: "=&r" (tmp), /* %0: new counter value (returned) */
		"+r" (v) /* %1: pointer to the atomic_t */
		: "r" (i) /* %2: addend */
		: "memory" , "r0", "r1");
	return tmp;
}
/*
 * atomic_sub_return - atomically subtract @i from @v->counter and
 * return the new value.
 *
 * Identical critical section to atomic_sub (gUSA-style LOGIN at r15 =
 * -6 bytes, LOGOUT at label "1:"); the freshly stored difference is
 * left in %0 (tmp) and returned to the caller.
 */
static inline int atomic_sub_return(int i, atomic_t *v)
{
	int tmp;
	__asm__ __volatile__ (
		" .align 2 \n\t"
		" mova 1f, r0 \n\t" /* r0 = end point */
		" mov r15, r1 \n\t" /* r1 = saved sp */
		" mov #-6, r15 \n\t" /* LOGIN: r15 = size */
		" mov.l @%1, %0 \n\t" /* load old value */
		" sub %2, %0 \n\t" /* sub */
		" mov.l %0, @%1 \n\t" /* store new value */
		"1: mov r1, r15 \n\t" /* LOGOUT: restore sp */
		: "=&r" (tmp), /* %0: new counter value (returned) */
		"+r" (v) /* %1: pointer to the atomic_t */
		: "r" (i) /* %2: subtrahend */
		: "memory", "r0", "r1");
	return tmp;
}
/*
 * atomic_clear_mask - atomically clear the bits in @mask from
 * @v->counter (v->counter &= ~mask).
 *
 * The mask is inverted outside the critical section so the sequence
 * itself stays at three instructions (matching the -6 byte LOGIN).
 * Same gUSA-style LOGIN/LOGOUT scheme as the other ops in this file.
 */
static inline void atomic_clear_mask(unsigned int mask, atomic_t *v)
{
	int tmp;
	unsigned int _mask = ~mask; /* complement once, up front */
	__asm__ __volatile__ (
		" .align 2 \n\t"
		" mova 1f, r0 \n\t" /* r0 = end point */
		" mov r15, r1 \n\t" /* r1 = saved sp */
		" mov #-6, r15 \n\t" /* LOGIN: r15 = size */
		" mov.l @%1, %0 \n\t" /* load old value */
		" and %2, %0 \n\t" /* and (clears the masked bits) */
		" mov.l %0, @%1 \n\t" /* store new value */
		"1: mov r1, r15 \n\t" /* LOGOUT: restore sp */
		: "=&r" (tmp), /* %0: scratch for the counter value */
		"+r" (v) /* %1: pointer to the atomic_t */
		: "r" (_mask) /* %2: pre-inverted mask */
		: "memory" , "r0", "r1");
}
/*
 * atomic_set_mask - atomically set the bits in @mask in @v->counter
 * (v->counter |= mask).
 *
 * Same gUSA-style LOGIN/LOGOUT scheme as the other ops in this file:
 * r0 = end of the critical section, r15 = negative section size (-6)
 * while the load/or/store runs.
 */
static inline void atomic_set_mask(unsigned int mask, atomic_t *v)
{
	int tmp;
	__asm__ __volatile__ (
		" .align 2 \n\t"
		" mova 1f, r0 \n\t" /* r0 = end point */
		" mov r15, r1 \n\t" /* r1 = saved sp */
		" mov #-6, r15 \n\t" /* LOGIN: r15 = size */
		" mov.l @%1, %0 \n\t" /* load old value */
		" or %2, %0 \n\t" /* or */
		" mov.l %0, @%1 \n\t" /* store new value */
		"1: mov r1, r15 \n\t" /* LOGOUT: restore sp */
		: "=&r" (tmp), /* %0: scratch for the counter value */
		"+r" (v) /* %1: pointer to the atomic_t */
		: "r" (mask) /* %2: bits to set */
		: "memory" , "r0", "r1");
}
  108. #endif /* __ASM_SH_ATOMIC_GRB_H */