/* cmpxchg.h */
  1. /*
  2. * Copyright 2004-2011 Analog Devices Inc.
  3. *
  4. * Licensed under the GPL-2 or later.
  5. */
#ifndef __ARCH_BLACKFIN_CMPXCHG__
#define __ARCH_BLACKFIN_CMPXCHG__

#ifdef CONFIG_SMP

#include <linux/linkage.h>

/*
 * SMP exchange / compare-exchange primitives, implemented in assembly
 * elsewhere in the arch tree — one entry point per operand width
 * (1, 2 and 4 bytes).  Each returns the previous contents of *ptr.
 */
asmlinkage unsigned long __raw_xchg_1_asm(volatile void *ptr, unsigned long value);
asmlinkage unsigned long __raw_xchg_2_asm(volatile void *ptr, unsigned long value);
asmlinkage unsigned long __raw_xchg_4_asm(volatile void *ptr, unsigned long value);
/* NOTE: argument order is (ptr, new, old), not (ptr, old, new). */
asmlinkage unsigned long __raw_cmpxchg_1_asm(volatile void *ptr,
					unsigned long new, unsigned long old);
asmlinkage unsigned long __raw_cmpxchg_2_asm(volatile void *ptr,
					unsigned long new, unsigned long old);
asmlinkage unsigned long __raw_cmpxchg_4_asm(volatile void *ptr,
					unsigned long new, unsigned long old);
  19. static inline unsigned long __xchg(unsigned long x, volatile void *ptr,
  20. int size)
  21. {
  22. unsigned long tmp;
  23. switch (size) {
  24. case 1:
  25. tmp = __raw_xchg_1_asm(ptr, x);
  26. break;
  27. case 2:
  28. tmp = __raw_xchg_2_asm(ptr, x);
  29. break;
  30. case 4:
  31. tmp = __raw_xchg_4_asm(ptr, x);
  32. break;
  33. }
  34. return tmp;
  35. }
  36. /*
  37. * Atomic compare and exchange. Compare OLD with MEM, if identical,
  38. * store NEW in MEM. Return the initial value in MEM. Success is
  39. * indicated by comparing RETURN with OLD.
  40. */
  41. static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
  42. unsigned long new, int size)
  43. {
  44. unsigned long tmp;
  45. switch (size) {
  46. case 1:
  47. tmp = __raw_cmpxchg_1_asm(ptr, new, old);
  48. break;
  49. case 2:
  50. tmp = __raw_cmpxchg_2_asm(ptr, new, old);
  51. break;
  52. case 4:
  53. tmp = __raw_cmpxchg_4_asm(ptr, new, old);
  54. break;
  55. }
  56. return tmp;
  57. }
/*
 * cmpxchg() - type-generic wrapper: selects the width via
 * sizeof(*(ptr)) and casts the returned old value back to the
 * pointed-to type.
 */
#define cmpxchg(ptr, o, n) \
	((__typeof__(*(ptr)))__cmpxchg((ptr), (unsigned long)(o), \
		(unsigned long)(n), sizeof(*(ptr))))
#else /* !CONFIG_SMP */

#include <mach/blackfin.h>
#include <asm/irqflags.h>

/*
 * Deliberately oversized dummy type for the "m" asm constraint below,
 * so the constraint is taken to cover the whole object at ptr rather
 * than one word — presumably to keep the compiler from caching *ptr
 * around the exchange (classic kernel idiom; confirm against the asm).
 */
struct __xchg_dummy {
	unsigned long a[100];
};
#define __xg(x) ((volatile struct __xchg_dummy *)(x))
/*
 * __xchg - exchange *ptr with @x (UP variant).
 * @x:    new value to store
 * @ptr:  memory to operate on
 * @size: operand width in bytes (1, 2 or 4)
 *
 * Atomicity on UP is achieved by disabling interrupts around a plain
 * load/store pair instead of using hardware primitives.  Returns the
 * previous contents of *ptr; an unsupported size leaves memory
 * untouched and returns 0.
 */
static inline unsigned long __xchg(unsigned long x, volatile void *ptr,
				int size)
{
	unsigned long tmp = 0;
	unsigned long flags;

	flags = hard_local_irq_save();

	switch (size) {
	case 1:
		/* byte load, zero-extended, then byte store */
		__asm__ __volatile__
			("%0 = b%2 (z);\n\t"
			"b%2 = %1;\n\t"
			: "=&d" (tmp) : "d" (x), "m" (*__xg(ptr)) : "memory");
		break;
	case 2:
		/* 16-bit load, zero-extended, then 16-bit store */
		__asm__ __volatile__
			("%0 = w%2 (z);\n\t"
			"w%2 = %1;\n\t"
			: "=&d" (tmp) : "d" (x), "m" (*__xg(ptr)) : "memory");
		break;
	case 4:
		/* full 32-bit load, then store */
		__asm__ __volatile__
			("%0 = %2;\n\t"
			"%2 = %1;\n\t"
			: "=&d" (tmp) : "d" (x), "m" (*__xg(ptr)) : "memory");
		break;
	}
	hard_local_irq_restore(flags);
	return tmp;
}
#include <asm-generic/cmpxchg-local.h>

/*
 * cmpxchg_local and cmpxchg64_local are atomic wrt current CPU. Always make
 * them available.
 */
#define cmpxchg_local(ptr, o, n) \
	((__typeof__(*(ptr)))__cmpxchg_local_generic((ptr), (unsigned long)(o),\
			(unsigned long)(n), sizeof(*(ptr))))
#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))

/* On UP the CPU-local (IRQ-disabling) variants serve as full cmpxchg. */
#define cmpxchg(ptr, o, n) cmpxchg_local((ptr), (o), (n))
#define cmpxchg64(ptr, o, n) cmpxchg64_local((ptr), (o), (n))

#endif /* !CONFIG_SMP */

/* Type-generic exchange built on the config-specific __xchg above. */
#define xchg(ptr, x) ((__typeof__(*(ptr)))__xchg((unsigned long)(x), (ptr), sizeof(*(ptr))))

#endif /* __ARCH_BLACKFIN_CMPXCHG__ */