/*
 * SMP/VPE-safe functions to access "registers" (see note).
 *
 * NOTES:
 * - These macros use ll/sc instructions, so it is your responsibility to
 *   ensure these are available on your platform before including this file.
 * - The MIPS32 spec states that ll/sc results are undefined for uncached
 *   accesses.  This means they can't be used on HW registers accessed
 *   through kseg1.  Code which requires these macros for this purpose must
 *   front-end the registers with cached memory "registers" and have a single
 *   thread update the actual HW registers.
 * - A maximum of 2k of code can be inserted between ll and sc.  Every
 *   memory access between the instructions will increase the chance of
 *   sc failing and having to loop.
 * - When using custom_read_reg32/custom_write_reg32 only perform the
 *   necessary logical operations on the register value in between these
 *   two calls.  All other logic should be performed before the first call.
 * - There is a bug on the R10000 chips which has a workaround.  If you
 *   are affected by this bug, make sure to define the symbol 'R10000_LLSC_WAR'
 *   to be non-zero.  If you are using this header from within linux, you may
 *   include <asm/war.h> before including this file to have this defined
 *   appropriately for you.
 *
 * Copyright 2005-2007 PMC-Sierra, Inc.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the
 * Free Software Foundation; either version 2 of the License, or (at your
 * option) any later version.
 *
 * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.  IN NO
 * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * You should have received a copy of the GNU General Public License along
 * with this program; if not, write to the Free Software Foundation, Inc., 675
 * Mass Ave, Cambridge, MA 02139, USA.
 */
#ifndef __ASM_REGOPS_H__
#define __ASM_REGOPS_H__

#include <linux/types.h>

#include <asm/compiler.h>
#include <asm/war.h>

/* Default to "no workaround" if the platform didn't define it (see note). */
#ifndef R10000_LLSC_WAR
#define R10000_LLSC_WAR 0
#endif

/*
 * The R10000 ll/sc erratum workaround requires the branch-likely form
 * (beqzl) after sc; otherwise the plain beqz is used.
 */
#if R10000_LLSC_WAR == 1
#define __beqz "beqzl "
#else
#define __beqz "beqz "
#endif

/* Fallback for standalone (non-kernel) builds where u32 isn't defined. */
#ifndef _LINUX_TYPES_H
typedef unsigned int u32;
#endif
/*
 * set_value_reg32 - atomically set the masked bits of *addr to the
 * corresponding bits of @value, via an ll/sc read-modify-write loop.
 *
 * @addr:  cached memory "register" (ll/sc is undefined on uncached/kseg1
 *         accesses -- see the note at the top of this file)
 * @mask:  which bits to replace
 * @value: replacement bits; bits outside @mask are OR'ed in as-is, so the
 *         caller is expected to pass a value already confined to @mask
 */
static inline void set_value_reg32(volatile u32 *const addr,
	u32 const mask,
	u32 const value)
{
	u32 temp;

	__asm__ __volatile__(
	" .set push \n"
	" .set arch=r4000 \n"			/* ll/sc require >= r4000 ISA */
	"1: ll %0, %1 # set_value_reg32 \n"	/* load-linked current value */
	" and %0, %2 \n"			/* clear masked bits (%2 == ~mask) */
	" or %0, %3 \n"				/* merge in the new value bits */
	" sc %0, %1 \n"				/* store-conditional: %0 = 0 on contention */
	" "__beqz"%0, 1b \n"			/* retry the whole RMW if sc failed */
	" nop \n"				/* branch delay slot */
	" .set pop \n"
	: "=&r" (temp), "=" GCC_OFF_SMALL_ASM() (*addr)
	: "ir" (~mask), "ir" (value), GCC_OFF_SMALL_ASM() (*addr));
}
/*
 * set_reg32 - atomically set all the bits in @mask to '1' in *addr,
 * via an ll/sc read-modify-write loop.  Bits outside @mask are preserved.
 *
 * @addr: cached memory "register" (must not be an uncached/kseg1 access)
 * @mask: bits to set
 */
static inline void set_reg32(volatile u32 *const addr,
	u32 const mask)
{
	u32 temp;

	__asm__ __volatile__(
	" .set push \n"
	" .set arch=r4000 \n"		/* ll/sc require >= r4000 ISA */
	"1: ll %0, %1 # set_reg32 \n"	/* load-linked current value */
	" or %0, %2 \n"			/* set the masked bits */
	" sc %0, %1 \n"			/* store-conditional: %0 = 0 on contention */
	" "__beqz"%0, 1b \n"		/* retry if sc failed */
	" nop \n"			/* branch delay slot */
	" .set pop \n"
	: "=&r" (temp), "=" GCC_OFF_SMALL_ASM() (*addr)
	: "ir" (mask), GCC_OFF_SMALL_ASM() (*addr));
}
/*
 * clear_reg32 - atomically clear all the bits in @mask to '0' in *addr,
 * via an ll/sc read-modify-write loop.  Bits outside @mask are preserved.
 *
 * @addr: cached memory "register" (must not be an uncached/kseg1 access)
 * @mask: bits to clear
 */
static inline void clear_reg32(volatile u32 *const addr,
	u32 const mask)
{
	u32 temp;

	__asm__ __volatile__(
	" .set push \n"
	" .set arch=r4000 \n"			/* ll/sc require >= r4000 ISA */
	"1: ll %0, %1 # clear_reg32 \n"		/* load-linked current value */
	" and %0, %2 \n"			/* clear the masked bits (%2 == ~mask) */
	" sc %0, %1 \n"				/* store-conditional: %0 = 0 on contention */
	" "__beqz"%0, 1b \n"			/* retry if sc failed */
	" nop \n"				/* branch delay slot */
	" .set pop \n"
	: "=&r" (temp), "=" GCC_OFF_SMALL_ASM() (*addr)
	: "ir" (~mask), GCC_OFF_SMALL_ASM() (*addr));
}
/*
 * toggle_reg32 - atomically flip all the bits in @mask in *addr
 * ('0' -> '1' and '1' -> '0'), via an ll/sc read-modify-write loop.
 * Bits outside @mask are preserved.
 *
 * @addr: cached memory "register" (must not be an uncached/kseg1 access)
 * @mask: bits to toggle
 */
static inline void toggle_reg32(volatile u32 *const addr,
	u32 const mask)
{
	u32 temp;

	__asm__ __volatile__(
	" .set push \n"
	" .set arch=r4000 \n"			/* ll/sc require >= r4000 ISA */
	"1: ll %0, %1 # toggle_reg32 \n"	/* load-linked current value */
	" xor %0, %2 \n"			/* flip the masked bits */
	" sc %0, %1 \n"				/* store-conditional: %0 = 0 on contention */
	" "__beqz"%0, 1b \n"			/* retry if sc failed */
	" nop \n"				/* branch delay slot */
	" .set pop \n"
	: "=&r" (temp), "=" GCC_OFF_SMALL_ASM() (*addr)
	: "ir" (mask), GCC_OFF_SMALL_ASM() (*addr));
}
/*
 * read_reg32 - read *addr and return only the masked bits; all other
 * bits of the result are '0'.  A plain lw is used (no ll/sc needed for
 * a read-only access).
 *
 * @addr: memory "register" to read
 * @mask: bits of interest
 *
 * Returns: (*addr & mask)
 */
static inline u32 read_reg32(volatile u32 *const addr,
	u32 const mask)
{
	u32 temp;

	__asm__ __volatile__(
	" .set push \n"
	" .set noreorder \n"		/* keep the load/mask sequence as written */
	" lw %0, %1 # read \n"
	" and %0, %2 # mask \n"
	" .set pop \n"
	: "=&r" (temp)
	: "m" (*addr), "ir" (mask));

	return temp;
}
/*
 * blocking_read_reg32 - Read address with blocking load
 *
 * Uncached writes need to be read back to ensure they reach RAM.
 * The returned value must be 'used' to prevent it from becoming a
 * non-blocking load.
 *
 * @addr: memory "register" to read back
 *
 * Returns: the value read from *addr
 */
static inline u32 blocking_read_reg32(volatile u32 *const addr)
{
	u32 temp;

	__asm__ __volatile__(
	" .set push \n"
	" .set noreorder \n"
	" lw %0, %1 # read \n"
	" move %0, %0 # block \n"	/* consume the loaded value so the CPU must wait for it */
	" .set pop \n"
	: "=&r" (temp)
	: "m" (*addr));

	return temp;
}
/*
 * For special strange cases only:
 *
 * If you need custom processing within a ll/sc loop, use the following macros
 * VERY CAREFULLY:
 *
 *   u32 tmp;                        <-- Define a variable to hold the data
 *
 *   custom_read_reg32(address, tmp); <-- Reads the address and puts the value
 *                                        in the 'tmp' variable given
 *
 *   From here on out, you are (basically) atomic, so don't do anything too
 *   fancy!
 *   Also, this code may loop if the end of this block fails to write
 *   everything back safely due to the other CPU, so do NOT do anything
 *   with side-effects!
 *
 *   custom_write_reg32(address, tmp); <-- Writes back 'tmp' safely.
 *
 * The two macros must be used as a pair within the same function:
 * custom_read_reg32 emits the "1:" label that custom_write_reg32's
 * retry branch ("__beqz ... 1b") jumps back to, so the compiler-emitted
 * C code in between is re-executed whenever sc fails.
 */
#define custom_read_reg32(address, tmp)				\
	__asm__ __volatile__(					\
	" .set push \n"						\
	" .set arch=r4000 \n"					\
	"1: ll %0, %1 #custom_read_reg32 \n"			\
	" .set pop \n"						\
	: "=r" (tmp), "=" GCC_OFF_SMALL_ASM() (*address)	\
	: GCC_OFF_SMALL_ASM() (*address))

#define custom_write_reg32(address, tmp)			\
	__asm__ __volatile__(					\
	" .set push \n"						\
	" .set arch=r4000 \n"					\
	" sc %0, %1 #custom_write_reg32 \n"			\
	" "__beqz"%0, 1b \n"					\
	" nop \n"						\
	" .set pop \n"						\
	: "=&r" (tmp), "=" GCC_OFF_SMALL_ASM() (*address)	\
	: "0" (tmp), GCC_OFF_SMALL_ASM() (*address))
  214. #endif /* __ASM_REGOPS_H__ */