/* reg_8xx.h */
/*
 * Contains register definitions common to PowerPC 8xx CPUs.
 */
  4. #ifndef _ASM_POWERPC_REG_8xx_H
  5. #define _ASM_POWERPC_REG_8xx_H
  6. #include <asm/mmu-8xx.h>
  7. /* Cache control on the MPC8xx is provided through some additional
  8. * special purpose registers.
  9. */
  10. #define SPRN_IC_CST 560 /* Instruction cache control/status */
  11. #define SPRN_IC_ADR 561 /* Address needed for some commands */
  12. #define SPRN_IC_DAT 562 /* Read-only data register */
  13. #define SPRN_DC_CST 568 /* Data cache control/status */
  14. #define SPRN_DC_ADR 569 /* Address needed for some commands */
  15. #define SPRN_DC_DAT 570 /* Read-only data register */
  16. /* Misc Debug */
  17. #define SPRN_DPDR 630
  18. #define SPRN_MI_CAM 816
  19. #define SPRN_MI_RAM0 817
  20. #define SPRN_MI_RAM1 818
  21. #define SPRN_MD_CAM 824
  22. #define SPRN_MD_RAM0 825
  23. #define SPRN_MD_RAM1 826
  24. /* Special MSR manipulation registers */
  25. #define SPRN_EIE 80 /* External interrupt enable (EE=1, RI=1) */
  26. #define SPRN_EID 81 /* External interrupt disable (EE=0, RI=1) */
  27. /* Commands. Only the first few are available to the instruction cache.
  28. */
  29. #define IDC_ENABLE 0x02000000 /* Cache enable */
  30. #define IDC_DISABLE 0x04000000 /* Cache disable */
  31. #define IDC_LDLCK 0x06000000 /* Load and lock */
  32. #define IDC_UNLINE 0x08000000 /* Unlock line */
  33. #define IDC_UNALL 0x0a000000 /* Unlock all */
  34. #define IDC_INVALL 0x0c000000 /* Invalidate all */
  35. #define DC_FLINE 0x0e000000 /* Flush data cache line */
  36. #define DC_SFWT 0x01000000 /* Set forced writethrough mode */
  37. #define DC_CFWT 0x03000000 /* Clear forced writethrough mode */
  38. #define DC_SLES 0x05000000 /* Set little endian swap mode */
  39. #define DC_CLES 0x07000000 /* Clear little endian swap mode */
  40. /* Status.
  41. */
  42. #define IDC_ENABLED 0x80000000 /* Cache is enabled */
  43. #define IDC_CERR1 0x00200000 /* Cache error 1 */
  44. #define IDC_CERR2 0x00100000 /* Cache error 2 */
  45. #define IDC_CERR3 0x00080000 /* Cache error 3 */
  46. #define DC_DFWT 0x40000000 /* Data cache is forced write through */
  47. #define DC_LES 0x20000000 /* Caches are little endian mode */
  48. #ifdef CONFIG_8xx_CPU6
  49. #define do_mtspr_cpu6(rn, rn_addr, v) \
  50. do { \
  51. int _reg_cpu6 = rn_addr, _tmp_cpu6; \
  52. asm volatile("stw %0, %1;" \
  53. "lwz %0, %1;" \
  54. "mtspr " __stringify(rn) ",%2" : \
  55. : "r" (_reg_cpu6), "m"(_tmp_cpu6), \
  56. "r" ((unsigned long)(v)) \
  57. : "memory"); \
  58. } while (0)
  59. #define do_mtspr(rn, v) asm volatile("mtspr " __stringify(rn) ",%0" : \
  60. : "r" ((unsigned long)(v)) \
  61. : "memory")
  62. #define mtspr(rn, v) \
  63. do { \
  64. if (rn == SPRN_IMMR) \
  65. do_mtspr_cpu6(rn, 0x3d30, v); \
  66. else if (rn == SPRN_IC_CST) \
  67. do_mtspr_cpu6(rn, 0x2110, v); \
  68. else if (rn == SPRN_IC_ADR) \
  69. do_mtspr_cpu6(rn, 0x2310, v); \
  70. else if (rn == SPRN_IC_DAT) \
  71. do_mtspr_cpu6(rn, 0x2510, v); \
  72. else if (rn == SPRN_DC_CST) \
  73. do_mtspr_cpu6(rn, 0x3110, v); \
  74. else if (rn == SPRN_DC_ADR) \
  75. do_mtspr_cpu6(rn, 0x3310, v); \
  76. else if (rn == SPRN_DC_DAT) \
  77. do_mtspr_cpu6(rn, 0x3510, v); \
  78. else if (rn == SPRN_MI_CTR) \
  79. do_mtspr_cpu6(rn, 0x2180, v); \
  80. else if (rn == SPRN_MI_AP) \
  81. do_mtspr_cpu6(rn, 0x2580, v); \
  82. else if (rn == SPRN_MI_EPN) \
  83. do_mtspr_cpu6(rn, 0x2780, v); \
  84. else if (rn == SPRN_MI_TWC) \
  85. do_mtspr_cpu6(rn, 0x2b80, v); \
  86. else if (rn == SPRN_MI_RPN) \
  87. do_mtspr_cpu6(rn, 0x2d80, v); \
  88. else if (rn == SPRN_MI_CAM) \
  89. do_mtspr_cpu6(rn, 0x2190, v); \
  90. else if (rn == SPRN_MI_RAM0) \
  91. do_mtspr_cpu6(rn, 0x2390, v); \
  92. else if (rn == SPRN_MI_RAM1) \
  93. do_mtspr_cpu6(rn, 0x2590, v); \
  94. else if (rn == SPRN_MD_CTR) \
  95. do_mtspr_cpu6(rn, 0x3180, v); \
  96. else if (rn == SPRN_M_CASID) \
  97. do_mtspr_cpu6(rn, 0x3380, v); \
  98. else if (rn == SPRN_MD_AP) \
  99. do_mtspr_cpu6(rn, 0x3580, v); \
  100. else if (rn == SPRN_MD_EPN) \
  101. do_mtspr_cpu6(rn, 0x3780, v); \
  102. else if (rn == SPRN_M_TWB) \
  103. do_mtspr_cpu6(rn, 0x3980, v); \
  104. else if (rn == SPRN_MD_TWC) \
  105. do_mtspr_cpu6(rn, 0x3b80, v); \
  106. else if (rn == SPRN_MD_RPN) \
  107. do_mtspr_cpu6(rn, 0x3d80, v); \
  108. else if (rn == SPRN_M_TW) \
  109. do_mtspr_cpu6(rn, 0x3f80, v); \
  110. else if (rn == SPRN_MD_CAM) \
  111. do_mtspr_cpu6(rn, 0x3190, v); \
  112. else if (rn == SPRN_MD_RAM0) \
  113. do_mtspr_cpu6(rn, 0x3390, v); \
  114. else if (rn == SPRN_MD_RAM1) \
  115. do_mtspr_cpu6(rn, 0x3590, v); \
  116. else if (rn == SPRN_DEC) \
  117. do_mtspr_cpu6(rn, 0x2c00, v); \
  118. else if (rn == SPRN_TBWL) \
  119. do_mtspr_cpu6(rn, 0x3880, v); \
  120. else if (rn == SPRN_TBWU) \
  121. do_mtspr_cpu6(rn, 0x3a80, v); \
  122. else if (rn == SPRN_DPDR) \
  123. do_mtspr_cpu6(rn, 0x2d30, v); \
  124. else \
  125. do_mtspr(rn, v); \
  126. } while (0)
  127. #endif
  128. #endif /* _ASM_POWERPC_REG_8xx_H */