/*
 * cp15.h — ARM CP15 (system control coprocessor) register bit definitions
 * and access helpers, plus Samsung TIMA RKP secure-monitor call wrappers.
 */
  1. #ifndef __ASM_ARM_CP15_H
  2. #define __ASM_ARM_CP15_H
  3. #include <asm/barrier.h>
  4. /*
  5. * CR1 bits (CP#15 CR1)
  6. */
  7. #define CR_M (1 << 0) /* MMU enable */
  8. #define CR_A (1 << 1) /* Alignment abort enable */
  9. #define CR_C (1 << 2) /* Dcache enable */
  10. #define CR_W (1 << 3) /* Write buffer enable */
  11. #define CR_P (1 << 4) /* 32-bit exception handler */
  12. #define CR_D (1 << 5) /* 32-bit data address range */
  13. #define CR_L (1 << 6) /* Implementation defined */
  14. #define CR_B (1 << 7) /* Big endian */
  15. #define CR_S (1 << 8) /* System MMU protection */
  16. #define CR_R (1 << 9) /* ROM MMU protection */
  17. #define CR_F (1 << 10) /* Implementation defined */
  18. #define CR_Z (1 << 11) /* Implementation defined */
  19. #define CR_I (1 << 12) /* Icache enable */
  20. #define CR_V (1 << 13) /* Vectors relocated to 0xffff0000 */
  21. #define CR_RR (1 << 14) /* Round Robin cache replacement */
  22. #define CR_L4 (1 << 15) /* LDR pc can set T bit */
  23. #define CR_DT (1 << 16)
  24. #define CR_IT (1 << 18)
  25. #define CR_ST (1 << 19)
  26. #define CR_FI (1 << 21) /* Fast interrupt (lower latency mode) */
  27. #define CR_U (1 << 22) /* Unaligned access operation */
  28. #define CR_XP (1 << 23) /* Extended page tables */
  29. #define CR_VE (1 << 24) /* Vectored interrupts */
  30. #define CR_EE (1 << 25) /* Exception (Big) Endian */
  31. #define CR_TRE (1 << 28) /* TEX remap enable */
  32. #define CR_AFE (1 << 29) /* Access flag enable */
  33. #define CR_TE (1 << 30) /* Thumb exception enable */
  34. #ifndef __ASSEMBLY__
  35. #if __LINUX_ARM_ARCH__ >= 4
  36. #define vectors_high() (cr_alignment & CR_V)
  37. #else
  38. #define vectors_high() (0)
  39. #endif
  40. extern unsigned long cr_no_alignment; /* defined in entry-armv.S */
  41. extern unsigned long cr_alignment; /* defined in entry-armv.S */
  42. static inline unsigned int get_cr(void)
  43. {
  44. unsigned int val;
  45. asm("mrc p15, 0, %0, c1, c0, 0 @ get CR" : "=r" (val) : : "cc");
  46. return val;
  47. }
  48. #ifdef CONFIG_TIMA_RKP
  49. void tima_dump_log2(void);
  50. void tima_verify_state(unsigned long pmdp, unsigned long val, unsigned long rd_only, unsigned long caller);
  51. int tima_is_pg_protected(unsigned long va);
  52. #define BUILD_CMD_ID(cmdid) ((0x3f8<<20)|(cmdid <<12)|0x221)
  53. static inline void tima_send_cmd (unsigned int r2val, unsigned int cmdid)
  54. {
  55. volatile unsigned int tima_cmdid = BUILD_CMD_ID(cmdid);
  56. asm volatile (
  57. #if __GNUC__ >= 4 && __GNUC_MINOR__ >= 6
  58. ".arch_extension sec\n"
  59. #endif
  60. "stmfd sp!, {r0-r3, r11}\n"
  61. "mov r11, r0\n"
  62. "mov r2, %0\n"
  63. "mov r0, %1\n"
  64. "smc #1\n"
  65. "ldmfd sp!, {r0-r3, r11}" : : "r" (r2val), "r" (tima_cmdid) : "r0","r2","cc");
  66. }
  67. static inline void tima_send_cmd2 (unsigned int p1, unsigned int p2, unsigned int cmdid)
  68. {
  69. volatile unsigned int tima_cmdid = BUILD_CMD_ID(cmdid);
  70. asm volatile (
  71. #if __GNUC__ >= 4 && __GNUC_MINOR__ >= 6
  72. ".arch_extension sec\n"
  73. #endif
  74. "stmfd sp!, {r0-r3, r11}\n"
  75. "mov r11, r0\n"
  76. "mov r2, %0\n"
  77. "mov r3, %1\n"
  78. "mov r0, %2\n"
  79. "smc #1\n"
  80. "ldmfd sp!, {r0-r3, r11}" : : "r" (p1), "r" (p2), "r" (tima_cmdid) : "r0","r2","r3","cc");
  81. }
  82. static inline void tima_send_cmd3 (unsigned int p1, unsigned int p2, unsigned int p3, unsigned int cmdid)
  83. {
  84. volatile unsigned int tima_cmdid = BUILD_CMD_ID(cmdid);
  85. asm volatile (
  86. #if __GNUC__ >= 4 && __GNUC_MINOR__ >= 6
  87. ".arch_extension sec\n"
  88. #endif
  89. "stmfd sp!, {r0-r4, r11}\n"
  90. "mov r11, r0\n"
  91. "mov r2, %0\n"
  92. "mov r3, %1\n"
  93. "mov r4, %2\n"
  94. "mov r0, %3\n"
  95. "smc #1\n"
  96. "ldmfd sp!, {r0-r4, r11}" : : "r" (p1), "r" (p2), "r" (p3), "r" (tima_cmdid) : "r0","r2","r3","r4","cc");
  97. }
  98. static inline void tima_send_cmd4 (unsigned int p1, unsigned int p2, unsigned int p3, unsigned int p4, unsigned int cmdid)
  99. {
  100. volatile unsigned int tima_cmdid = BUILD_CMD_ID(cmdid);
  101. asm volatile (
  102. #if __GNUC__ >= 4 && __GNUC_MINOR__ >= 6
  103. ".arch_extension sec\n"
  104. #endif
  105. "stmfd sp!, {r0-r5, r11}\n"
  106. "mov r11, r0\n"
  107. "mov r2, %0\n"
  108. "mov r3, %1\n"
  109. "mov r4, %2\n"
  110. "mov r5, %3\n"
  111. "mov r0, %4\n"
  112. "smc #1\n"
  113. "ldmfd sp!, {r0-r5, r11}" : : "r" (p1), "r" (p2), "r" (p3), "r" (p4), "r" (tima_cmdid) : "r0","r2","r3","r4","r5","cc");
  114. }
  115. static inline void tima_send_cmd5 (unsigned int p1, unsigned int p2, unsigned int p3, unsigned int p4, unsigned int p5,unsigned int cmdid)
  116. {
  117. volatile unsigned int tima_cmdid = BUILD_CMD_ID(cmdid);
  118. asm volatile (
  119. #if __GNUC__ >= 4 && __GNUC_MINOR__ >= 6
  120. ".arch_extension sec\n"
  121. #endif
  122. "stmfd sp!, {r0-r6, r11}\n"
  123. "mov r11, r0\n"
  124. "mov r2, %0\n"
  125. "mov r3, %1\n"
  126. "mov r4, %2\n"
  127. "mov r5, %3\n"
  128. "mov r6, %4\n"
  129. "mov r0, %5\n"
  130. "smc #1\n"
  131. "ldmfd sp!, {r0-r6, r11}" : : "r" (p1), "r" (p2), "r" (p3), "r" (p4), "r" (p5),"r" (tima_cmdid) : "r0","r2","r3","r4","r5","r6","cc");
  132. }
  133. #define tima_cache_flush(x) \
  134. __asm__ __volatile__( "mcr p15, 0, %0, c7, c14, 1\n" \
  135. "dsb\n" \
  136. "isb\n" \
  137. : : "r" (x))
  138. #define tima_cache_inval(x) \
  139. __asm__ __volatile__( "mcr p15, 0, %0, c7, c6, 1\n" \
  140. "dsb\n" \
  141. "isb\n" \
  142. : : "r" (x))
  143. #define tima_tlb_inval_is(x) \
  144. __asm__ __volatile__( "mcr p15, 0, %0, c8, c3, 0\n" \
  145. "dsb\n" \
  146. "isb\n" \
  147. : : "r" (x))
  148. #endif /* CONFIG_TIMA_RKP */
  149. static inline void set_cr(unsigned int val)
  150. {
  151. asm volatile("mcr p15, 0, %0, c1, c0, 0 @ set CR"
  152. : : "r" (val) : "cc");
  153. isb();
  154. }
  155. #ifndef CONFIG_SMP
  156. extern void adjust_cr(unsigned long mask, unsigned long set);
  157. #endif
  158. #define CPACC_FULL(n) (3 << (n * 2))
  159. #define CPACC_SVC(n) (1 << (n * 2))
  160. #define CPACC_DISABLE(n) (0 << (n * 2))
  161. static inline unsigned int get_copro_access(void)
  162. {
  163. unsigned int val;
  164. asm("mrc p15, 0, %0, c1, c0, 2 @ get copro access"
  165. : "=r" (val) : : "cc");
  166. return val;
  167. }
  168. static inline void set_copro_access(unsigned int val)
  169. {
  170. asm volatile("mcr p15, 0, %0, c1, c0, 2 @ set copro access"
  171. : : "r" (val) : "cc");
  172. isb();
  173. }
  174. #endif
  175. #endif