@ cache-v4wb.S — listing extracted from a source viewer; the size banner and
@ the fused run of line numbers were viewer artifacts and have been removed.
/*
 *  linux/arch/arm/mm/cache-v4wb.S
 *
 *  Copyright (C) 1997-2002 Russell King
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */
#include <linux/linkage.h>
#include <linux/init.h>
#include <asm/memory.h>
#include <asm/page.h>
#include "proc-macros.S"
/*
 * The size of one data cache line.
 */
#define CACHE_DLINESIZE	32

/*
 * The total size of the data cache.  SA-110 has a 16K D-cache,
 * SA-1100 has an 8K D-cache; anything else is a configuration error.
 */
#if defined(CONFIG_CPU_SA110)
# define CACHE_DSIZE	16384
#elif defined(CONFIG_CPU_SA1100)
# define CACHE_DSIZE	8192
#else
# error Unknown cache size
#endif

/*
 * This is the size at which it becomes more efficient to
 * clean the whole cache, rather than using the individual
 * cache line maintenance instructions.
 *
 *  Size   Clean (ticks)   Dirty (ticks)
 *   4096   21  20  21      53  55  54
 *   8192   40  41  40     106 100 102
 *  16384   77  77  76     140 140 138
 *  32768  150 149 150     214 216 212 <---
 *  65536  296 297 296     351 358 361
 * 131072  591 591 591     656 657 651
 *  Whole  132 136 132     221 217 207 <---
 */
#define CACHE_DLIMIT	(CACHE_DSIZE * 4)

	.data
@ flush_base holds the base of the dedicated flush window currently in use;
@ __flush_whole_cache toggles it between two CACHE_DSIZE-sized halves.
flush_base:
	.long	FLUSH_BASE
	.text
/*
 *	flush_icache_all()
 *
 *	Unconditionally clean and invalidate the entire icache.
 *	Clobbers: r0.  The I-cache on these cores can only be
 *	invalidated as a whole (CP15 c7,c5,0 with a zero operand).
 */
ENTRY(v4wb_flush_icache_all)
	mov	r0, #0
	mcr	p15, 0, r0, c7, c5, 0		@ invalidate I cache
	mov	pc, lr
ENDPROC(v4wb_flush_icache_all)
/*
 *	flush_user_cache_all()
 *
 *	Clean and invalidate all cache entries in a particular address
 *	space.  On these VIVT caches there is no per-ASID operation, so
 *	this is the same as flushing the whole cache.
 */
ENTRY(v4wb_flush_user_cache_all)
	/* FALLTHROUGH */
/*
 *	flush_kern_cache_all()
 *
 *	Clean and invalidate the entire cache.
 */
ENTRY(v4wb_flush_kern_cache_all)
	mov	ip, #0
	mcr	p15, 0, ip, c7, c5, 0		@ invalidate I cache
__flush_whole_cache:
	@ There is no "clean entire D cache" CP15 operation here, so the
	@ whole cache is cleaned by reading CACHE_DSIZE bytes of dedicated
	@ flush-window memory: each load allocates a line, evicting (and
	@ writing back, if dirty) whatever was cached before.
	ldr	r3, =flush_base
	ldr	r1, [r3, #0]
	eor	r1, r1, #CACHE_DSIZE		@ toggle between the two flush windows
	str	r1, [r3, #0]			@ ... so back-to-back flushes still evict
	add	r2, r1, #CACHE_DSIZE		@ r2 = end of flush window
1:	ldr	r3, [r1], #32			@ touch one cache line per iteration
	cmp	r1, r2
	blo	1b
#ifdef FLUSH_BASE_MINICACHE
	@ SA-1100 family also has a 512-byte minicache; flush it the same way.
	add	r2, r2, #FLUSH_BASE_MINICACHE - FLUSH_BASE
	sub	r1, r2, #512			@ only 512 bytes
1:	ldr	r3, [r1], #32
	cmp	r1, r2
	blo	1b
#endif
	mcr	p15, 0, ip, c7, c10, 4		@ drain write buffer
	mov	pc, lr
/*
 *	flush_user_cache_range(start, end, flags)
 *
 *	Invalidate a range of cache entries in the specified
 *	address space.
 *
 *	- start - start address (inclusive, page aligned)
 *	- end	- end address (exclusive, page aligned)
 *	- flags	- vma_area_struct flags describing address space
 */
ENTRY(v4wb_flush_user_cache_range)
	mov	ip, #0
	sub	r3, r1, r0			@ calculate total size
	tst	r2, #VM_EXEC			@ executable region?
	mcrne	p15, 0, ip, c7, c5, 0		@ if so, invalidate I cache too
	cmp	r3, #CACHE_DLIMIT		@ total size >= limit?
	bhs	__flush_whole_cache		@ cheaper to flush whole D cache
	@ Range is small enough: clean+invalidate line by line.
1:	mcr	p15, 0, r0, c7, c10, 1		@ clean D entry
	mcr	p15, 0, r0, c7, c6, 1		@ invalidate D entry
	add	r0, r0, #CACHE_DLINESIZE
	cmp	r0, r1
	blo	1b
	tst	r2, #VM_EXEC
	mcrne	p15, 0, ip, c7, c10, 4		@ drain write buffer (exec regions only)
	mov	pc, lr
/*
 *	flush_kern_dcache_area(void *addr, size_t size)
 *
 *	Ensure no D cache aliasing occurs, either with itself or
 *	the I cache.
 *
 *	- addr	- kernel address
 *	- size	- region size
 */
ENTRY(v4wb_flush_kern_dcache_area)
	add	r1, r0, r1			@ convert (addr, size) to (start, end)
	/* fall through */
/*
 *	coherent_kern_range(start, end)
 *
 *	Ensure coherency between the Icache and the Dcache in the
 *	region described by start.  If you have non-snooping
 *	Harvard caches, you need to implement this function.
 *
 *	- start - virtual start address
 *	- end	- virtual end address
 */
ENTRY(v4wb_coherent_kern_range)
	/* fall through */
/*
 *	coherent_user_range(start, end)
 *
 *	Ensure coherency between the Icache and the Dcache in the
 *	region described by start.  If you have non-snooping
 *	Harvard caches, you need to implement this function.
 *
 *	- start - virtual start address
 *	- end	- virtual end address
 */
ENTRY(v4wb_coherent_user_range)
	bic	r0, r0, #CACHE_DLINESIZE - 1	@ align start down to a line
1:	mcr	p15, 0, r0, c7, c10, 1		@ clean D entry
	mcr	p15, 0, r0, c7, c6, 1		@ invalidate D entry
	add	r0, r0, #CACHE_DLINESIZE
	cmp	r0, r1
	blo	1b
	@ D side is now in memory; invalidate the whole I cache (no
	@ line-granular I invalidate on these cores) and drain the WB.
	mov	ip, #0
	mcr	p15, 0, ip, c7, c5, 0		@ invalidate I cache
	mcr	p15, 0, ip, c7, c10, 4		@ drain WB
	mov	pc, lr
/*
 *	dma_inv_range(start, end)
 *
 *	Invalidate (discard) the specified virtual address range.
 *	May not write back any entries.  If 'start' or 'end'
 *	are not cache line aligned, those lines must be written
 *	back first, so that data outside the range is not lost.
 *
 *	- start - virtual start address
 *	- end	- virtual end address
 */
v4wb_dma_inv_range:
	tst	r0, #CACHE_DLINESIZE - 1	@ start misaligned?
	bic	r0, r0, #CACHE_DLINESIZE - 1
	mcrne	p15, 0, r0, c7, c10, 1		@ clean boundary D entry first
	tst	r1, #CACHE_DLINESIZE - 1	@ end misaligned?
	mcrne	p15, 0, r1, c7, c10, 1		@ clean boundary D entry first
1:	mcr	p15, 0, r0, c7, c6, 1		@ invalidate D entry
	add	r0, r0, #CACHE_DLINESIZE
	cmp	r0, r1
	blo	1b
	mcr	p15, 0, r0, c7, c10, 4		@ drain write buffer
	mov	pc, lr
/*
 *	dma_clean_range(start, end)
 *
 *	Clean (write back) the specified virtual address range.
 *	The cache contents remain valid afterwards.
 *
 *	- start - virtual start address
 *	- end	- virtual end address
 */
v4wb_dma_clean_range:
	bic	r0, r0, #CACHE_DLINESIZE - 1	@ align start down to a line
1:	mcr	p15, 0, r0, c7, c10, 1		@ clean D entry
	add	r0, r0, #CACHE_DLINESIZE
	cmp	r0, r1
	blo	1b
	mcr	p15, 0, r0, c7, c10, 4		@ drain write buffer
	mov	pc, lr
/*
 *	dma_flush_range(start, end)
 *
 *	Clean and invalidate the specified virtual address range.
 *
 *	- start - virtual start address
 *	- end	- virtual end address
 *
 *	This is actually the same operation sequence as
 *	v4wb_coherent_kern_range(), so alias it rather than
 *	duplicating the code.
 */
	.globl	v4wb_dma_flush_range
	.set	v4wb_dma_flush_range, v4wb_coherent_kern_range
/*
 *	dma_map_area(start, size, dir)
 *	- start	- kernel virtual start address
 *	- size	- size of region
 *	- dir	- DMA direction
 */
ENTRY(v4wb_dma_map_area)
	add	r1, r1, r0			@ r1 = end = start + size
	cmp	r2, #DMA_TO_DEVICE
	beq	v4wb_dma_clean_range		@ dir == TO_DEVICE: write back only
	bcs	v4wb_dma_inv_range		@ dir unsigned-higher: invalidate only
	b	v4wb_dma_flush_range		@ otherwise: clean + invalidate
ENDPROC(v4wb_dma_map_area)
/*
 *	dma_unmap_area(start, size, dir)
 *	- start	- kernel virtual start address
 *	- size	- size of region
 *	- dir	- DMA direction
 *
 *	Nothing to do on unmap for this cache type: all required
 *	maintenance was done in dma_map_area() above.
 */
ENTRY(v4wb_dma_unmap_area)
	mov	pc, lr
ENDPROC(v4wb_dma_unmap_area)

	@ No cache level awareness on v4wb: flushing to the level of
	@ unification is the same as flushing everything.
	.globl	v4wb_flush_kern_cache_louis
	.equ	v4wb_flush_kern_cache_louis, v4wb_flush_kern_cache_all

	__INITDATA

	@ define struct cpu_cache_fns (see <asm/cacheflush.h> and proc-macros.S)
	define_cache_functions v4wb