/*
 *  linux/arch/arm/mm/cache-v4wb.S
 *
 *  Copyright (C) 1997-2002 Russell King
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */
#include <linux/linkage.h>
#include <linux/init.h>
#include <asm/assembler.h>
#include <asm/memory.h>
#include <asm/page.h>
#include "proc-macros.S"
/*
 * The size of one data cache line.
 */
#define CACHE_DLINESIZE	32

/*
 * The total size of the data cache.  Both supported StrongARM
 * variants have a fixed, known D-cache size.
 */
#if defined(CONFIG_CPU_SA110)
# define CACHE_DSIZE	16384
#elif defined(CONFIG_CPU_SA1100)
# define CACHE_DSIZE	8192
#else
# error Unknown cache size
#endif

/*
 * This is the size at which it becomes more efficient to
 * clean the whole cache, rather than using the individual
 * cache line maintenance instructions.  Measurements below
 * (three runs each) motivated the factor-of-4 threshold.
 *
 *  Size    Clean (ticks)   Dirty (ticks)
 *  4096    21  20  21      53  55  54
 *  8192    40  41  40      106 100 102
 *  16384   77  77  76      140 140 138
 *  32768   150 149 150     214 216 212 <---
 *  65536   296 297 296     351 358 361
 *  131072  591 591 591     656 657 651
 *  Whole   132 136 132     221 217 207 <---
 */
#define CACHE_DLIMIT	(CACHE_DSIZE * 4)
.data
@ Virtual address of the dedicated flush area used by __flush_whole_cache.
@ Toggled between two CACHE_DSIZE-sized halves on each whole-cache flush
@ (see the eor #CACHE_DSIZE below), so successive flushes sweep fresh lines.
flush_base:
	.long	FLUSH_BASE
	.text
/*
 *	flush_icache_all()
 *
 *	Unconditionally clean and invalidate the entire icache.
 *	Clobbers: r0.
 */
ENTRY(v4wb_flush_icache_all)
	mov	r0, #0
	mcr	p15, 0, r0, c7, c5, 0		@ invalidate I cache
	ret	lr
ENDPROC(v4wb_flush_icache_all)
/*
 *	flush_user_cache_all()
 *
 *	Clean and invalidate all cache entries in a particular
 *	address space.  The v4wb cache is virtually indexed with no
 *	per-ASID ops, so this is identical to the kernel variant.
 */
ENTRY(v4wb_flush_user_cache_all)
	/* FALLTHROUGH */
/*
 *	flush_kern_cache_all()
 *
 *	Clean and invalidate the entire cache.
 *
 *	The D cache is written back and emptied by load-sweeping a
 *	dedicated flush area: each 32-byte load displaces one cache
 *	line, forcing any dirty data to memory (flush-by-read,
 *	the documented method on StrongARM).
 */
ENTRY(v4wb_flush_kern_cache_all)
	mov	ip, #0
	mcr	p15, 0, ip, c7, c5, 0		@ invalidate I cache
__flush_whole_cache:
	ldr	r3, =flush_base
	ldr	r1, [r3, #0]
	eor	r1, r1, #CACHE_DSIZE		@ flip to the other flush half so
	str	r1, [r3, #0]			@ every sweep touches cold lines
	add	r2, r1, #CACHE_DSIZE		@ r2 = end of sweep region
1:	ldr	r3, [r1], #32			@ one load per cache line evicts it
	cmp	r1, r2
	blo	1b
#ifdef FLUSH_BASE_MINICACHE
	@ SA-1100 has a 512-byte minicache mapped at FLUSH_BASE_MINICACHE;
	@ sweep it the same way.
	add	r2, r2, #FLUSH_BASE_MINICACHE - FLUSH_BASE
	sub	r1, r2, #512			@ only 512 bytes
1:	ldr	r3, [r1], #32
	cmp	r1, r2
	blo	1b
#endif
	mcr	p15, 0, ip, c7, c10, 4		@ drain write buffer
	ret	lr
/*
 *	flush_user_cache_range(start, end, flags)
 *
 *	Invalidate a range of cache entries in the specified
 *	address space.  Falls back to a whole-cache flush when the
 *	range is at least CACHE_DLIMIT bytes (cheaper — see table above).
 *
 *	- start - start address (inclusive, page aligned)
 *	- end   - end address (exclusive, page aligned)
 *	- flags - vma_area_struct flags describing address space
 */
ENTRY(v4wb_flush_user_cache_range)
	mov	ip, #0
	sub	r3, r1, r0			@ r3 = total size of range
	tst	r2, #VM_EXEC			@ executable region?
	mcrne	p15, 0, ip, c7, c5, 0		@ yes: invalidate I cache too
	cmp	r3, #CACHE_DLIMIT		@ total size >= limit?
	bhs	__flush_whole_cache		@ flush whole D cache instead
1:	mcr	p15, 0, r0, c7, c10, 1		@ clean D entry
	mcr	p15, 0, r0, c7, c6, 1		@ invalidate D entry
	add	r0, r0, #CACHE_DLINESIZE
	cmp	r0, r1
	blo	1b
	tst	r2, #VM_EXEC
	mcrne	p15, 0, ip, c7, c10, 4		@ drain write buffer
	ret	lr
/*
 *	flush_kern_dcache_area(void *addr, size_t size)
 *
 *	Ensure no D cache aliasing occurs, either with itself or
 *	the I cache.
 *
 *	- addr - kernel address
 *	- size - region size
 *
 *	Converts (addr, size) to the (start, end) form expected by
 *	v4wb_coherent_kern_range and falls through into it.
 */
ENTRY(v4wb_flush_kern_dcache_area)
	add	r1, r0, r1			@ r1 = end = addr + size
	/* fall through */
/*
 *	coherent_kern_range(start, end)
 *
 *	Ensure coherency between the Icache and the Dcache in the
 *	region described by start.  If you have non-snooping
 *	Harvard caches, you need to implement this function.
 *
 *	- start - virtual start address
 *	- end   - virtual end address
 */
ENTRY(v4wb_coherent_kern_range)
	/* fall through */
/*
 *	coherent_user_range(start, end)
 *
 *	Ensure coherency between the Icache and the Dcache in the
 *	region described by start.  If you have non-snooping
 *	Harvard caches, you need to implement this function.
 *
 *	- start - virtual start address
 *	- end   - virtual end address
 *
 *	Returns 0 (success) in r0.
 */
ENTRY(v4wb_coherent_user_range)
	bic	r0, r0, #CACHE_DLINESIZE - 1	@ align start down to a line
1:	mcr	p15, 0, r0, c7, c10, 1		@ clean D entry
	mcr	p15, 0, r0, c7, c6, 1		@ invalidate D entry
	add	r0, r0, #CACHE_DLINESIZE
	cmp	r0, r1
	blo	1b
	mov	r0, #0				@ return success
	mcr	p15, 0, r0, c7, c5, 0		@ invalidate I cache
	mcr	p15, 0, r0, c7, c10, 4		@ drain WB
	ret	lr
/*
 *	dma_inv_range(start, end)
 *
 *	Invalidate (discard) the specified virtual address range.
 *	May not write back any entries.  If 'start' or 'end'
 *	are not cache line aligned, those lines must be written
 *	back first so data sharing the partial line is not lost.
 *
 *	- start - virtual start address
 *	- end   - virtual end address
 */
v4wb_dma_inv_range:
	tst	r0, #CACHE_DLINESIZE - 1	@ start misaligned?
	bic	r0, r0, #CACHE_DLINESIZE - 1
	mcrne	p15, 0, r0, c7, c10, 1		@ yes: clean boundary D entry
	tst	r1, #CACHE_DLINESIZE - 1	@ end misaligned?
	mcrne	p15, 0, r1, c7, c10, 1		@ yes: clean boundary D entry
1:	mcr	p15, 0, r0, c7, c6, 1		@ invalidate D entry
	add	r0, r0, #CACHE_DLINESIZE
	cmp	r0, r1
	blo	1b
	mcr	p15, 0, r0, c7, c10, 4		@ drain write buffer
	ret	lr
/*
 *	dma_clean_range(start, end)
 *
 *	Clean (write back) the specified virtual address range.
 *	Entries remain valid in the cache.
 *
 *	- start - virtual start address
 *	- end   - virtual end address
 */
v4wb_dma_clean_range:
	bic	r0, r0, #CACHE_DLINESIZE - 1	@ align start down to a line
1:	mcr	p15, 0, r0, c7, c10, 1		@ clean D entry
	add	r0, r0, #CACHE_DLINESIZE
	cmp	r0, r1
	blo	1b
	mcr	p15, 0, r0, c7, c10, 4		@ drain write buffer
	ret	lr
/*
 *	dma_flush_range(start, end)
 *
 *	Clean and invalidate the specified virtual address range.
 *
 *	- start - virtual start address
 *	- end   - virtual end address
 *
 *	This is actually the same as v4wb_coherent_kern_range(),
 *	so alias it rather than duplicating the code.
 */
	.globl	v4wb_dma_flush_range
	.set	v4wb_dma_flush_range, v4wb_coherent_kern_range
/*
 *	dma_map_area(start, size, dir)
 *	- start - kernel virtual start address
 *	- size  - size of region
 *	- dir   - DMA direction
 *
 *	Dispatch relies on the ordering of the DMA direction values
 *	(DMA_BIDIRECTIONAL < DMA_TO_DEVICE < DMA_FROM_DEVICE, per
 *	the dma-direction definitions): eq -> clean, hs -> invalidate,
 *	otherwise (bidirectional) -> clean+invalidate.
 */
ENTRY(v4wb_dma_map_area)
	add	r1, r1, r0			@ r1 = end = start + size
	cmp	r2, #DMA_TO_DEVICE
	beq	v4wb_dma_clean_range		@ to device: write back only
	bcs	v4wb_dma_inv_range		@ from device: discard
	b	v4wb_dma_flush_range		@ bidirectional: clean + invalidate
ENDPROC(v4wb_dma_map_area)
/*
 *	dma_unmap_area(start, size, dir)
 *	- start - kernel virtual start address
 *	- size  - size of region
 *	- dir   - DMA direction
 *
 *	Nothing to do: all required maintenance was done at map time.
 */
ENTRY(v4wb_dma_unmap_area)
	ret	lr
ENDPROC(v4wb_dma_unmap_area)
@ v4wb has no cache-level hierarchy, so flushing to the Level of
@ Unification Inner Shareable is the same as flushing everything.
	.globl	v4wb_flush_kern_cache_louis
	.equ	v4wb_flush_kern_cache_louis, v4wb_flush_kern_cache_all

	__INITDATA

	@ define struct cpu_cache_fns (see <asm/cacheflush.h> and proc-macros.S)
	define_cache_functions v4wb