cache.S

/*
 * Cache maintenance
 *
 * Copyright (C) 2001 Deep Blue Solutions Ltd.
 * Copyright (C) 2012 ARM Ltd.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */

#include <linux/errno.h>
#include <linux/linkage.h>
#include <linux/init.h>
#include <asm/assembler.h>
#include <asm/cpufeature.h>
#include <asm/alternative.h>

/*
 *	flush_icache_range(start,end)
 *
 *	Ensure that the I and D caches are coherent within specified region.
 *	This is typically used when code has been written to a memory region,
 *	and will be executed.
 *
 *	- start   - virtual start address of region
 *	- end     - virtual end address of region
 */
ENTRY(flush_icache_range)
	/* FALLTHROUGH */

/*
 *	__flush_cache_user_range(start,end)
 *
 *	Ensure that the I and D caches are coherent within specified region.
 *	This is typically used when code has been written to a memory region,
 *	and will be executed.
 *
 *	- start   - virtual start address of region
 *	- end     - virtual end address of region
 */
ENTRY(__flush_cache_user_range)
	/* clean the D-cache lines covering [start, end) to the PoU */
	dcache_line_size x2, x3
	sub	x3, x2, #1
	bic	x4, x0, x3
1:
user_alt 9f, "dc cvau, x4",  "dc civac, x4",  ARM64_WORKAROUND_CLEAN_CACHE
	add	x4, x4, x2
	cmp	x4, x1
	b.lo	1b
	dsb	ish

	/* invalidate the I-cache lines covering [start, end) to the PoU */
	icache_line_size x2, x3
	sub	x3, x2, #1
	bic	x4, x0, x3
1:
USER(9f, ic	ivau, x4 )			// invalidate I line PoU
	add	x4, x4, x2
	cmp	x4, x1
	b.lo	1b
	dsb	ish
	isb
	mov	x0, #0				// success
	ret
9:
	mov	x0, #-EFAULT			// fault on user address
	ret
ENDPROC(flush_icache_range)
ENDPROC(__flush_cache_user_range)
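
/*
 * Usage sketch (illustrative only; 'dst', 'insns' and 'len' are hypothetical
 * names): after writing instructions into memory that will later be executed,
 * a caller is expected to make the range coherent before branching to it:
 *
 *	memcpy(dst, insns, len);
 *	flush_icache_range((unsigned long)dst, (unsigned long)dst + len);
 */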

/*
 *	__flush_dcache_area(kaddr, size)
 *
 *	Ensure that any D-cache lines for the interval [kaddr, kaddr+size)
 *	are cleaned and invalidated to the PoC.
 *
 *	- kaddr   - kernel address
 *	- size    - size in question
 */
ENTRY(__flush_dcache_area)
	dcache_by_line_op civac, sy, x0, x1, x2, x3
	ret
ENDPIPROC(__flush_dcache_area)
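
/*
 * Usage sketch (illustrative only; 'desc' and 'init_descriptor' are
 * hypothetical): cleaning and invalidating to the PoC makes the in-memory
 * copy visible to an agent that reads DRAM directly, bypassing the caches:
 *
 *	init_descriptor(desc);
 *	__flush_dcache_area(desc, sizeof(*desc));
 */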

/*
 *	__clean_dcache_area_pou(kaddr, size)
 *
 *	Ensure that any D-cache lines for the interval [kaddr, kaddr+size)
 *	are cleaned to the PoU.
 *
 *	- kaddr   - kernel address
 *	- size    - size in question
 */
ENTRY(__clean_dcache_area_pou)
	dcache_by_line_op cvau, ish, x0, x1, x2, x3
	ret
ENDPROC(__clean_dcache_area_pou)

/*
 *	__dma_inv_area(start, size)
 *	- start   - virtual start address of region
 *	- size    - size in question
 */
__dma_inv_area:
	add	x1, x1, x0			// convert size to end address
	/* FALLTHROUGH */

/*
 *	__inval_cache_range(start, end)
 *	- start   - start address of region
 *	- end     - end address of region
 */
ENTRY(__inval_cache_range)
	dcache_line_size x2, x3
	sub	x3, x2, #1
	tst	x1, x3				// end cache line aligned?
	bic	x1, x1, x3
	b.eq	1f
	dc	civac, x1			// clean & invalidate D / U line
1:	tst	x0, x3				// start cache line aligned?
	bic	x0, x0, x3
	b.eq	2f
	dc	civac, x0			// clean & invalidate D / U line
	b	3f
2:	dc	ivac, x0			// invalidate D / U line
3:	add	x0, x0, x2
	cmp	x0, x1
	b.lo	2b
	dsb	sy
	ret
ENDPIPROC(__inval_cache_range)
ENDPROC(__dma_inv_area)
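
/*
 * Note on the two entry points above: __dma_inv_area takes (start, size) and
 * converts it into the (start, end) form expected by __inval_cache_range, so
 * the following describe the same region (illustrative only):
 *
 *	__inval_cache_range(addr, addr + size);
 *	__dma_inv_area(addr, size);
 *
 * Cache lines that straddle the start or end of the region are cleaned as
 * well as invalidated (dc civac) so that unrelated data sharing those lines
 * is not discarded; lines wholly inside the region are simply invalidated
 * (dc ivac).
 */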

/*
 *	__clean_dcache_area_poc(kaddr, size)
 *
 *	Ensure that any D-cache lines for the interval [kaddr, kaddr+size)
 *	are cleaned to the PoC.
 *
 *	- kaddr   - kernel address
 *	- size    - size in question
 */
ENTRY(__clean_dcache_area_poc)
	/* FALLTHROUGH */

/*
 *	__dma_clean_area(start, size)
 *	- start   - virtual start address of region
 *	- size    - size in question
 */
__dma_clean_area:
	dcache_by_line_op cvac, sy, x0, x1, x2, x3
	ret
ENDPIPROC(__clean_dcache_area_poc)
ENDPROC(__dma_clean_area)

/*
 *	__dma_flush_area(start, size)
 *
 *	Ensure that any D-cache lines for the interval [start, start+size)
 *	are cleaned and invalidated to the PoC.
 *
 *	- start   - virtual start address of region
 *	- size    - size in question
 */
ENTRY(__dma_flush_area)
	dcache_by_line_op civac, sy, x0, x1, x2, x3
	ret
ENDPIPROC(__dma_flush_area)
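
/*
 * Usage sketch (illustrative only; 'buf' and 'size' are hypothetical): for a
 * buffer that the CPU has written and that a non-coherent device will both
 * read and update, the lines must be cleaned (so the device sees the CPU's
 * data) and invalidated (so the CPU later re-reads the device's data);
 * __dma_flush_area does both in a single pass:
 *
 *	__dma_flush_area(buf, size);
 */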

/*
 *	__dma_map_area(start, size, dir)
 *	- start	- kernel virtual start address
 *	- size	- size of region
 *	- dir	- DMA direction
 */
ENTRY(__dma_map_area)
	cmp	w2, #DMA_FROM_DEVICE
	b.eq	__dma_inv_area			// device writes: invalidate
	b	__dma_clean_area		// device reads: clean to PoC
ENDPIPROC(__dma_map_area)

/*
 *	__dma_unmap_area(start, size, dir)
 *	- start	- kernel virtual start address
 *	- size	- size of region
 *	- dir	- DMA direction
 */
ENTRY(__dma_unmap_area)
	cmp	w2, #DMA_TO_DEVICE
	b.ne	__dma_inv_area			// device may have written: invalidate
	ret					// DMA_TO_DEVICE: nothing to do
ENDPIPROC(__dma_unmap_area)
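
/*
 * Summary of the streaming DMA policy implemented by the two helpers above:
 *
 *	__dma_map_area:    DMA_FROM_DEVICE         -> invalidate (__dma_inv_area)
 *	                   DMA_TO_DEVICE / BIDIR   -> clean      (__dma_clean_area)
 *	__dma_unmap_area:  DMA_TO_DEVICE           -> no cache maintenance
 *	                   otherwise               -> invalidate (__dma_inv_area)
 */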