/* arch/powerpc/include/asm/cache.h — L1 cache geometry and cache-control helpers */
  1. #ifndef _ASM_POWERPC_CACHE_H
  2. #define _ASM_POWERPC_CACHE_H
  3. #ifdef __KERNEL__
  4. /* bytes per L1 cache line */
  5. #if defined(CONFIG_8xx) || defined(CONFIG_403GCX)
  6. #define L1_CACHE_SHIFT 4
  7. #define MAX_COPY_PREFETCH 1
  8. #elif defined(CONFIG_PPC_E500MC)
  9. #define L1_CACHE_SHIFT 6
  10. #define MAX_COPY_PREFETCH 4
  11. #elif defined(CONFIG_PPC32)
  12. #define MAX_COPY_PREFETCH 4
  13. #if defined(CONFIG_PPC_47x)
  14. #define L1_CACHE_SHIFT 7
  15. #else
  16. #define L1_CACHE_SHIFT 5
  17. #endif
  18. #else /* CONFIG_PPC64 */
  19. #define L1_CACHE_SHIFT 7
  20. #endif
  21. #define L1_CACHE_BYTES (1 << L1_CACHE_SHIFT)
  22. #define SMP_CACHE_BYTES L1_CACHE_BYTES
  23. #if defined(__powerpc64__) && !defined(__ASSEMBLY__)
  24. struct ppc64_caches {
  25. u32 dsize; /* L1 d-cache size */
  26. u32 dline_size; /* L1 d-cache line size */
  27. u32 log_dline_size;
  28. u32 dlines_per_page;
  29. u32 isize; /* L1 i-cache size */
  30. u32 iline_size; /* L1 i-cache line size */
  31. u32 log_iline_size;
  32. u32 ilines_per_page;
  33. };
  34. extern struct ppc64_caches ppc64_caches;
  35. #endif /* __powerpc64__ && ! __ASSEMBLY__ */
  36. #if defined(__ASSEMBLY__)
  37. /*
  38. * For a snooping icache, we still need a dummy icbi to purge all the
  39. * prefetched instructions from the ifetch buffers. We also need a sync
  40. * before the icbi to order the the actual stores to memory that might
  41. * have modified instructions with the icbi.
  42. */
  43. #define PURGE_PREFETCHED_INS \
  44. sync; \
  45. icbi 0,r3; \
  46. sync; \
  47. isync
  48. #else
  49. #define __read_mostly __attribute__((__section__(".data..read_mostly")))
  50. #ifdef CONFIG_6xx
  51. extern long _get_L2CR(void);
  52. extern long _get_L3CR(void);
  53. extern void _set_L2CR(unsigned long);
  54. extern void _set_L3CR(unsigned long);
  55. #else
  56. #define _get_L2CR() 0L
  57. #define _get_L3CR() 0L
  58. #define _set_L2CR(val) do { } while(0)
  59. #define _set_L3CR(val) do { } while(0)
  60. #endif
  61. static inline void dcbz(void *addr)
  62. {
  63. __asm__ __volatile__ ("dcbz 0, %0" : : "r"(addr) : "memory");
  64. }
  65. static inline void dcbi(void *addr)
  66. {
  67. __asm__ __volatile__ ("dcbi 0, %0" : : "r"(addr) : "memory");
  68. }
  69. static inline void dcbf(void *addr)
  70. {
  71. __asm__ __volatile__ ("dcbf 0, %0" : : "r"(addr) : "memory");
  72. }
  73. static inline void dcbst(void *addr)
  74. {
  75. __asm__ __volatile__ ("dcbst 0, %0" : : "r"(addr) : "memory");
  76. }
  77. #endif /* !__ASSEMBLY__ */
  78. #endif /* __KERNEL__ */
  79. #endif /* _ASM_POWERPC_CACHE_H */