system.h

#ifndef __ASM_SH_SYSTEM_H
#define __ASM_SH_SYSTEM_H

/*
 * Copyright (C) 1999, 2000 Niibe Yutaka & Kaz Kojima
 * Copyright (C) 2002 Paul Mundt
 */

#include <linux/irqflags.h>
#include <linux/compiler.h>
#include <linux/linkage.h>
#include <asm/types.h>
#include <asm/uncached.h>

#define AT_VECTOR_SIZE_ARCH 5 /* entries in ARCH_DLINFO */
/*
 * A brief note on ctrl_barrier(), the control register write barrier.
 *
 * Legacy SH cores typically require a sequence of 8 nops after
 * modification of a control register in order for the changes to take
 * effect. On newer cores (like the sh4a and sh5) this is accomplished
 * with icbi.
 *
 * Also note that on sh4a in the icbi case we can forego a synco for the
 * write barrier, as it's not necessary for control registers.
 *
 * Historically we have only done this type of barrier for the MMUCR, but
 * it's also necessary for the CCR, so we make it generic here instead.
 */
#if defined(CONFIG_CPU_SH4A) || defined(CONFIG_CPU_SH5)
#define mb()			__asm__ __volatile__ ("synco": : :"memory")
#define rmb()			mb()
#define wmb()			__asm__ __volatile__ ("synco": : :"memory")
#define ctrl_barrier()		__icbi(PAGE_OFFSET)
#define read_barrier_depends()	do { } while(0)
#else
#define mb()			__asm__ __volatile__ ("": : :"memory")
#define rmb()			mb()
#define wmb()			__asm__ __volatile__ ("": : :"memory")
#define ctrl_barrier()		__asm__ __volatile__ ("nop;nop;nop;nop;nop;nop;nop;nop")
#define read_barrier_depends()	do { } while(0)
#endif
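
/*
 * Illustrative usage (a sketch, not part of the original header): code
 * that modifies a control register such as MMUCR or CCR is expected to
 * issue ctrl_barrier() before relying on the new setting. The register
 * write shown here is hypothetical and only for illustration:
 *
 *	__raw_writel(flags, MMUCR);
 *	ctrl_barrier();
 *
 * On sh4a/sh5 the barrier expands to an icbi; on legacy parts it expands
 * to the 8-nop sequence described above.
 */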

#ifdef CONFIG_SMP
#define smp_mb()			mb()
#define smp_rmb()			rmb()
#define smp_wmb()			wmb()
#define smp_read_barrier_depends()	read_barrier_depends()
#else
#define smp_mb()			barrier()
#define smp_rmb()			barrier()
#define smp_wmb()			barrier()
#define smp_read_barrier_depends()	do { } while(0)
#endif
#define set_mb(var, value) do { (void)xchg(&var, value); } while (0)

#ifdef CONFIG_GUSA_RB
#include <asm/cmpxchg-grb.h>
#elif defined(CONFIG_CPU_SH4A)
#include <asm/cmpxchg-llsc.h>
#else
#include <asm/cmpxchg-irq.h>
#endif
extern void __xchg_called_with_bad_pointer(void);

#define __xchg(ptr, x, size) \
({ \
	unsigned long __xchg__res; \
	volatile void *__xchg_ptr = (ptr); \
	switch (size) { \
	case 4: \
		__xchg__res = xchg_u32(__xchg_ptr, x); \
		break; \
	case 1: \
		__xchg__res = xchg_u8(__xchg_ptr, x); \
		break; \
	default: \
		__xchg_called_with_bad_pointer(); \
		__xchg__res = x; \
		break; \
	} \
 \
	__xchg__res; \
})

#define xchg(ptr,x) \
	((__typeof__(*(ptr)))__xchg((ptr),(unsigned long)(x), sizeof(*(ptr))))
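
/*
 * Illustrative usage (a sketch, not part of the original header): xchg()
 * atomically stores a new value and returns the previous one; only 1- and
 * 4-byte objects are supported, and anything else trips the link-time
 * error declared above. The variable name is hypothetical:
 *
 *	unsigned int prev = xchg(&pending_flag, 1);
 */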

/* This function doesn't exist, so you'll get a linker error
 * if something tries to do an invalid cmpxchg(). */
extern void __cmpxchg_called_with_bad_pointer(void);

#define __HAVE_ARCH_CMPXCHG 1

static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
				      unsigned long new, int size)
{
	switch (size) {
	case 4:
		return __cmpxchg_u32(ptr, old, new);
	}
	__cmpxchg_called_with_bad_pointer();
	return old;
}

#define cmpxchg(ptr,o,n) \
({ \
	__typeof__(*(ptr)) _o_ = (o); \
	__typeof__(*(ptr)) _n_ = (n); \
	(__typeof__(*(ptr))) __cmpxchg((ptr), (unsigned long)_o_, \
				       (unsigned long)_n_, sizeof(*(ptr))); \
})
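
/*
 * Illustrative usage (a sketch, not part of the original header): a
 * compare-and-swap retry loop on a 32-bit counter. cmpxchg() returns the
 * value actually found at the location, so the update succeeded only if
 * that equals the expected old value. The variable names are hypothetical:
 *
 *	unsigned int old, new;
 *	do {
 *		old = counter;
 *		new = old + 1;
 *	} while (cmpxchg(&counter, old, new) != old);
 */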

struct pt_regs;

extern void die(const char *str, struct pt_regs *regs, long err) __attribute__ ((noreturn));

void free_initmem(void);
void free_initrd_mem(unsigned long start, unsigned long end);

extern void *set_exception_table_vec(unsigned int vec, void *handler);

static inline void *set_exception_table_evt(unsigned int evt, void *handler)
{
	return set_exception_table_vec(evt >> 5, handler);
}
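
/*
 * Illustrative note (not part of the original header): exception event
 * codes are spaced 0x20 apart, so evt >> 5 turns an EXPEVT-style event
 * code into its exception vector slot; for example, an event code of
 * 0x1e0 maps to vector 0x0f. The specific value is only an example.
 */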

/*
 * SH-2A has both 16 and 32-bit opcodes, do lame encoding checks.
 */
#ifdef CONFIG_CPU_SH2A
extern unsigned int instruction_size(unsigned int insn);
#elif defined(CONFIG_SUPERH32)
#define instruction_size(insn)	(2)
#else
#define instruction_size(insn)	(4)
#endif
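
/*
 * Illustrative usage (a sketch, not part of the original header): trap
 * fixup and single-step code uses instruction_size() to advance the
 * program counter past the opcode just handled, e.g.
 *
 *	regs->pc += instruction_size(insn);
 *
 * On plain SuperH32 parts this is always 2, elsewhere always 4, and on
 * SH-2A it is decided per opcode.
 */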

void per_cpu_trap_init(void);
void default_idle(void);
void cpu_idle_wait(void);
void stop_this_cpu(void *);

#ifdef CONFIG_SUPERH32
#define BUILD_TRAP_HANDLER(name) \
asmlinkage void name##_trap_handler(unsigned long r4, unsigned long r5, \
				    unsigned long r6, unsigned long r7, \
				    struct pt_regs __regs)

#define TRAP_HANDLER_DECL \
	struct pt_regs *regs = RELOC_HIDE(&__regs, 0); \
	unsigned int vec = regs->tra; \
	(void)vec;
#else
#define BUILD_TRAP_HANDLER(name) \
asmlinkage void name##_trap_handler(unsigned int vec, struct pt_regs *regs)
#define TRAP_HANDLER_DECL
#endif

BUILD_TRAP_HANDLER(address_error);
BUILD_TRAP_HANDLER(debug);
BUILD_TRAP_HANDLER(bug);
BUILD_TRAP_HANDLER(breakpoint);
BUILD_TRAP_HANDLER(singlestep);
BUILD_TRAP_HANDLER(fpu_error);
BUILD_TRAP_HANDLER(fpu_state_restore);
BUILD_TRAP_HANDLER(nmi);
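
/*
 * Illustrative sketch (not part of the original header): a handler body is
 * defined with the same macro, with TRAP_HANDLER_DECL pulling the pt_regs
 * pointer and trap vector into scope on SUPERH32:
 *
 *	BUILD_TRAP_HANDLER(breakpoint)
 *	{
 *		TRAP_HANDLER_DECL;
 *
 *		... handle the breakpoint using regs and vec ...
 *	}
 */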

#define arch_align_stack(x) (x)

struct mem_access {
	unsigned long (*from)(void *dst, const void __user *src, unsigned long cnt);
	unsigned long (*to)(void __user *dst, const void *src, unsigned long cnt);
};
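
/*
 * Illustrative usage (a sketch, not part of the original header): code
 * that fixes up unaligned or faulting accesses fills this in with the
 * copy routines appropriate to where the access came from, for example
 * user-space copies:
 *
 *	static struct mem_access user_mem_access = {
 *		.from	= copy_from_user,
 *		.to	= copy_to_user,
 *	};
 */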

#ifdef CONFIG_SUPERH32
# include "system_32.h"
#else
# include "system_64.h"
#endif

#endif /* __ASM_SH_SYSTEM_H */