special_insns.h

#ifndef _ASM_X86_SPECIAL_INSNS_H
#define _ASM_X86_SPECIAL_INSNS_H

#ifdef __KERNEL__

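/* CLTS clears CR0.TS, so FPU/SIMD instructions no longer raise #NM. */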
static inline void native_clts(void)
{
	asm volatile("clts");
}

/*
 * Volatile isn't enough to prevent the compiler from reordering the
 * read/write functions for the control registers and messing everything up.
 * A memory clobber would solve the problem, but would prevent reordering of
 * all loads and stores around it, which can hurt performance. The solution is
 * to use a variable and mimic reads and writes to it to enforce serialization.
 */
static unsigned long __force_order;

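/*
 * The idea: each CR accessor below lists __force_order as an extra memory
 * operand ("=m" on reads, "m" on writes), so the compiler sees a shared
 * dependency and keeps the accesses in program order without needing a
 * full "memory" clobber. The variable itself is never really touched.
 */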
static inline unsigned long native_read_cr0(void)
{
	unsigned long val;
	asm volatile("mov %%cr0,%0\n\t" : "=r" (val), "=m" (__force_order));
	return val;
}

static inline void native_write_cr0(unsigned long val)
{
	asm volatile("mov %0,%%cr0": : "r" (val), "m" (__force_order));
}

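/* CR2 holds the linear address that caused the most recent page fault. */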
static inline unsigned long native_read_cr2(void)
{
	unsigned long val;
	asm volatile("mov %%cr2,%0\n\t" : "=r" (val), "=m" (__force_order));
	return val;
}

static inline void native_write_cr2(unsigned long val)
{
	asm volatile("mov %0,%%cr2": : "r" (val), "m" (__force_order));
}

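/*
 * CR3 holds the physical base address of the top-level page table;
 * writing it flushes all non-global TLB entries.
 */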
static inline unsigned long native_read_cr3(void)
{
	unsigned long val;
	asm volatile("mov %%cr3,%0\n\t" : "=r" (val), "=m" (__force_order));
	return val;
}

static inline void native_write_cr3(unsigned long val)
{
	asm volatile("mov %0,%%cr3": : "r" (val), "m" (__force_order));
}

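/* CR4 holds architectural feature-enable flags (PAE, PGE, OSFXSR, ...). */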
static inline unsigned long native_read_cr4(void)
{
	unsigned long val;
	asm volatile("mov %%cr4,%0\n\t" : "=r" (val), "=m" (__force_order));
	return val;
}

static inline unsigned long native_read_cr4_safe(void)
{
	unsigned long val;
	/*
	 * This could fault if %cr4 does not exist. On x86_64, %cr4 always
	 * exists, so it will never fault.
	 */
#ifdef CONFIG_X86_32
	asm volatile("1: mov %%cr4, %0\n"
		     "2:\n"
		     _ASM_EXTABLE(1b, 2b)
		     : "=r" (val), "=m" (__force_order) : "0" (0));
#else
	val = native_read_cr4();
#endif
	return val;
}

static inline void native_write_cr4(unsigned long val)
{
	asm volatile("mov %0,%%cr4": : "r" (val), "m" (__force_order));
}

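/* CR8 (64-bit only) mirrors the local APIC Task Priority Register (TPR). */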
#ifdef CONFIG_X86_64
static inline unsigned long native_read_cr8(void)
{
	unsigned long cr8;
	asm volatile("movq %%cr8,%0" : "=r" (cr8));
	return cr8;
}

static inline void native_write_cr8(unsigned long val)
{
	asm volatile("movq %0,%%cr8" :: "r" (val) : "memory");
}
#endif

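/* WBINVD writes back and invalidates all caches; it is extremely slow. */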
static inline void native_wbinvd(void)
{
	asm volatile("wbinvd": : :"memory");
}

extern void native_load_gs_index(unsigned);

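/*
 * With CONFIG_PARAVIRT, asm/paravirt.h provides these wrappers as hooks a
 * hypervisor can override; otherwise they map straight onto the native
 * helpers above.
 */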
#ifdef CONFIG_PARAVIRT
#include <asm/paravirt.h>
#else

static inline unsigned long read_cr0(void)
{
	return native_read_cr0();
}

static inline void write_cr0(unsigned long x)
{
	native_write_cr0(x);
}

static inline unsigned long read_cr2(void)
{
	return native_read_cr2();
}

static inline void write_cr2(unsigned long x)
{
	native_write_cr2(x);
}

static inline unsigned long read_cr3(void)
{
	return native_read_cr3();
}

static inline void write_cr3(unsigned long x)
{
	native_write_cr3(x);
}

static inline unsigned long read_cr4(void)
{
	return native_read_cr4();
}

static inline unsigned long read_cr4_safe(void)
{
	return native_read_cr4_safe();
}

static inline void write_cr4(unsigned long x)
{
	native_write_cr4(x);
}

static inline void wbinvd(void)
{
	native_wbinvd();
}

#ifdef CONFIG_X86_64

static inline unsigned long read_cr8(void)
{
	return native_read_cr8();
}

static inline void write_cr8(unsigned long x)
{
	native_write_cr8(x);
}

static inline void load_gs_index(unsigned selector)
{
	native_load_gs_index(selector);
}

#endif

/* Clear the 'TS' bit */
static inline void clts(void)
{
	native_clts();
}

#endif /* CONFIG_PARAVIRT */

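/* Set CR0.TS again so the next FPU/SIMD instruction traps (counterpart to clts()). */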
#define stts() write_cr0(read_cr0() | X86_CR0_TS)

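/*
 * CLFLUSH evicts the cache line containing *__p from every level of the
 * cache hierarchy. The "+m" constraint tells the compiler the line is
 * both read and written, so accesses to it are not reordered or dropped
 * around the flush.
 */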
static inline void clflush(volatile void *__p)
{
	asm volatile("clflush %0" : "+m" (*(volatile char __force *)__p));
}

#define nop() asm volatile ("nop")

#endif /* __KERNEL__ */
#endif /* _ASM_X86_SPECIAL_INSNS_H */