#ifndef _ASM_X86_SPECIAL_INSNS_H
#define _ASM_X86_SPECIAL_INSNS_H

#ifdef __KERNEL__
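
/*
 * CLTS clears the Task-Switched (TS) flag in CR0. The kernel uses TS for
 * lazy FPU context switching: with TS set, the next FPU/SIMD instruction
 * raises a device-not-available (#NM) fault.
 */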
static inline void native_clts(void)
{
        asm volatile("clts");
}

/*
 * Volatile isn't enough to prevent the compiler from reordering the
 * read/write functions for the control registers and messing everything up.
 * A memory clobber would solve the problem, but would prevent reordering of
 * all loads/stores around it, which can hurt performance. The solution is to
 * use a variable and mimic reads and writes to it to enforce serialization.
 */
static unsigned long __force_order;
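
/*
 * Each register read below declares __force_order as an "=m" output and
 * each write declares it as an "m" input, so successive reads and writes
 * carry a dependency through the dummy variable and stay in program order
 * without a full memory clobber.
 */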
static inline unsigned long native_read_cr0(void)
{
        unsigned long val;
        asm volatile("mov %%cr0,%0\n\t" : "=r" (val), "=m" (__force_order));
        return val;
}

static inline void native_write_cr0(unsigned long val)
{
        asm volatile("mov %0,%%cr0": : "r" (val), "m" (__force_order));
}
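
/* CR2 holds the linear address that caused the most recent page fault. */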
static inline unsigned long native_read_cr2(void)
{
        unsigned long val;
        asm volatile("mov %%cr2,%0\n\t" : "=r" (val), "=m" (__force_order));
        return val;
}

static inline void native_write_cr2(unsigned long val)
{
        asm volatile("mov %0,%%cr2": : "r" (val), "m" (__force_order));
}
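
/*
 * CR3 holds the physical base of the top-level page table; writing it
 * switches address spaces and flushes non-global TLB entries.
 */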
static inline unsigned long native_read_cr3(void)
{
        unsigned long val;
        asm volatile("mov %%cr3,%0\n\t" : "=r" (val), "=m" (__force_order));
        return val;
}

static inline void native_write_cr3(unsigned long val)
{
        asm volatile("mov %0,%%cr3": : "r" (val), "m" (__force_order));
}
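
/* CR4 holds per-CPU feature-enable bits (PAE, PGE, OSFXSR, ...). */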
static inline unsigned long native_read_cr4(void)
{
        unsigned long val;
        asm volatile("mov %%cr4,%0\n\t" : "=r" (val), "=m" (__force_order));
        return val;
}

static inline unsigned long native_read_cr4_safe(void)
{
        unsigned long val;
        /*
         * This could fault on CPUs where CR4 does not exist; the exception
         * table entry resumes at 2: with val still holding the preloaded 0.
         * On x86_64, CR4 always exists, so this can never fail.
         */
#ifdef CONFIG_X86_32
        asm volatile("1: mov %%cr4, %0\n"
                     "2:\n"
                     _ASM_EXTABLE(1b, 2b)
                     : "=r" (val), "=m" (__force_order) : "0" (0));
#else
        val = native_read_cr4();
#endif
        return val;
}

static inline void native_write_cr4(unsigned long val)
{
        asm volatile("mov %0,%%cr4": : "r" (val), "m" (__force_order));
}
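
/*
 * CR8 (64-bit only) is the Task Priority Register; it mirrors the local
 * APIC TPR and masks interrupts at or below the programmed priority.
 */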
#ifdef CONFIG_X86_64
static inline unsigned long native_read_cr8(void)
{
        unsigned long cr8;
        asm volatile("movq %%cr8,%0" : "=r" (cr8));
        return cr8;
}

static inline void native_write_cr8(unsigned long val)
{
        asm volatile("movq %0,%%cr8" :: "r" (val) : "memory");
}
#endif
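
/*
 * WBINVD writes all modified cache lines back to memory and invalidates
 * the caches; it is serializing and very expensive.
 */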
static inline void native_wbinvd(void)
{
        asm volatile("wbinvd": : :"memory");
}

extern void native_load_gs_index(unsigned);
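
/*
 * With CONFIG_PARAVIRT these accessors are provided by asm/paravirt.h as
 * paravirt operations; otherwise they are plain wrappers around the
 * native versions above.
 */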
#ifdef CONFIG_PARAVIRT
#include <asm/paravirt.h>
#else

static inline unsigned long read_cr0(void)
{
        return native_read_cr0();
}

static inline void write_cr0(unsigned long x)
{
        native_write_cr0(x);
}

static inline unsigned long read_cr2(void)
{
        return native_read_cr2();
}

static inline void write_cr2(unsigned long x)
{
        native_write_cr2(x);
}

static inline unsigned long read_cr3(void)
{
        return native_read_cr3();
}

static inline void write_cr3(unsigned long x)
{
        native_write_cr3(x);
}

static inline unsigned long read_cr4(void)
{
        return native_read_cr4();
}

static inline unsigned long read_cr4_safe(void)
{
        return native_read_cr4_safe();
}

static inline void write_cr4(unsigned long x)
{
        native_write_cr4(x);
}

static inline void wbinvd(void)
{
        native_wbinvd();
}

#ifdef CONFIG_X86_64

static inline unsigned long read_cr8(void)
{
        return native_read_cr8();
}

static inline void write_cr8(unsigned long x)
{
        native_write_cr8(x);
}

static inline void load_gs_index(unsigned selector)
{
        native_load_gs_index(selector);
}

#endif

/* Clear the 'TS' bit */
static inline void clts(void)
{
        native_clts();
}

#endif /* CONFIG_PARAVIRT */
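
/* stts() sets CR0.TS, so the next FPU/SIMD instruction will trap. */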
#define stts() write_cr0(read_cr0() | X86_CR0_TS)
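
/*
 * Illustrative only (not part of this header): lazy FPU handling pairs
 * stts() when switching away from a task that used the FPU with clts()
 * in the device-not-available (#NM) handler, e.g.:
 *
 *      stts();         // arm the trap: next FPU insn raises #NM
 *      ...
 *      clts();         // #NM handler: allow FPU use, then restore state
 */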

static inline void clflush(volatile void *__p)
{
        /*
         * "+m" makes the flushed location both input and output, so the
         * compiler will not move its own accesses to that line across
         * the clflush.
         */
        asm volatile("clflush %0" : "+m" (*(volatile char __force *)__p));
}

#define nop() asm volatile ("nop")

#endif /* __KERNEL__ */
#endif /* _ASM_X86_SPECIAL_INSNS_H */