@@ -231,6 +231,20 @@ static inline void native_write_cr4(unsigned long val)
 	asm volatile("mov %0,%%cr4": :"r" (val), "m" (__force_order));
 }
 
+#ifdef CONFIG_X86_64
+static inline unsigned long native_read_cr8(void)
+{
+	unsigned long cr8;
+	asm volatile("movq %%cr8,%0" : "=r" (cr8));
+	return cr8;
+}
+
+static inline void native_write_cr8(unsigned long val)
+{
+	asm volatile("movq %0,%%cr8" :: "r" (val) : "memory");
+}
+#endif
+
 static inline void native_wbinvd(void)
 {
 	asm volatile("wbinvd": : :"memory");
@@ -248,21 +262,9 @@ static inline void native_wbinvd(void)
 #define read_cr4_safe()	(native_read_cr4_safe())
 #define write_cr4(x)	(native_write_cr4(x))
 #define wbinvd()	(native_wbinvd())
-
 #ifdef CONFIG_X86_64
-
-static inline unsigned long read_cr8(void)
-{
-	unsigned long cr8;
-	asm volatile("movq %%cr8,%0" : "=r" (cr8));
-	return cr8;
-}
-
-static inline void write_cr8(unsigned long val)
-{
-	asm volatile("movq %0,%%cr8" :: "r" (val) : "memory");
-}
-
+#define read_cr8()	(native_read_cr8())
+#define write_cr8(x)	(native_write_cr8(x))
 #endif
 
 /* Clear the 'TS' bit */
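
On x86-64, CR8 is an alias for the local APIC task-priority register (TPR): writing a value blocks delivery of maskable interrupt vectors whose priority class is at or below it. A minimal usage sketch of the read_cr8()/write_cr8() wrappers above, which after this patch expand to the native_ variants; the helper name and the priority value 15 are illustrative only, not part of the patch:

static void block_interrupts_via_tpr(void)	/* hypothetical helper */
{
	unsigned long old_tpr = read_cr8();	/* expands to native_read_cr8() */

	write_cr8(15);	/* 15 = highest priority class; masks all maskable vectors */
	/* ... work that must not be disturbed by lower-priority interrupts ... */
	write_cr8(old_tpr);	/* restore the saved task priority */
}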