@@ -38,35 +38,33 @@ do { \
	 */ \
	unsigned long ebx, ecx, edx, esi, edi; \
	\
-	asm volatile( \
-		"pushfl \n\t" /* save flags */ \
-		"pushl %%ebp \n\t" /* save EBP */ \
-		"movl %%esp,%[prev_sp] \n\t" /* save ESP */ \
-		"movl %[next_sp],%%esp \n\t" /* restore ESP */ \
-		"movl $1f,%[prev_ip] \n\t" /* save EIP */ \
-		"pushl %[next_ip] \n\t" /* restore EIP */ \
-		"jmp __switch_to \n" /* regparm call */ \
-		"1: \t" \
-		"popl %%ebp \n\t" /* restore EBP */ \
-		"popfl \n" /* restore flags */ \
+	asm volatile("pushfl\n\t" /* save flags */ \
+		     "pushl %%ebp\n\t" /* save EBP */ \
+		     "movl %%esp,%[prev_sp]\n\t" /* save ESP */ \
+		     "movl %[next_sp],%%esp\n\t" /* restore ESP */ \
+		     "movl $1f,%[prev_ip]\n\t" /* save EIP */ \
+		     "pushl %[next_ip]\n\t" /* restore EIP */ \
+		     "jmp __switch_to\n" /* regparm call */ \
+		     "1:\t" \
+		     "popl %%ebp\n\t" /* restore EBP */ \
+		     "popfl\n" /* restore flags */ \
	\
-		/* output parameters */ \
-		: [prev_sp] "=m" (prev->thread.sp), \
-		  [prev_ip] "=m" (prev->thread.ip), \
-		  "=a" (last), \
+		     /* output parameters */ \
+		     : [prev_sp] "=m" (prev->thread.sp), \
+		       [prev_ip] "=m" (prev->thread.ip), \
+		       "=a" (last), \
	\
-		/* clobbered output registers: */ \
-		"=b" (ebx), "=c" (ecx), "=d" (edx), \
-		"=S" (esi), "=D" (edi) \
-		\
-		/* input parameters: */ \
-		: [next_sp] "m" (next->thread.sp), \
-		  [next_ip] "m" (next->thread.ip), \
-		\
-		/* regparm parameters for __switch_to(): */ \
-		[prev] "a" (prev), \
-		[next] "d" (next) \
-	); \
+		     /* clobbered output registers: */ \
+		     "=b" (ebx), "=c" (ecx), "=d" (edx), \
+		     "=S" (esi), "=D" (edi) \
+		     \
+		     /* input parameters: */ \
+		     : [next_sp] "m" (next->thread.sp), \
+		       [next_ip] "m" (next->thread.ip), \
+		       \
+		     /* regparm parameters for __switch_to(): */ \
+		       [prev] "a" (prev), \
+		       [next] "d" (next)); \
 } while (0)
 
 /*
@@ -146,35 +144,34 @@ extern void load_gs_index(unsigned);
  */
 #define loadsegment(seg, value) \
	asm volatile("\n" \
-		"1:\t" \
-		"movl %k0,%%" #seg "\n" \
-		"2:\n" \
-		".section .fixup,\"ax\"\n" \
-		"3:\t" \
-		"movl %k1, %%" #seg "\n\t" \
-		"jmp 2b\n" \
-		".previous\n" \
-		_ASM_EXTABLE(1b,3b) \
-		: :"r" (value), "r" (0))
+		     "1:\t" \
+		     "movl %k0,%%" #seg "\n" \
+		     "2:\n" \
+		     ".section .fixup,\"ax\"\n" \
+		     "3:\t" \
+		     "movl %k1, %%" #seg "\n\t" \
+		     "jmp 2b\n" \
+		     ".previous\n" \
+		     _ASM_EXTABLE(1b,3b) \
+		     : :"r" (value), "r" (0))
 
 
 /*
  * Save a segment register away
  */
-#define savesegment(seg, value) \
+#define savesegment(seg, value)			\
	asm volatile("mov %%" #seg ",%0":"=rm" (value))
 
 static inline unsigned long get_limit(unsigned long segment)
 {
	unsigned long __limit;
-	__asm__("lsll %1,%0"
-		:"=r" (__limit):"r" (segment));
-	return __limit+1;
+	asm("lsll %1,%0" : "=r" (__limit) : "r" (segment));
+	return __limit + 1;
 }
 
 static inline void native_clts(void)
 {
-	asm volatile ("clts");
+	asm volatile("clts");
 }
 
 /*
@@ -189,43 +186,43 @@ static unsigned long __force_order;
 static inline unsigned long native_read_cr0(void)
 {
	unsigned long val;
-	asm volatile("mov %%cr0,%0\n\t" :"=r" (val), "=m" (__force_order));
+	asm volatile("mov %%cr0,%0\n\t" : "=r" (val), "=m" (__force_order));
	return val;
 }
 
 static inline void native_write_cr0(unsigned long val)
 {
-	asm volatile("mov %0,%%cr0": :"r" (val), "m" (__force_order));
+	asm volatile("mov %0,%%cr0": : "r" (val), "m" (__force_order));
 }
 
 static inline unsigned long native_read_cr2(void)
 {
	unsigned long val;
-	asm volatile("mov %%cr2,%0\n\t" :"=r" (val), "=m" (__force_order));
+	asm volatile("mov %%cr2,%0\n\t" : "=r" (val), "=m" (__force_order));
	return val;
 }
 
 static inline void native_write_cr2(unsigned long val)
 {
-	asm volatile("mov %0,%%cr2": :"r" (val), "m" (__force_order));
+	asm volatile("mov %0,%%cr2": : "r" (val), "m" (__force_order));
 }
 
 static inline unsigned long native_read_cr3(void)
 {
	unsigned long val;
-	asm volatile("mov %%cr3,%0\n\t" :"=r" (val), "=m" (__force_order));
+	asm volatile("mov %%cr3,%0\n\t" : "=r" (val), "=m" (__force_order));
	return val;
 }
 
 static inline void native_write_cr3(unsigned long val)
 {
-	asm volatile("mov %0,%%cr3": :"r" (val), "m" (__force_order));
+	asm volatile("mov %0,%%cr3": : "r" (val), "m" (__force_order));
 }
 
 static inline unsigned long native_read_cr4(void)
 {
	unsigned long val;
-	asm volatile("mov %%cr4,%0\n\t" :"=r" (val), "=m" (__force_order));
+	asm volatile("mov %%cr4,%0\n\t" : "=r" (val), "=m" (__force_order));
	return val;
 }
 
@@ -237,7 +234,7 @@ static inline unsigned long native_read_cr4_safe(void)
 #ifdef CONFIG_X86_32
	asm volatile("1: mov %%cr4, %0\n"
		     "2:\n"
-		     _ASM_EXTABLE(1b,2b)
+		     _ASM_EXTABLE(1b, 2b)
		     : "=r" (val), "=m" (__force_order) : "0" (0));
 #else
	val = native_read_cr4();
@@ -247,7 +244,7 @@ static inline unsigned long native_read_cr4_safe(void)
 
 static inline void native_write_cr4(unsigned long val)
 {
-	asm volatile("mov %0,%%cr4": :"r" (val), "m" (__force_order));
+	asm volatile("mov %0,%%cr4": : "r" (val), "m" (__force_order));
 }
 
 #ifdef CONFIG_X86_64
@@ -268,6 +265,7 @@ static inline void native_wbinvd(void)
 {
	asm volatile("wbinvd": : :"memory");
 }
+
 #ifdef CONFIG_PARAVIRT
 #include <asm/paravirt.h>
 #else
@@ -300,7 +298,7 @@ static inline void clflush(volatile void *__p)
	asm volatile("clflush %0" : "+m" (*(volatile char __force *)__p));
 }
 
-#define nop() __asm__ __volatile__ ("nop")
+#define nop() asm volatile ("nop")
 
 void disable_hlt(void);
 void enable_hlt(void);
@@ -399,7 +397,7 @@ void default_idle(void);
 # define smp_wmb() barrier()
 #endif
 #define smp_read_barrier_depends() read_barrier_depends()
-#define set_mb(var, value) do { (void) xchg(&var, value); } while (0)
+#define set_mb(var, value) do { (void)xchg(&var, value); } while (0)
 #else
 #define smp_mb() barrier()
 #define smp_rmb() barrier()