
x86: unify loadsegment() macro

This patch unifies the loadsegment() macro, making it identical in the
x86_64 and i386 architectures. The common version goes into system.h,
and the old per-architecture versions are deleted.

Signed-off-by: Glauber de Oliveira Costa <gcosta@redhat.com>
Signed-off-by: Ingo Molnar <mingo@elte.hu>
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
Glauber de Oliveira Costa, 17 years ago
parent commit a6b4655258
3 changed files with 21 additions and 42 deletions
  1. include/asm-x86/system.h (+21, -0)
  2. include/asm-x86/system_32.h (+0, -22)
  3. include/asm-x86/system_64.h (+0, -20)

include/asm-x86/system.h (+21, -0)

@@ -38,6 +38,27 @@ __asm__ __volatile__ ("movw %%dx,%1\n\t" \
 #define set_base(ldt, base) _set_base(((char *)&(ldt)) , (base))
 #define set_limit(ldt, limit) _set_limit(((char *)&(ldt)) , ((limit)-1))
 
+/*
+ * Load a segment. Fall back on loading the zero
+ * segment if something goes wrong..
+ */
+#define loadsegment(seg, value)			\
+	asm volatile("\n"			\
+		"1:\t"				\
+		"movl %k0,%%" #seg "\n"		\
+		"2:\n"				\
+		".section .fixup,\"ax\"\n"	\
+		"3:\t"				\
+		"movl %k1, %%" #seg "\n\t"	\
+		"jmp 2b\n"			\
+		".previous\n"			\
+		".section __ex_table,\"a\"\n\t"	\
+		_ASM_ALIGN "\n\t"		\
+		_ASM_PTR " 1b,3b\n"		\
+		".previous"			\
+		: :"r" (value), "r" (0))
+
+
 /*
  * Save a segment register away
  */
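
For context (an editorial note, not part of the commit): the unified macro can live in the shared header because _ASM_ALIGN and _ASM_PTR expand to ".align 4"/".long" on i386 and ".align 8"/".quad" on x86_64, so a single definition emits a correctly sized exception-table entry on either architecture. A minimal sketch of the intended use, with a hypothetical helper name:

/*
 * Illustrative only: reload a user-supplied %fs selector.
 * If the selector is stale, the mov inside loadsegment()
 * faults; the __ex_table fixup then loads the null selector
 * instead of oopsing the kernel.
 */
static inline void reload_user_fs(unsigned short sel)
{
	loadsegment(fs, sel);
}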

include/asm-x86/system_32.h (+0, -22)

@@ -34,28 +34,6 @@ extern struct task_struct * FASTCALL(__switch_to(struct task_struct *prev, struc
 		      "2" (prev), "d" (next));				\
 } while (0)
 
-/*
- * Load a segment. Fall back on loading the zero
- * segment if something goes wrong..
- */
-#define loadsegment(seg,value)			\
-	asm volatile("\n"			\
-		"1:\t"				\
-		"mov %0,%%" #seg "\n"		\
-		"2:\n"				\
-		".section .fixup,\"ax\"\n"	\
-		"3:\t"				\
-		"pushl $0\n\t"			\
-		"popl %%" #seg "\n\t"		\
-		"jmp 2b\n"			\
-		".previous\n"			\
-		".section __ex_table,\"a\"\n\t"	\
-		".align 4\n\t"			\
-		".long 1b,3b\n"			\
-		".previous"			\
-		: :"rm" (value))
-
-
 static inline void native_clts(void)
 {
 	asm volatile ("clts");

include/asm-x86/system_64.h (+0, -20)

@@ -49,26 +49,6 @@
     
 extern void load_gs_index(unsigned); 
 
-/*
- * Load a segment. Fall back on loading the zero
- * segment if something goes wrong..
- */
-#define loadsegment(seg,value)	\
-	asm volatile("\n"			\
-		"1:\t"				\
-		"movl %k0,%%" #seg "\n"		\
-		"2:\n"				\
-		".section .fixup,\"ax\"\n"	\
-		"3:\t"				\
-		"movl %1,%%" #seg "\n\t" 	\
-		"jmp 2b\n"			\
-		".previous\n"			\
-		".section __ex_table,\"a\"\n\t"	\
-		".align 8\n\t"			\
-		".quad 1b,3b\n"			\
-		".previous"			\
-		: :"r" (value), "r" (0))
-
 /*
  * Clear and set 'TS' bit respectively
  */
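
One more observation (hedged, not part of the commit): the old i386 fixup zeroed the segment with "pushl $0; popl %seg", a sequence that is not encodable in 64-bit mode, so the common macro keeps the x86_64 form and feeds the zero through the extra "r" (0) operand. The fallback behaviour can be demonstrated from C; the probe below is purely hypothetical:

/*
 * Purely hypothetical sketch: test whether a selector can be
 * loaded into %fs. loadsegment() either installs it or, via
 * the fixup path, leaves the null selector behind.
 */
static inline int fs_selector_loadable(unsigned short sel)
{
	unsigned short cur;

	loadsegment(fs, sel);
	asm volatile("movw %%fs, %0" : "=r" (cur));	/* read it back */
	return cur == sel;
}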