@@ -1,33 +1,29 @@
 #ifndef _ASM_X86_ASM_H
 #define _ASM_X86_ASM_H
 
-#ifdef CONFIG_X86_32
-/* 32 bits */
-
-# define _ASM_PTR " .long "
-# define _ASM_ALIGN " .balign 4 "
-# define _ASM_MOV_UL " movl "
-
-# define _ASM_INC " incl "
-# define _ASM_DEC " decl "
-# define _ASM_ADD " addl "
-# define _ASM_SUB " subl "
-# define _ASM_XADD " xaddl "
+#ifdef __ASSEMBLY__
+# define __ASM_FORM(x) x
+#else
+# define __ASM_FORM(x) " " #x " "
+#endif
 
+#ifdef CONFIG_X86_32
+# define __ASM_SEL(a,b) __ASM_FORM(a)
 #else
-/* 64 bits */
+# define __ASM_SEL(a,b) __ASM_FORM(b)
+#endif
 
-# define _ASM_PTR " .quad "
-# define _ASM_ALIGN " .balign 8 "
-# define _ASM_MOV_UL " movq "
+#define __ASM_SIZE(inst) __ASM_SEL(inst##l, inst##q)
 
-# define _ASM_INC " incq "
-# define _ASM_DEC " decq "
-# define _ASM_ADD " addq "
-# define _ASM_SUB " subq "
-# define _ASM_XADD " xaddq "
+#define _ASM_PTR __ASM_SEL(.long, .quad)
+#define _ASM_ALIGN __ASM_SEL(.balign 4, .balign 8)
+#define _ASM_MOV_UL __ASM_SIZE(mov)
 
-#endif /* CONFIG_X86_32 */
+#define _ASM_INC __ASM_SIZE(inc)
+#define _ASM_DEC __ASM_SIZE(dec)
+#define _ASM_ADD __ASM_SIZE(add)
+#define _ASM_SUB __ASM_SIZE(sub)
+#define _ASM_XADD __ASM_SIZE(xadd)
 
 /* Exception table entry */
 # define _ASM_EXTABLE(from,to) \
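
Not part of the patch: a minimal, standalone C sketch of how the new macros behave on the C side. It reproduces __ASM_FORM, __ASM_SEL and __ASM_SIZE exactly as defined in the hunk above, with __ASSEMBLY__ undefined (as in C code) and CONFIG_X86_32 assumed undefined (a 64-bit build); on 32-bit the printed strings would be the old .long/incl forms instead.

/* Illustrative sketch only; not part of the patch.  Build with a plain
 * C compiler (e.g. "cc demo.c") and run.  CONFIG_X86_32 is assumed to
 * be undefined here, which selects the 64-bit forms. */
#include <stdio.h>

/* In C code __ASSEMBLY__ is not defined, so __ASM_FORM stringizes its
 * argument and pads it with spaces, producing an inline-asm fragment. */
#define __ASM_FORM(x) " " #x " "

#ifdef CONFIG_X86_32
# define __ASM_SEL(a,b) __ASM_FORM(a)
#else
# define __ASM_SEL(a,b) __ASM_FORM(b)
#endif

/* Paste the operand-size suffix onto the mnemonic: l on 32-bit, q on 64-bit. */
#define __ASM_SIZE(inst) __ASM_SEL(inst##l, inst##q)

#define _ASM_PTR __ASM_SEL(.long, .quad)
#define _ASM_INC __ASM_SIZE(inc)

int main(void)
{
	/* On a 64-bit build this prints " .quad " and " incq ", the same
	 * strings the patch removes as hard-coded literals. */
	printf("_ASM_PTR expands to \"%s\"\n", _ASM_PTR);
	printf("_ASM_INC expands to \"%s\"\n", _ASM_INC);
	return 0;
}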