
ftrace: clean up function graph asm

Impact: clean up

Macros already exist in the x86 asm headers to handle the differences
between x86_64 and i386. This patch updates the function graph asm to
use them.

Signed-off-by: Steven Rostedt <srostedt@redhat.com>
Acked-by: Frederic Weisbecker <fweisbec@gmail.com>
Signed-off-by: Ingo Molnar <mingo@elte.hu>
Author: Steven Rostedt
Commit: 347fdd9dd4
1 changed file with 4 additions and 16 deletions:
      arch/x86/kernel/ftrace.c

arch/x86/kernel/ftrace.c: +4 -16

@@ -467,28 +467,16 @@ void prepare_ftrace_return(unsigned long *parent, unsigned long self_addr)
 	 * ignore such a protection.
 	 */
 	asm volatile(
-#ifdef CONFIG_X86_64
-		"1: movq (%[parent_old]), %[old]\n"
-		"2: movq %[return_hooker], (%[parent_replaced])\n"
-#else
-		"1: movl (%[parent_old]), %[old]\n"
-		"2: movl %[return_hooker], (%[parent_replaced])\n"
-#endif
+		"1: " _ASM_MOV " (%[parent_old]), %[old]\n"
+		"2: " _ASM_MOV " %[return_hooker], (%[parent_replaced])\n"
 		"   movl $0, %[faulted]\n"
 
 		".section .fixup, \"ax\"\n"
 		"3: movl $1, %[faulted]\n"
 		".previous\n"
 
-		".section __ex_table, \"a\"\n"
-#ifdef CONFIG_X86_64
-		"   .quad 1b, 3b\n"
-		"   .quad 2b, 3b\n"
-#else
-		"   .long 1b, 3b\n"
-		"   .long 2b, 3b\n"
-#endif
-		".previous\n"
+		_ASM_EXTABLE(1b, 3b)
+		_ASM_EXTABLE(2b, 3b)
 
 		: [parent_replaced] "=r" (parent), [old] "=r" (old),
 		  [faulted] "=r" (faulted)
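
The hunk above relies on the _ASM_MOV and _ASM_EXTABLE helpers from the x86
asm header (asm/asm.h). Below is only a simplified sketch of the idea behind
those macros, written for illustration; the kernel's real definitions go
through intermediate helper macros (and also cover the __ASSEMBLY__ case), so
the exact spelling here is an approximation, not the upstream code.

    /*
     * Simplified sketch of the width-selecting helpers, modeled on the
     * x86 <asm/asm.h> header.  Not the kernel's literal definitions.
     */
    #ifdef CONFIG_X86_64
    # define _ASM_MOV	"movq"		/* pointer-sized move      */
    # define _ASM_PTR	" .quad "	/* pointer-sized directive */
    #else
    # define _ASM_MOV	"movl"
    # define _ASM_PTR	" .long "
    #endif

    /* Emit one exception-table entry: if 'from' faults, resume at 'to'. */
    #define _ASM_EXTABLE(from, to)			\
    	" .section __ex_table, \"a\"\n"		\
    	_ASM_PTR #from "," #to "\n"		\
    	" .previous\n"

With helpers like these, a single inline-asm body serves both CONFIG_X86_32
and CONFIG_X86_64: on a 64-bit build the first instruction in the asm above
expands to roughly "1: movq (%[parent_old]), %[old]\n", and each
_ASM_EXTABLE(1b, 3b) becomes a pointer-sized 1b,3b entry in the __ex_table
section, replacing the open-coded #ifdef blocks that the patch removes.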