@@ -467,28 +467,16 @@ void prepare_ftrace_return(unsigned long *parent, unsigned long self_addr)
	 * ignore such a protection.
	 */
	asm volatile(
-#ifdef CONFIG_X86_64
-		"1: movq (%[parent_old]), %[old]\n"
-		"2: movq %[return_hooker], (%[parent_replaced])\n"
-#else
-		"1: movl (%[parent_old]), %[old]\n"
-		"2: movl %[return_hooker], (%[parent_replaced])\n"
-#endif
+		"1: " _ASM_MOV " (%[parent_old]), %[old]\n"
+		"2: " _ASM_MOV " %[return_hooker], (%[parent_replaced])\n"
		"   movl $0, %[faulted]\n"

		".section .fixup, \"ax\"\n"
		"3: movl $1, %[faulted]\n"
		".previous\n"

-		".section __ex_table, \"a\"\n"
-#ifdef CONFIG_X86_64
-		"   .quad 1b, 3b\n"
-		"   .quad 2b, 3b\n"
-#else
-		"   .long 1b, 3b\n"
-		"   .long 2b, 3b\n"
-#endif
-		".previous\n"
+		_ASM_EXTABLE(1b, 3b)
+		_ASM_EXTABLE(2b, 3b)

		: [parent_replaced] "=r" (parent), [old] "=r" (old),
		  [faulted] "=r" (faulted)