x86: cleanup: change _end to _end_before_pgt

cleanup: rename _end to _end_before_pgt in the compressed vmlinux_64.lds, making it clear that the symbol marks the end of the image before the page-table area rather than its true end.

also rename _heap to _ebss; the symbol marks the end of .bss, so a separate heap name is not needed.

Signed-off-by: Yinghai Lu <yhlu.kernel@gmail.com>
Signed-off-by: Ingo Molnar <mingo@elte.hu>
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
Yinghai Lu, 17 years ago

commit 4a9f54cfd2

+ 4 - 4
arch/x86/boot/compressed/head_64.S

@@ -244,9 +244,9 @@ ENTRY(startup_64)
 /* Copy the compressed kernel to the end of our buffer
  * where decompression in place becomes safe.
  */
-	leaq	_end(%rip), %r8
-	leaq	_end(%rbx), %r9
-	movq	$_end /* - $startup_32 */, %rcx
+	leaq	_end_before_pgt(%rip), %r8
+	leaq	_end_before_pgt(%rbx), %r9
+	movq	$_end_before_pgt /* - $startup_32 */, %rcx
 1:	subq	$8, %r8
 	subq	$8, %r9
 	movq	0(%r8), %rax
@@ -268,7 +268,7 @@ relocated:
  */
 	xorq	%rax, %rax
 	leaq    _edata(%rbx), %rdi
-	leaq    _end(%rbx), %rcx
+	leaq    _end_before_pgt(%rbx), %rcx
 	subq	%rdi, %rcx
 	cld
 	rep
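
For illustration, a rough C equivalent of the two loops these hunks touch: the first is the backward, quadword-at-a-time copy that moves the compressed image to the end of the buffer (source and destination overlap, so it must run from the top down); the second is the byte-wise clear of .bss from _edata up to _end_before_pgt, done by the string store following the rep prefix. This is a minimal sketch under those assumptions; the function names are hypothetical and the authoritative code is the assembly above.

	#include <stdint.h>
	#include <stddef.h>

	/*
	 * Backward copy, eight bytes at a time, mirroring the subq/movq
	 * loop: the destination overlaps the source from above, so copying
	 * from the highest quadword down never overwrites bytes that have
	 * not been read yet.
	 */
	static void copy_backward(uint64_t *src_end, uint64_t *dst_end,
				  size_t bytes)
	{
		for (size_t n = bytes / 8; n > 0; n--)
			*--dst_end = *--src_end;
	}

	/* Byte-wise zeroing of [edata, end_before_pgt), like the
	 * rep-store sequence in the second hunk. */
	static void clear_bss(char *edata, char *end_before_pgt)
	{
		for (char *p = edata; p < end_before_pgt; p++)
			*p = 0;
	}

Stopping both loops at _end_before_pgt means the six page-table pages that follow are neither copied nor cleared; the early boot code fills that area in at runtime.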

+ 2 - 2
arch/x86/boot/compressed/vmlinux_64.lds

@@ -39,10 +39,10 @@ SECTIONS
 		*(.bss.*)
 		*(COMMON)
 		. = ALIGN(8);
-		_end = . ;
+		_end_before_pgt = . ;
 		. = ALIGN(4096);
 		pgtable = . ;
 		. = . + 4096 * 6;
-		_heap = .;
+		_ebss = .;
 	}
 }
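
After the rename, _end_before_pgt is the last symbol covered by the copy and clear paths, followed by a 4K-aligned, six-page page-table reservation bounded by pgtable and _ebss. A minimal sketch of how C code could reference these linker-script symbols (the extern array declarations are the usual idiom for linker symbols; clear_pgtable_area is a hypothetical helper, not code from this commit):

	#include <string.h>

	/* Linker-script symbols from vmlinux_64.lds: only their addresses
	 * are meaningful, never their contents. */
	extern char _end_before_pgt[];
	extern char pgtable[];
	extern char _ebss[];

	/* Hypothetical helper: zero the area reserved for the early page
	 * tables, i.e. [pgtable, _ebss), which the script sizes at
	 * 4096 * 6 bytes. */
	static void clear_pgtable_area(void)
	{
		memset(pgtable, 0, (size_t)(_ebss - pgtable));
	}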