@@ -611,14 +611,14 @@ ldt_ss:
  * compensating for the offset by changing to the ESPFIX segment with
  * a base address that matches for the difference.
  */
+#define GDT_ESPFIX_SS PER_CPU_VAR(gdt_page) + (GDT_ENTRY_ESPFIX_SS * 8)
 	mov	%esp, %edx			/* load kernel esp */
 	mov	PT_OLDESP(%esp), %eax		/* load userspace esp */
 	mov	%dx, %ax			/* eax: new kernel esp */
 	sub	%eax, %edx			/* offset (low word is 0) */
-	PER_CPU(gdt_page, %ebx)
 	shr	$16, %edx
-	mov	%dl, GDT_ENTRY_ESPFIX_SS * 8 + 4(%ebx) /* bits 16..23 */
-	mov	%dh, GDT_ENTRY_ESPFIX_SS * 8 + 7(%ebx) /* bits 24..31 */
+	mov	%dl, GDT_ESPFIX_SS + 4 /* bits 16..23 */
+	mov	%dh, GDT_ESPFIX_SS + 7 /* bits 24..31 */
 	pushl	$__ESPFIX_SS
 	CFI_ADJUST_CFA_OFFSET 4
 	push	%eax				/* new kernel esp */
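The two byte stores in this hunk only need to touch descriptor bytes 4 and 7 because the low word of the kernel/user esp difference is zero by construction. A rough user-space sketch of that arithmetic follows (illustrative names and example values only, not kernel code; it assumes the standard GDT layout where byte 4 holds base bits 16..23 and byte 7 holds bits 24..31):

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

int main(void)
{
	uint8_t espfix_desc[8] = { 0 };		/* stand-in for the ESPFIX GDT entry */
	uint32_t kernel_esp = 0xc0123abcu;	/* example values only */
	uint32_t user_esp   = 0xbf986654u;

	/* mov %dx, %ax: user esp high word, kernel esp low word */
	uint32_t new_esp = (user_esp & 0xffff0000u) | (kernel_esp & 0xffffu);

	/* sub %eax, %edx: the difference, whose low word is zero */
	uint32_t delta = kernel_esp - new_esp;

	/* shr $16 plus the two byte stores: delta becomes base bits 16..31 */
	espfix_desc[4] = (delta >> 16) & 0xff;	/* bits 16..23 */
	espfix_desc[7] = (delta >> 24) & 0xff;	/* bits 24..31 */

	/* segment base plus the new esp lands back on the real kernel esp */
	uint32_t base = ((uint32_t)espfix_desc[4] << 16) |
			((uint32_t)espfix_desc[7] << 24);
	printf("base + new_esp = 0x%08" PRIx32 ", kernel_esp = 0x%08" PRIx32 "\n",
	       base + new_esp, kernel_esp);
	return 0;
}
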
@@ -791,9 +791,8 @@ ptregs_clone:
  * normal stack and adjusts ESP with the matching offset.
  */
 	/* fixup the stack */
-	PER_CPU(gdt_page, %ebx)
-	mov	GDT_ENTRY_ESPFIX_SS * 8 + 4(%ebx), %al /* bits 16..23 */
-	mov	GDT_ENTRY_ESPFIX_SS * 8 + 7(%ebx), %ah /* bits 24..31 */
+	mov	GDT_ESPFIX_SS + 4, %al /* bits 16..23 */
+	mov	GDT_ESPFIX_SS + 7, %ah /* bits 24..31 */
 	shl	$16, %eax
 	addl	%esp, %eax			/* the adjusted stack pointer */
 	pushl	$__KERNEL_DS
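The fixup path in this hunk reverses the trick: the two byte loads pull the base bits back out of the same descriptor, and the shl/addl pair rebuilds the flat kernel stack pointer from them and the segment-relative esp. A minimal sketch of that reconstruction, assuming the same illustrative descriptor layout as above (not the kernel's own helpers):

#include <stdint.h>

/* Rebuild the flat esp: mirrors the %al/%ah loads, shl $16 and addl above. */
uint32_t espfix_flat_esp(const uint8_t espfix_desc[8], uint32_t espfix_esp)
{
	uint32_t base = ((uint32_t)espfix_desc[4] << 16) |	/* bits 16..23 */
			((uint32_t)espfix_desc[7] << 24);	/* bits 24..31 */
	return base + espfix_esp;				/* addl %esp, %eax */
}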