@@ -527,6 +527,7 @@ syscall_exit:
 restore_all:
 	TRACE_IRQS_IRET
 restore_all_notrace:
+#ifdef CONFIG_X86_ESPFIX32
 	movl PT_EFLAGS(%esp), %eax	# mix EFLAGS, SS and CS
 	# Warning: PT_OLDSS(%esp) contains the wrong/random values if we
 	# are returning to the kernel.
@@ -537,6 +538,7 @@ restore_all_notrace:
 	cmpl $((SEGMENT_LDT << 8) | USER_RPL), %eax
 	CFI_REMEMBER_STATE
 	je ldt_ss			# returning to user-space with LDT SS
+#endif
 restore_nocheck:
 	RESTORE_REGS 4			# skip orig_eax/error_code
 irq_return:
@@ -549,6 +551,7 @@ ENTRY(iret_exc)
 .previous
 	_ASM_EXTABLE(irq_return,iret_exc)
 
+#ifdef CONFIG_X86_ESPFIX32
 	CFI_RESTORE_STATE
 ldt_ss:
 #ifdef CONFIG_PARAVIRT
@@ -592,6 +595,7 @@ ldt_ss:
 	lss (%esp), %esp		/* switch to espfix segment */
 	CFI_ADJUST_CFA_OFFSET -8
 	jmp restore_nocheck
+#endif
 	CFI_ENDPROC
 ENDPROC(system_call)
 
@@ -699,6 +703,7 @@ END(syscall_badsys)
  * the high word of the segment base from the GDT and swiches to the
  * normal stack and adjusts ESP with the matching offset.
  */
+#ifdef CONFIG_X86_ESPFIX32
 	/* fixup the stack */
 	mov GDT_ESPFIX_SS + 4, %al /* bits 16..23 */
 	mov GDT_ESPFIX_SS + 7, %ah /* bits 24..31 */
@@ -708,8 +713,10 @@ END(syscall_badsys)
 	pushl_cfi %eax
 	lss (%esp), %esp		/* switch to the normal stack segment */
 	CFI_ADJUST_CFA_OFFSET -8
+#endif
 .endm
 .macro UNWIND_ESPFIX_STACK
+#ifdef CONFIG_X86_ESPFIX32
 	movl %ss, %eax
 	/* see if on espfix stack */
 	cmpw $__ESPFIX_SS, %ax
@@ -720,6 +727,7 @@ END(syscall_badsys)
 	/* switch to normal stack */
 	FIXUP_ESPFIX_STACK
 27:
+#endif
 .endm
 
 /*
@@ -1350,11 +1358,13 @@ END(debug)
 ENTRY(nmi)
 	RING0_INT_FRAME
 	ASM_CLAC
+#ifdef CONFIG_X86_ESPFIX32
 	pushl_cfi %eax
 	movl %ss, %eax
 	cmpw $__ESPFIX_SS, %ax
 	popl_cfi %eax
 	je nmi_espfix_stack
+#endif
 	cmpl $ia32_sysenter_target,(%esp)
 	je nmi_stack_fixup
 	pushl_cfi %eax
@@ -1394,6 +1404,7 @@ nmi_debug_stack_check:
 	FIX_STACK 24, nmi_stack_correct, 1
 	jmp nmi_stack_correct
 
+#ifdef CONFIG_X86_ESPFIX32
 nmi_espfix_stack:
 	/* We have a RING0_INT_FRAME here.
 	 *
@@ -1415,6 +1426,7 @@ nmi_espfix_stack:
 	lss 12+4(%esp), %esp		# back to espfix stack
 	CFI_ADJUST_CFA_OFFSET -24
 	jmp irq_return
+#endif
 	CFI_ENDPROC
 END(nmi)
 