@@ -65,7 +65,7 @@
 # define preempt_stop(clobbers)	DISABLE_INTERRUPTS(clobbers); TRACE_IRQS_OFF
 #else
 # define preempt_stop(clobbers)
-# define resume_kernel		restore_all
+# define resume_kernel		restore_all_kernel
 #endif
 
 .macro TRACE_IRQS_IRET
@@ -399,9 +399,9 @@ ENTRY(resume_kernel)
 	DISABLE_INTERRUPTS(CLBR_ANY)
 .Lneed_resched:
 	cmpl	$0, PER_CPU_VAR(__preempt_count)
-	jnz	restore_all
+	jnz	restore_all_kernel
 	testl	$X86_EFLAGS_IF, PT_EFLAGS(%esp)	# interrupts off (exception path) ?
-	jz	restore_all
+	jz	restore_all_kernel
 	call	preempt_schedule_irq
 	jmp	.Lneed_resched
 END(resume_kernel)
@@ -606,6 +606,11 @@ restore_all:
 	 */
 	INTERRUPT_RETURN
 
+restore_all_kernel:
+	TRACE_IRQS_IRET
+	RESTORE_REGS 4
+	jmp	.Lirq_return
+
 .section .fixup, "ax"
 ENTRY(iret_exc )
 	pushl	$0				# no error code
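
For readability, the added return-to-kernel block once more with explanatory comments. The annotations are editorial, not part of the patch; the reading of RESTORE_REGS 4 (pop the saved registers, then skip the 4-byte orig_eax slot) assumes the usual entry_32.S macro semantics.

restore_all_kernel:
	TRACE_IRQS_IRET			# irq-flags tracing: the iret below may re-enable interrupts
	RESTORE_REGS 4			# pop the saved registers; the 4 skips the orig_eax slot (assumed)
	jmp	.Lirq_return		# reuse the shared iret, which the iret_exc fixup below covers

Judging from the hunks alone, the design choice is that a return to kernel mode can jump straight to .Lirq_return and bypass the work restore_all performs between its label and the shared iret, since that work is only needed when returning to user mode.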