@@ -307,13 +307,13 @@ END(ret_from_exception)
 #ifdef CONFIG_PREEMPT
 ENTRY(resume_kernel)
 	DISABLE_INTERRUPTS(CLBR_ANY)
-need_resched:
+.Lneed_resched:
 	cmpl	$0, PER_CPU_VAR(__preempt_count)
 	jnz	restore_all
 	testl	$X86_EFLAGS_IF, PT_EFLAGS(%esp)	# interrupts off (exception path) ?
 	jz	restore_all
 	call	preempt_schedule_irq
-	jmp	need_resched
+	jmp	.Lneed_resched
 END(resume_kernel)
 #endif
 
@@ -334,7 +334,7 @@ GLOBAL(__begin_SYSENTER_singlestep_region)
  */
 ENTRY(xen_sysenter_target)
 	addl	$5*4, %esp			/* remove xen-provided frame */
-	jmp	sysenter_past_esp
+	jmp	.Lsysenter_past_esp
 #endif
 
 /*
@@ -371,7 +371,7 @@ ENTRY(xen_sysenter_target)
  */
 ENTRY(entry_SYSENTER_32)
 	movl	TSS_sysenter_sp0(%esp), %esp
-sysenter_past_esp:
+.Lsysenter_past_esp:
 	pushl	$__USER_DS		/* pt_regs->ss */
 	pushl	%ebp			/* pt_regs->sp (stashed in bp) */
 	pushfl				/* pt_regs->flags (except IF = 0) */
@@ -504,9 +504,9 @@ ENTRY(entry_INT80_32)
 
 restore_all:
 	TRACE_IRQS_IRET
-restore_all_notrace:
+.Lrestore_all_notrace:
 #ifdef CONFIG_X86_ESPFIX32
-	ALTERNATIVE	"jmp restore_nocheck", "", X86_BUG_ESPFIX
+	ALTERNATIVE	"jmp .Lrestore_nocheck", "", X86_BUG_ESPFIX
 
 	movl	PT_EFLAGS(%esp), %eax		# mix EFLAGS, SS and CS
 	/*
@@ -518,22 +518,23 @@ restore_all_notrace:
 	movb	PT_CS(%esp), %al
 	andl	$(X86_EFLAGS_VM | (SEGMENT_TI_MASK << 8) | SEGMENT_RPL_MASK), %eax
 	cmpl	$((SEGMENT_LDT << 8) | USER_RPL), %eax
-	je ldt_ss				# returning to user-space with LDT SS
+	je .Lldt_ss				# returning to user-space with LDT SS
 #endif
-restore_nocheck:
+.Lrestore_nocheck:
 	RESTORE_REGS 4				# skip orig_eax/error_code
-irq_return:
+.Lirq_return:
 	INTERRUPT_RETURN
+
 .section .fixup, "ax"
 ENTRY(iret_exc )
 	pushl	$0				# no error code
 	pushl	$do_iret_error
 	jmp	error_code
 .previous
-	_ASM_EXTABLE(irq_return, iret_exc)
+	_ASM_EXTABLE(.Lirq_return, iret_exc)
 
 #ifdef CONFIG_X86_ESPFIX32
-ldt_ss:
+.Lldt_ss:
 	/*
 	 * Setup and switch to ESPFIX stack
 	 *
@@ -562,7 +563,7 @@ ldt_ss:
 	 */
 	DISABLE_INTERRUPTS(CLBR_EAX)
 	lss	(%esp), %esp			/* switch to espfix segment */
-	jmp	restore_nocheck
+	jmp	.Lrestore_nocheck
 #endif
 ENDPROC(entry_INT80_32)
 
@@ -882,7 +883,7 @@ ftrace_call:
 	popl	%edx
 	popl	%ecx
 	popl	%eax
-ftrace_ret:
+.Lftrace_ret:
 #ifdef CONFIG_FUNCTION_GRAPH_TRACER
 .globl ftrace_graph_call
 ftrace_graph_call:
@@ -952,7 +953,7 @@ GLOBAL(ftrace_regs_call)
 	popl	%gs
 	addl	$8, %esp			/* Skip orig_ax and ip */
 	popf					/* Pop flags at end (no addl to corrupt flags) */
-	jmp	ftrace_ret
+	jmp	.Lftrace_ret
 
 	popf
 	jmp	ftrace_stub
@@ -963,7 +964,7 @@ ENTRY(mcount)
 	jb	ftrace_stub			/* Paging not enabled yet? */
 
 	cmpl	$ftrace_stub, ftrace_trace_function
-	jnz	trace
+	jnz	.Ltrace
 #ifdef CONFIG_FUNCTION_GRAPH_TRACER
 	cmpl	$ftrace_stub, ftrace_graph_return
 	jnz	ftrace_graph_caller
@@ -976,7 +977,7 @@ ftrace_stub:
 	ret
 
 	/* taken from glibc */
-trace:
+.Ltrace:
 	pushl	%eax
 	pushl	%ecx
 	pushl	%edx
@@ -1116,7 +1117,7 @@ ENTRY(nmi)
 	movl	%ss, %eax
 	cmpw	$__ESPFIX_SS, %ax
 	popl	%eax
-	je	nmi_espfix_stack
+	je	.Lnmi_espfix_stack
 #endif
 
 	pushl	%eax				# pt_regs->orig_ax
@@ -1132,7 +1133,7 @@ ENTRY(nmi)
 
 	/* Not on SYSENTER stack. */
 	call	do_nmi
-	jmp	restore_all_notrace
+	jmp	.Lrestore_all_notrace
 
 .Lnmi_from_sysenter_stack:
 	/*
@@ -1143,10 +1144,10 @@ ENTRY(nmi)
 	movl	PER_CPU_VAR(cpu_current_top_of_stack), %esp
 	call	do_nmi
 	movl	%ebp, %esp
-	jmp	restore_all_notrace
+	jmp	.Lrestore_all_notrace
 
 #ifdef CONFIG_X86_ESPFIX32
-nmi_espfix_stack:
+.Lnmi_espfix_stack:
 	/*
 	 * create the pointer to lss back
 	 */
@@ -1164,7 +1165,7 @@ nmi_espfix_stack:
 	call	do_nmi
 	RESTORE_REGS
 	lss	12+4(%esp), %esp		# back to espfix stack
-	jmp	irq_return
+	jmp	.Lirq_return
 #endif
 END(nmi)
 