|
@@ -159,6 +159,34 @@ _PIF_WORK = (_PIF_PER_TRAP | _PIF_SYSCALL_RESTART)
|
|
|
tm off+\addr, \mask
|
|
|
.endm
|
|
|
|
|
|
+ .macro BPOFF                          # disable branch prediction; a nop by default, live insn patched in via alternatives
|
|
|
+ .pushsection .altinstr_replacement, "ax"       # replacement text kept out of line
|
|
|
+660: .long 0xb2e8c000                  # PPA (perform processor assist, opc 0xb2e8) fc 12 — presumably "bp off"; confirm vs PoP
|
|
|
+ .popsection
|
|
|
+661: .long 0x47000000                  # inline default: bc 0,... == 4-byte nop
|
|
|
+ .pushsection .altinstructions, "a"    # one alternatives-table entry for this site
|
|
|
+ .long 661b - .                        # rel. offset of original (inline) instruction
|
|
|
+ .long 660b - .                        # rel. offset of replacement instruction
|
|
|
+ .word 82                              # apply only if facility bit 82 is installed
|
|
|
+ .byte 4                               # original instruction length (bytes)
|
|
|
+ .byte 4                               # replacement instruction length (bytes)
|
|
|
+ .popsection
|
|
|
+ .endm
|
|
|
+
|
|
|
+ .macro BPON                           # re-enable branch prediction; nop by default, live insn patched in via alternatives
|
|
|
+ .pushsection .altinstr_replacement, "ax"       # replacement text kept out of line
|
|
|
+662: .long 0xb2e8d000                  # PPA (perform processor assist, opc 0xb2e8) fc 13 — presumably "bp on"; confirm vs PoP
|
|
|
+ .popsection
|
|
|
+663: .long 0x47000000                  # inline default: bc 0,... == 4-byte nop
|
|
|
+ .pushsection .altinstructions, "a"    # one alternatives-table entry for this site
|
|
|
+ .long 663b - .                        # rel. offset of original (inline) instruction
|
|
|
+ .long 662b - .                        # rel. offset of replacement instruction
|
|
|
+ .word 82                              # apply only if facility bit 82 is installed
|
|
|
+ .byte 4                               # original instruction length (bytes)
|
|
|
+ .byte 4                               # replacement instruction length (bytes)
|
|
|
+ .popsection
|
|
|
+ .endm
|
|
|
+
|
|
|
.section .kprobes.text, "ax"
|
|
|
.Ldummy:
|
|
|
/*
|
|
@@ -171,6 +199,11 @@ _PIF_WORK = (_PIF_PER_TRAP | _PIF_SYSCALL_RESTART)
|
|
|
*/
|
|
|
nop 0
|
|
|
|
|
|
+ENTRY(__bpon)                          # void __bpon(void) — C-callable wrapper to re-enable branch prediction
|
|
|
+ .globl __bpon
|
|
|
+ BPON                                  # alternative-patched PPA; stays a nop when facility 82 is absent
|
|
|
+ br %r14                               # return to caller (%r14 = link register)
|
|
|
+
|
|
|
/*
|
|
|
* Scheduler resume function, called by switch_to
|
|
|
* gpr2 = (task_struct *) prev
|
|
@@ -226,8 +259,11 @@ ENTRY(sie64a)
|
|
|
jnz .Lsie_skip
|
|
|
TSTMSK __LC_CPU_FLAGS,_CIF_FPU
|
|
|
jo .Lsie_skip # exit if fp/vx regs changed
|
|
|
+ BPON
|
|
|
.Lsie_entry:
|
|
|
sie 0(%r14)
|
|
|
+.Lsie_exit:
|
|
|
+ BPOFF
|
|
|
.Lsie_skip:
|
|
|
ni __SIE_PROG0C+3(%r14),0xfe # no longer in SIE
|
|
|
lctlg %c1,%c1,__LC_USER_ASCE # load primary asce
|
|
@@ -279,6 +315,7 @@ ENTRY(system_call)
|
|
|
stpt __LC_SYNC_ENTER_TIMER
|
|
|
.Lsysc_stmg:
|
|
|
stmg %r8,%r15,__LC_SAVE_AREA_SYNC
|
|
|
+ BPOFF
|
|
|
lg %r12,__LC_CURRENT
|
|
|
lghi %r13,__TASK_thread
|
|
|
lghi %r14,_PIF_SYSCALL
|
|
@@ -325,6 +362,7 @@ ENTRY(system_call)
|
|
|
jnz .Lsysc_work # check for work
|
|
|
TSTMSK __LC_CPU_FLAGS,_CIF_WORK
|
|
|
jnz .Lsysc_work
|
|
|
+ BPON
|
|
|
.Lsysc_restore:
|
|
|
lg %r14,__LC_VDSO_PER_CPU
|
|
|
lmg %r0,%r10,__PT_R0(%r11)
|
|
@@ -530,6 +568,7 @@ ENTRY(kernel_thread_starter)
|
|
|
|
|
|
ENTRY(pgm_check_handler)
|
|
|
stpt __LC_SYNC_ENTER_TIMER
|
|
|
+ BPOFF
|
|
|
stmg %r8,%r15,__LC_SAVE_AREA_SYNC
|
|
|
lg %r10,__LC_LAST_BREAK
|
|
|
lg %r12,__LC_CURRENT
|
|
@@ -637,6 +676,7 @@ ENTRY(pgm_check_handler)
|
|
|
ENTRY(io_int_handler)
|
|
|
STCK __LC_INT_CLOCK
|
|
|
stpt __LC_ASYNC_ENTER_TIMER
|
|
|
+ BPOFF
|
|
|
stmg %r8,%r15,__LC_SAVE_AREA_ASYNC
|
|
|
lg %r12,__LC_CURRENT
|
|
|
larl %r13,cleanup_critical
|
|
@@ -687,9 +727,13 @@ ENTRY(io_int_handler)
|
|
|
lg %r14,__LC_VDSO_PER_CPU
|
|
|
lmg %r0,%r10,__PT_R0(%r11)
|
|
|
mvc __LC_RETURN_PSW(16),__PT_PSW(%r11)
|
|
|
+ tm __PT_PSW+1(%r11),0x01 # returning to user ?
|
|
|
+ jno .Lio_exit_kernel
|
|
|
+ BPON
|
|
|
.Lio_exit_timer:
|
|
|
stpt __LC_EXIT_TIMER
|
|
|
mvc __VDSO_ECTG_BASE(16,%r14),__LC_EXIT_TIMER
|
|
|
+.Lio_exit_kernel:
|
|
|
lmg %r11,%r15,__PT_R11(%r11)
|
|
|
lpswe __LC_RETURN_PSW
|
|
|
.Lio_done:
|
|
@@ -860,6 +904,7 @@ ENTRY(io_int_handler)
|
|
|
ENTRY(ext_int_handler)
|
|
|
STCK __LC_INT_CLOCK
|
|
|
stpt __LC_ASYNC_ENTER_TIMER
|
|
|
+ BPOFF
|
|
|
stmg %r8,%r15,__LC_SAVE_AREA_ASYNC
|
|
|
lg %r12,__LC_CURRENT
|
|
|
larl %r13,cleanup_critical
|
|
@@ -908,6 +953,7 @@ ENTRY(psw_idle)
|
|
|
.Lpsw_idle_stcctm:
|
|
|
#endif
|
|
|
oi __LC_CPU_FLAGS+7,_CIF_ENABLED_WAIT
|
|
|
+ BPON
|
|
|
STCK __CLOCK_IDLE_ENTER(%r2)
|
|
|
stpt __TIMER_IDLE_ENTER(%r2)
|
|
|
.Lpsw_idle_lpsw:
|
|
@@ -1008,6 +1054,7 @@ load_fpu_regs:
|
|
|
*/
|
|
|
ENTRY(mcck_int_handler)
|
|
|
STCK __LC_MCCK_CLOCK
|
|
|
+ BPOFF
|
|
|
la %r1,4095 # validate r1
|
|
|
spt __LC_CPU_TIMER_SAVE_AREA-4095(%r1) # validate cpu timer
|
|
|
sckc __LC_CLOCK_COMPARATOR # validate comparator
|
|
@@ -1118,6 +1165,7 @@ ENTRY(mcck_int_handler)
|
|
|
mvc __LC_RETURN_MCCK_PSW(16),__PT_PSW(%r11) # move return PSW
|
|
|
tm __LC_RETURN_MCCK_PSW+1,0x01 # returning to user ?
|
|
|
jno 0f
|
|
|
+ BPON
|
|
|
stpt __LC_EXIT_TIMER
|
|
|
mvc __VDSO_ECTG_BASE(16,%r14),__LC_EXIT_TIMER
|
|
|
0: lmg %r11,%r15,__PT_R11(%r11)
|