@@ -986,6 +986,8 @@ ENDPROC(fast_syscall_unrecoverable)
  *	j	done
  */
 
+#ifdef CONFIG_FAST_SYSCALL_XTENSA
+
 #define TRY	 \
 	.section __ex_table, "a";	\
 	.word	66f, 67f;		\
@@ -1001,9 +1003,8 @@ ENTRY(fast_syscall_xtensa)
 	movi	a7, 4			# sizeof(unsigned int)
 	access_ok a3, a7, a0, a2, .Leac	# a0: scratch reg, a2: sp
 
-	addi	a6, a6, -1		# assuming SYS_XTENSA_ATOMIC_SET = 1
-	_bgeui	a6, SYS_XTENSA_COUNT - 1, .Lill
-	_bnei	a6, SYS_XTENSA_ATOMIC_CMP_SWP - 1, .Lnswp
+	_bgeui	a6, SYS_XTENSA_COUNT, .Lill
+	_bnei	a6, SYS_XTENSA_ATOMIC_CMP_SWP, .Lnswp
 
 	/* Fall through for ATOMIC_CMP_SWP. */
 
@@ -1015,27 +1016,26 @@ TRY	s32i	a5, a3, 0		# different, modify value
 	l32i	a7, a2, PT_AREG7	# restore a7
 	l32i	a0, a2, PT_AREG0	# restore a0
 	movi	a2, 1			# and return 1
-	addi	a6, a6, 1		# restore a6 (really necessary?)
 	rfe
 
 1:	l32i	a7, a2, PT_AREG7	# restore a7
 	l32i	a0, a2, PT_AREG0	# restore a0
 	movi	a2, 0			# return 0 (note that we cannot set
-	addi	a6, a6, 1		# restore a6 (really necessary?)
 	rfe
 
 .Lnswp:	/* Atomic set, add, and exg_add. */
 
 TRY	l32i	a7, a3, 0		# orig
+	addi	a6, a6, -SYS_XTENSA_ATOMIC_SET
 	add	a0, a4, a7		# + arg
 	moveqz	a0, a4, a6		# set
+	addi	a6, a6, SYS_XTENSA_ATOMIC_SET
 TRY	s32i	a0, a3, 0		# write new value
 
 	mov	a0, a2
 	mov	a2, a7
 	l32i	a7, a0, PT_AREG7	# restore a7
 	l32i	a0, a0, PT_AREG0	# restore a0
-	addi	a6, a6, 1		# restore a6 (really necessary?)
 	rfe
 
 CATCH
@@ -1044,13 +1044,25 @@ CATCH
 	movi	a2, -EFAULT
 	rfe
 
-.Lill:	l32i	a7, a2, PT_AREG0	# restore a7
+.Lill:	l32i	a7, a2, PT_AREG7	# restore a7
 	l32i	a0, a2, PT_AREG0	# restore a0
 	movi	a2, -EINVAL
 	rfe
 
 ENDPROC(fast_syscall_xtensa)
 
+#else /* CONFIG_FAST_SYSCALL_XTENSA */
+
+ENTRY(fast_syscall_xtensa)
+
+	l32i	a0, a2, PT_AREG0	# restore a0
+	movi	a2, -ENOSYS
+	rfe
+
+ENDPROC(fast_syscall_xtensa)
+
+#endif /* CONFIG_FAST_SYSCALL_XTENSA */
+
 
 /* fast_syscall_spill_registers.
  *
@@ -1066,6 +1078,8 @@ ENDPROC(fast_syscall_xtensa)
  * Note: We assume the stack pointer is EXC_TABLE_KSTK in the fixup handler.
  */
 
+#ifdef CONFIG_FAST_SYSCALL_SPILL_REGISTERS
+
 ENTRY(fast_syscall_spill_registers)
 
 	/* Register a FIXUP handler (pass current wb as a parameter) */
@@ -1400,6 +1414,18 @@ ENTRY(fast_syscall_spill_registers_fixup_return)
 
 ENDPROC(fast_syscall_spill_registers_fixup_return)
 
+#else /* CONFIG_FAST_SYSCALL_SPILL_REGISTERS */
+
+ENTRY(fast_syscall_spill_registers)
+
+	l32i	a0, a2, PT_AREG0	# restore a0
+	movi	a2, -ENOSYS
+	rfe
+
+ENDPROC(fast_syscall_spill_registers)
+
+#endif /* CONFIG_FAST_SYSCALL_SPILL_REGISTERS */
+
 #ifdef CONFIG_MMU
 /*
  * We should never get here. Bail out!
@@ -1565,7 +1591,7 @@ ENTRY(fast_second_level_miss)
 	rsr	a0, excvaddr
 	bltu	a0, a3, 2f
 
-	addi	a1, a0, -(2 << (DCACHE_ALIAS_ORDER + PAGE_SHIFT))
+	addi	a1, a0, -TLBTEMP_SIZE
 	bgeu	a1, a3, 2f
 
 	/* Check if we have to restore an ITLB mapping. */
@@ -1820,7 +1846,6 @@ ENTRY(_switch_to)
 
 	entry	a1, 16
 
-	mov	a10, a2			# preserve 'prev' (a2)
 	mov	a11, a3			# and 'next' (a3)
 
 	l32i	a4, a2, TASK_THREAD_INFO
@@ -1828,8 +1853,14 @@ ENTRY(_switch_to)
 
 	save_xtregs_user a4 a6 a8 a9 a12 a13 THREAD_XTREGS_USER
 
-	s32i	a0, a10, THREAD_RA	# save return address
-	s32i	a1, a10, THREAD_SP	# save stack pointer
+#if THREAD_RA > 1020 || THREAD_SP > 1020
+	addi	a10, a2, TASK_THREAD
+	s32i	a0, a10, THREAD_RA - TASK_THREAD	# save return address
+	s32i	a1, a10, THREAD_SP - TASK_THREAD	# save stack pointer
+#else
+	s32i	a0, a2, THREAD_RA	# save return address
+	s32i	a1, a2, THREAD_SP	# save stack pointer
+#endif
 
 	/* Disable ints while we manipulate the stack pointer. */
 
@@ -1870,7 +1901,6 @@ ENTRY(_switch_to)
 	load_xtregs_user a5 a6 a8 a9 a12 a13 THREAD_XTREGS_USER
 
 	wsr	a14, ps
-	mov	a2, a10			# return 'prev'
 	rsync
 
 	retw