@@ -8,6 +8,7 @@
  * this archive for more details.
  *
  * Copyright (C) 2001 - 2005 Tensilica, Inc.
+ * Copyright (C) 2014 Cadence Design Systems Inc.
  *
  * Rewritten by Chris Zankel <chris@zankel.net>
  *
@@ -174,6 +175,10 @@ ENTRY(fast_unaligned)
 	s32i	a0, a2, PT_AREG2
 	s32i	a3, a2, PT_AREG3
 
+	rsr	a3, excsave1
+	movi	a4, fast_unaligned_fixup
+	s32i	a4, a3, EXC_TABLE_FIXUP
+
 	/* Keep value of SAR in a0 */
 
 	rsr	a0, sar
@@ -225,10 +230,6 @@ ENTRY(fast_unaligned)
 	addx8	a5, a6, a5
 	jx	a5		# jump into table
 
-	/* Invalid instruction, CRITICAL! */
-.Linvalid_instruction_load:
-	j	.Linvalid_instruction
-
 	/* Load: Load memory address. */
 
 .Lload:	movi	a3, ~3
@@ -272,18 +273,6 @@ ENTRY(fast_unaligned)
 	/* Set target register. */
 
 1:
-
-#if XCHAL_HAVE_LOOPS
-	rsr	a5, lend	# check if we reached LEND
-	bne	a7, a5, 1f
-	rsr	a5, lcount	# and LCOUNT != 0
-	beqz	a5, 1f
-	addi	a5, a5, -1	# decrement LCOUNT and set
-	rsr	a7, lbeg	# set PC to LBEGIN
-	wsr	a5, lcount
-#endif
-
-1:	wsr	a7, epc1	# skip load instruction
 	extui	a4, a4, INSN_T, 4	# extract target register
 	movi	a5, .Lload_table
 	addx8	a4, a4, a5
@@ -326,6 +315,35 @@ ENTRY(fast_unaligned)
 	mov	a3, a14	;	_j 1f;	.align 8
 	mov	a3, a15	;	_j 1f;	.align 8
 
+	/* We cannot handle this exception. */
+
+	.extern _kernel_exception
+.Linvalid_instruction_load:
+.Linvalid_instruction_store:
+
+	movi	a4, 0
+	rsr	a3, excsave1
+	s32i	a4, a3, EXC_TABLE_FIXUP
+
+	/* Restore a4...a8 and SAR, set SP, and jump to default exception. */
+
+	l32i	a8, a2, PT_AREG8
+	l32i	a7, a2, PT_AREG7
+	l32i	a6, a2, PT_AREG6
+	l32i	a5, a2, PT_AREG5
+	l32i	a4, a2, PT_AREG4
+	wsr	a0, sar
+	mov	a1, a2
+
+	rsr	a0, ps
+	bbsi.l	a0, PS_UM_BIT, 2f	# jump if user mode
+
+	movi	a0, _kernel_exception
+	jx	a0
+
+2:	movi	a0, _user_exception
+	jx	a0
+
 1:	# a7: instruction pointer, a4: instruction, a3: value
 
 	movi	a6, 0	# mask: ffffffff:00000000
@@ -353,17 +371,6 @@ ENTRY(fast_unaligned)
 	/* Get memory address */
 
 1:
-#if XCHAL_HAVE_LOOPS
-	rsr	a4, lend	# check if we reached LEND
-	bne	a7, a4, 1f
-	rsr	a4, lcount	# and LCOUNT != 0
-	beqz	a4, 1f
-	addi	a4, a4, -1	# decrement LCOUNT and set
-	rsr	a7, lbeg	# set PC to LBEGIN
-	wsr	a4, lcount
-#endif
-
-1:	wsr	a7, epc1	# skip store instruction
 	movi	a4, ~3
 	and	a4, a4, a8	# align memory address
 
@@ -375,25 +382,25 @@ ENTRY(fast_unaligned)
 #endif
 
 	__ssa8r a8
-	__src_b	a7, a5, a6	# lo-mask F..F0..0 (BE) 0..0F..F (LE)
+	__src_b	a8, a5, a6	# lo-mask F..F0..0 (BE) 0..0F..F (LE)
 	__src_b	a6, a6, a5	# hi-mask 0..0F..F (BE) F..F0..0 (LE)
 #ifdef UNALIGNED_USER_EXCEPTION
 	l32e	a5, a4, -8
 #else
 	l32i	a5, a4, 0	# load lower address word
 #endif
-	and	a5, a5, a7	# mask
-	__sh	a7, a3		# shift value
-	or	a5, a5, a7	# or with original value
+	and	a5, a5, a8	# mask
+	__sh	a8, a3		# shift value
+	or	a5, a5, a8	# or with original value
 #ifdef UNALIGNED_USER_EXCEPTION
 	s32e	a5, a4, -8
-	l32e	a7, a4, -4
+	l32e	a8, a4, -4
 #else
 	s32i	a5, a4, 0	# store
-	l32i	a7, a4, 4	# same for upper address word
+	l32i	a8, a4, 4	# same for upper address word
 #endif
 	__sl	a5, a3
-	and	a6, a7, a6
+	and	a6, a8, a6
 	or	a6, a6, a5
 #ifdef UNALIGNED_USER_EXCEPTION
 	s32e	a6, a4, -4
@@ -401,9 +408,27 @@ ENTRY(fast_unaligned)
 	s32i	a6, a4, 4
 #endif
 
-	/* Done. restore stack and return */
-
 .Lexit:
+#if XCHAL_HAVE_LOOPS
+	rsr	a4, lend	# check if we reached LEND
+	bne	a7, a4, 1f
+	rsr	a4, lcount	# and LCOUNT != 0
+	beqz	a4, 1f
+	addi	a4, a4, -1	# decrement LCOUNT and set
+	rsr	a7, lbeg	# set PC to LBEGIN
+	wsr	a4, lcount
+#endif
+
+1:	wsr	a7, epc1	# skip emulated instruction
+
+	/* Update icount if we're single-stepping in userspace. */
+	rsr	a4, icountlevel
+	beqz	a4, 1f
+	bgeui	a4, LOCKLEVEL + 1, 1f
+	rsr	a4, icount
+	addi	a4, a4, 1
+	wsr	a4, icount
+1:
 	movi	a4, 0
 	rsr	a3, excsave1
 	s32i	a4, a3, EXC_TABLE_FIXUP
@@ -424,31 +449,40 @@ ENTRY(fast_unaligned)
 	l32i	a2, a2, PT_AREG2
 	rfe
 
-	/* We cannot handle this exception. */
+ENDPROC(fast_unaligned)
 
-	.extern _kernel_exception
-.Linvalid_instruction_store:
-.Linvalid_instruction:
+ENTRY(fast_unaligned_fixup)
 
-	/* Restore a4...a8 and SAR, set SP, and jump to default exception. */
+	l32i	a2, a3, EXC_TABLE_DOUBLE_SAVE
+	wsr	a3, excsave1
 
 	l32i	a8, a2, PT_AREG8
 	l32i	a7, a2, PT_AREG7
 	l32i	a6, a2, PT_AREG6
 	l32i	a5, a2, PT_AREG5
 	l32i	a4, a2, PT_AREG4
+	l32i	a0, a2, PT_AREG2
+	xsr	a0, depc	# restore depc and a0
 	wsr	a0, sar
-	mov	a1, a2
+
+	rsr	a0, exccause
+	s32i	a0, a2, PT_DEPC	# mark as a regular exception
 
 	rsr	a0, ps
-	bbsi.l	a2, PS_UM_BIT, 1f	# jump if user mode
+	bbsi.l	a0, PS_UM_BIT, 1f	# jump if user mode
 
-	movi	a0, _kernel_exception
+	rsr	a0, exccause
+	addx4	a0, a0, a3	# find entry in table
+	l32i	a0, a0, EXC_TABLE_FAST_KERNEL	# load handler
+	l32i	a3, a2, PT_AREG3
 	jx	a0
-
-1:	movi	a0, _user_exception
+1:
+	rsr	a0, exccause
+	addx4	a0, a0, a3	# find entry in table
+	l32i	a0, a0, EXC_TABLE_FAST_USER	# load handler
+	l32i	a3, a2, PT_AREG3
 	jx	a0
 
-ENDPROC(fast_unaligned)
+ENDPROC(fast_unaligned_fixup)
 
 #endif /* XCHAL_UNALIGNED_LOAD_EXCEPTION || XCHAL_UNALIGNED_STORE_EXCEPTION */