@@ -38,6 +38,7 @@
 #include <linux/unistd.h>
 #include <asm/blackfin.h>
 #include <asm/errno.h>
+#include <asm/fixed_code.h>
 #include <asm/thread_info.h> /* TIF_NEED_RESCHED */
 #include <asm/asm-offsets.h>
 #include <asm/trace.h>
@@ -52,15 +53,6 @@
 # define EX_SCRATCH_REG CYCLES
 #endif
 
-#if ANOMALY_05000281
-ENTRY(_safe_speculative_execution)
-	NOP;
-	NOP;
-	NOP;
-	jump _safe_speculative_execution;
-ENDPROC(_safe_speculative_execution)
-#endif
-
 #ifdef CONFIG_EXCPT_IRQ_SYSC_L1
 .section .l1.text
 #else
@@ -230,6 +222,26 @@ ENTRY(_ex_trap_c)
 	[p4] = p5;
 	csync;
 
+	p4.l = lo(DCPLB_FAULT_ADDR);
+	p4.h = hi(DCPLB_FAULT_ADDR);
+	r7 = [p4];
+	p5.h = _saved_dcplb_fault_addr;
+	p5.l = _saved_dcplb_fault_addr;
+	[p5] = r7;
+
+	r7 = [p4 + (ICPLB_FAULT_ADDR - DCPLB_FAULT_ADDR)];
+	p5.h = _saved_icplb_fault_addr;
+	p5.l = _saved_icplb_fault_addr;
+	[p5] = r7;
+
+	p4.l = __retx;
+	p4.h = __retx;
+	r6 = retx;
+	[p4] = r6;
+	p4.l = lo(SAFE_USER_INSTRUCTION);
+	p4.h = hi(SAFE_USER_INSTRUCTION);
+	retx = p4;
+
 	/* Disable all interrupts, but make sure level 5 is enabled so
 	 * we can switch to that level. Save the old mask. */
 	cli r6;
@@ -239,23 +251,6 @@ ENTRY(_ex_trap_c)
 	r6 = 0x3f;
 	sti r6;
 
-	/* Save the excause into a circular buffer, in case the instruction
-	 * which caused this excecptions causes others.
-	 */
-	P5.l = _in_ptr_excause;
-	P5.h = _in_ptr_excause;
-	R7 = [P5];
-	R7 += 4;
-	R6 = 0xF;
-	R7 = R7 & R6;
-	[P5] = R7;
-	R6.l = _excause_circ_buf;
-	R6.h = _excause_circ_buf;
-	R7 = R7 + R6;
-	p5 = R7;
-	R6 = SEQSTAT;
-	[P5] = R6;
-
 	(R7:6,P5:4) = [sp++];
 	ASTAT = [sp++];
 	SP = EX_SCRATCH_REG;
@@ -312,6 +307,11 @@ ENDPROC(_double_fault)
 ENTRY(_exception_to_level5)
 	SAVE_ALL_SYS
 
+	p4.l = __retx;
+	p4.h = __retx;
+	r6 = [p4];
+	[sp + PT_PC] = r6;
+
 	/* Restore interrupt mask. We haven't pushed RETI, so this
 	 * doesn't enable interrupts until we return from this handler. */
 	p4.l = _excpt_saved_imask;
@@ -333,42 +333,11 @@ ENTRY(_exception_to_level5)
 	r0 = [p2];	/* Read current IPEND */
 	[sp + PT_IPEND] = r0;	/* Store IPEND */
 
-	/* Pop the excause from the circular buffer and push it on the stack
-	 * (in the right place - if you change the location of SEQSTAT, you
-	 * must change this offset.
-	 */
-.L_excep_to_5_again:
-	P5.l = _out_ptr_excause;
-	P5.h = _out_ptr_excause;
-	R7 = [P5];
-	R7 += 4;
-	R6 = 0xF;
-	R7 = R7 & R6;
-	[P5] = R7;
-	R6.l = _excause_circ_buf;
-	R6.h = _excause_circ_buf;
-	R7 = R7 + R6;
-	P5 = R7;
-	R1 = [P5];
-	[SP + PT_SEQSTAT] = r1;
-
 	r0 = sp;	/* stack frame pt_regs pointer argument ==> r0 */
 	SP += -12;
 	call _trap_c;
 	SP += 12;
 
-	/* See if anything else is in the exception buffer
-	 * if there is, process it
-	 */
-	P5.l = _out_ptr_excause;
-	P5.h = _out_ptr_excause;
-	P4.l = _in_ptr_excause;
-	P4.h = _in_ptr_excause;
-	R6 = [P5];
-	R7 = [P4];
-	CC = R6 == R7;
-	if ! CC JUMP .L_excep_to_5_again
-
 	call _ret_from_exception;
 	RESTORE_ALL_SYS
 	rti;
@@ -732,8 +701,8 @@ ENTRY(_return_from_int)
 	[p0] = p1;
 	csync;
 #if ANOMALY_05000281
-	r0.l = _safe_speculative_execution;
-	r0.h = _safe_speculative_execution;
+	r0.l = lo(SAFE_USER_INSTRUCTION);
+	r0.h = hi(SAFE_USER_INSTRUCTION);
 	reti = r0;
 #endif
 	r0 = 0x801f (z);
@@ -746,8 +715,8 @@ ENDPROC(_return_from_int)
 
 ENTRY(_lower_to_irq14)
 #if ANOMALY_05000281
-	r0.l = _safe_speculative_execution;
-	r0.h = _safe_speculative_execution;
+	r0.l = lo(SAFE_USER_INSTRUCTION);
+	r0.h = hi(SAFE_USER_INSTRUCTION);
 	reti = r0;
 #endif
 	r0 = 0x401f;
@@ -814,20 +783,6 @@ _schedule_and_signal:
 	rti;
 ENDPROC(_lower_to_irq14)
 
-/* Make sure when we start, that the circular buffer is initialized properly
- * R0 and P0 are call clobbered, so we can use them here.
- */
-ENTRY(_init_exception_buff)
-	r0 = 0;
-	p0.h = _in_ptr_excause;
-	p0.l = _in_ptr_excause;
-	[p0] = r0;
-	p0.h = _out_ptr_excause;
-	p0.l = _out_ptr_excause;
-	[p0] = r0;
-	rts;
-ENDPROC(_init_exception_buff)
-
 /* We handle this 100% in exception space - to reduce overhead
  * Only potiential problem is if the software buffer gets swapped out of the
  * CPLB table - then double fault. - so we don't let this happen in other places
@@ -1403,17 +1358,7 @@ _exception_stack_top:
 _last_cplb_fault_retx:
 	.long 0;
 #endif
-/*
- * Single instructions can have multiple faults, which need to be
- * handled by traps.c, in irq5. We store the exception cause to ensure
- * we don't miss a double fault condition
- */
-ENTRY(_in_ptr_excause)
-	.long 0;
-ENTRY(_out_ptr_excause)
+	/* Used to save the real RETX when temporarily storing a safe
+	 * return address. */
+__retx:
 	.long 0;
-ALIGN
-ENTRY(_excause_circ_buf)
-	.rept 4
-	.long 0
-	.endr
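
For context, a minimal C sketch (not part of the patch) of how the values latched in _ex_trap_c above could be consumed on the C side. It assumes the asm symbols _saved_dcplb_fault_addr and _saved_icplb_fault_addr resolve to C globals saved_dcplb_fault_addr and saved_icplb_fault_addr (Blackfin prepends an underscore to C symbol names); the helper name below is hypothetical.

/* Hypothetical illustration: the exception stub copies DCPLB_FAULT_ADDR
 * and ICPLB_FAULT_ADDR into these globals so the level-5 handler can
 * still report the original faulting address, even if servicing the
 * exception triggers another CPLB miss that overwrites the hardware
 * registers before trap_c() gets to look at them.
 */
extern unsigned long saved_dcplb_fault_addr;	/* written by _ex_trap_c */
extern unsigned long saved_icplb_fault_addr;	/* written by _ex_trap_c */

/* Hypothetical helper: pick the latched fault address for a data or
 * instruction CPLB exception instead of re-reading the MMR. */
static inline unsigned long latched_cplb_fault_addr(int is_instruction)
{
	return is_instruction ? saved_icplb_fault_addr
			      : saved_dcplb_fault_addr;
}

The latching matters because the heavy lifting happens later, at interrupt level 5 with exceptions re-enabled, so a CPLB miss taken while handling the first fault would otherwise clobber the fault-address MMRs before they are reported.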