@@ -10,34 +10,13 @@
#ifndef _ASM_HAZARDS_H
#define _ASM_HAZARDS_H

-#ifdef __ASSEMBLY__
-#define ASMMACRO(name, code...) .macro name; code; .endm
-#else
-
-#include <asm/cpu-features.h>
-
-#define ASMMACRO(name, code...) \
-__asm__(".macro " #name "; " #code "; .endm"); \
- \
-static inline void name(void) \
-{ \
- __asm__ __volatile__ (#name); \
-}
-
-/*
- * MIPS R2 instruction hazard barrier. Needs to be called as a subroutine.
- */
-extern void mips_ihb(void);
-
-#endif
+#include <linux/stringify.h>

-ASMMACRO(_ssnop,
- sll $0, $0, 1
- )
+#define ___ssnop \
+ sll $0, $0, 1

-ASMMACRO(_ehb,
- sll $0, $0, 3
- )
+#define ___ehb \
+ sll $0, $0, 3

/*
* TLB hazards
@@ -48,24 +27,24 @@ ASMMACRO(_ehb,
* MIPSR2 defines ehb for hazard avoidance
*/

-ASMMACRO(mtc0_tlbw_hazard,
- _ehb
- )
-ASMMACRO(tlbw_use_hazard,
- _ehb
- )
-ASMMACRO(tlb_probe_hazard,
- _ehb
- )
-ASMMACRO(irq_enable_hazard,
- _ehb
- )
-ASMMACRO(irq_disable_hazard,
- _ehb
- )
-ASMMACRO(back_to_back_c0_hazard,
- _ehb
- )
+#define __mtc0_tlbw_hazard \
+ ___ehb
+
+#define __tlbw_use_hazard \
+ ___ehb
+
+#define __tlb_probe_hazard \
+ ___ehb
+
+#define __irq_enable_hazard \
+ ___ehb
+
+#define __irq_disable_hazard \
+ ___ehb
+
+#define __back_to_back_c0_hazard \
+ ___ehb
+
/*
* gcc has a tradition of misscompiling the previous construct using the
* address of a label as argument to inline assembler. Gas otoh has the
@@ -94,24 +73,42 @@ do { \
* These are slightly complicated by the fact that we guarantee R1 kernels to
* run fine on R2 processors.
*/
-ASMMACRO(mtc0_tlbw_hazard,
- _ssnop; _ssnop; _ehb
- )
-ASMMACRO(tlbw_use_hazard,
- _ssnop; _ssnop; _ssnop; _ehb
- )
-ASMMACRO(tlb_probe_hazard,
- _ssnop; _ssnop; _ssnop; _ehb
- )
-ASMMACRO(irq_enable_hazard,
- _ssnop; _ssnop; _ssnop; _ehb
- )
-ASMMACRO(irq_disable_hazard,
- _ssnop; _ssnop; _ssnop; _ehb
- )
-ASMMACRO(back_to_back_c0_hazard,
- _ssnop; _ssnop; _ssnop; _ehb
- )
+
+#define __mtc0_tlbw_hazard \
+ ___ssnop; \
+ ___ssnop; \
+ ___ehb
+
+#define __tlbw_use_hazard \
+ ___ssnop; \
+ ___ssnop; \
+ ___ssnop; \
+ ___ehb
+
+#define __tlb_probe_hazard \
+ ___ssnop; \
+ ___ssnop; \
+ ___ssnop; \
+ ___ehb
+
+#define __irq_enable_hazard \
+ ___ssnop; \
+ ___ssnop; \
+ ___ssnop; \
+ ___ehb
+
+#define __irq_disable_hazard \
+ ___ssnop; \
+ ___ssnop; \
+ ___ssnop; \
+ ___ehb
+
+#define __back_to_back_c0_hazard \
+ ___ssnop; \
+ ___ssnop; \
+ ___ssnop; \
+ ___ehb
+
/*
* gcc has a tradition of misscompiling the previous construct using the
* address of a label as argument to inline assembler. Gas otoh has the
@@ -147,18 +144,18 @@ do { \
* R10000 rocks - all hazards handled in hardware, so this becomes a nobrainer.
*/

-ASMMACRO(mtc0_tlbw_hazard,
- )
-ASMMACRO(tlbw_use_hazard,
- )
-ASMMACRO(tlb_probe_hazard,
- )
-ASMMACRO(irq_enable_hazard,
- )
-ASMMACRO(irq_disable_hazard,
- )
-ASMMACRO(back_to_back_c0_hazard,
- )
+#define __mtc0_tlbw_hazard
+
+#define __tlbw_use_hazard
+
+#define __tlb_probe_hazard
+
+#define __irq_enable_hazard
+
+#define __irq_disable_hazard
+
+#define __back_to_back_c0_hazard
+
#define instruction_hazard() do { } while (0)

#elif defined(CONFIG_CPU_SB1)
@@ -166,19 +163,21 @@ ASMMACRO(back_to_back_c0_hazard,
/*
* Mostly like R4000 for historic reasons
*/
-ASMMACRO(mtc0_tlbw_hazard,
- )
-ASMMACRO(tlbw_use_hazard,
- )
-ASMMACRO(tlb_probe_hazard,
- )
-ASMMACRO(irq_enable_hazard,
- )
-ASMMACRO(irq_disable_hazard,
- _ssnop; _ssnop; _ssnop
- )
-ASMMACRO(back_to_back_c0_hazard,
- )
+#define __mtc0_tlbw_hazard
+
+#define __tlbw_use_hazard
+
+#define __tlb_probe_hazard
+
+#define __irq_enable_hazard
+
+#define __irq_disable_hazard \
+ ___ssnop; \
+ ___ssnop; \
+ ___ssnop
+
+#define __back_to_back_c0_hazard
+
#define instruction_hazard() do { } while (0)

#else
@@ -192,24 +191,35 @@ ASMMACRO(back_to_back_c0_hazard,
* hazard so this is nice trick to have an optimal code for a range of
* processors.
*/
-ASMMACRO(mtc0_tlbw_hazard,
- nop; nop
- )
-ASMMACRO(tlbw_use_hazard,
- nop; nop; nop
- )
-ASMMACRO(tlb_probe_hazard,
- nop; nop; nop
- )
-ASMMACRO(irq_enable_hazard,
- _ssnop; _ssnop; _ssnop;
- )
-ASMMACRO(irq_disable_hazard,
- nop; nop; nop
- )
-ASMMACRO(back_to_back_c0_hazard,
- _ssnop; _ssnop; _ssnop;
- )
+#define __mtc0_tlbw_hazard \
+ nop; \
+ nop
+
+#define __tlbw_use_hazard \
+ nop; \
+ nop; \
+ nop
+
+#define __tlb_probe_hazard \
+ nop; \
+ nop; \
+ nop
+
+#define __irq_enable_hazard \
+ ___ssnop; \
+ ___ssnop; \
+ ___ssnop
+
+#define __irq_disable_hazard \
+ nop; \
+ nop; \
+ nop
+
+#define __back_to_back_c0_hazard \
+ ___ssnop; \
+ ___ssnop; \
+ ___ssnop
+
#define instruction_hazard() do { } while (0)

#endif
@@ -218,32 +228,137 @@ ASMMACRO(back_to_back_c0_hazard,
/* FPU hazards */

#if defined(CONFIG_CPU_SB1)
-ASMMACRO(enable_fpu_hazard,
- .set push;
- .set mips64;
- .set noreorder;
- _ssnop;
- bnezl $0, .+4;
- _ssnop;
- .set pop
-)
-ASMMACRO(disable_fpu_hazard,
-)
+
+#define __enable_fpu_hazard \
+ .set push; \
+ .set mips64; \
+ .set noreorder; \
+ ___ssnop; \
+ bnezl $0, .+4; \
+ ___ssnop; \
+ .set pop
+
+#define __disable_fpu_hazard

#elif defined(CONFIG_CPU_MIPSR2)
-ASMMACRO(enable_fpu_hazard,
- _ehb
-)
-ASMMACRO(disable_fpu_hazard,
- _ehb
-)
+
+#define __enable_fpu_hazard \
+ ___ehb
+
+#define __disable_fpu_hazard \
+ ___ehb
+
#else
-ASMMACRO(enable_fpu_hazard,
- nop; nop; nop; nop
-)
-ASMMACRO(disable_fpu_hazard,
- _ehb
-)
+
+#define __enable_fpu_hazard \
+ nop; \
+ nop; \
+ nop; \
+ nop
+
+#define __disable_fpu_hazard \
+ ___ehb
+
#endif

+#ifdef __ASSEMBLY__
+
+#define _ssnop ___ssnop
+#define _ehb ___ehb
+#define mtc0_tlbw_hazard __mtc0_tlbw_hazard
+#define tlbw_use_hazard __tlbw_use_hazard
+#define tlb_probe_hazard __tlb_probe_hazard
+#define irq_enable_hazard __irq_enable_hazard
+#define irq_disable_hazard __irq_disable_hazard
+#define back_to_back_c0_hazard __back_to_back_c0_hazard
+#define enable_fpu_hazard __enable_fpu_hazard
+#define disable_fpu_hazard __disable_fpu_hazard
+
+#else
+
+#define _ssnop() \
+do { \
+ __asm__ __volatile__( \
+ __stringify(___ssnop) \
+ ); \
+} while (0)
+
+#define _ehb() \
+do { \
+ __asm__ __volatile__( \
+ __stringify(___ehb) \
+ ); \
+} while (0)
+
+
+#define mtc0_tlbw_hazard() \
+do { \
+ __asm__ __volatile__( \
+ __stringify(__mtc0_tlbw_hazard) \
+ ); \
+} while (0)
+
+
+#define tlbw_use_hazard() \
+do { \
+ __asm__ __volatile__( \
+ __stringify(__tlbw_use_hazard) \
+ ); \
+} while (0)
+
+
+#define tlb_probe_hazard() \
+do { \
+ __asm__ __volatile__( \
+ __stringify(__tlb_probe_hazard) \
+ ); \
+} while (0)
+
+
+#define irq_enable_hazard() \
+do { \
+ __asm__ __volatile__( \
+ __stringify(__irq_enable_hazard) \
+ ); \
+} while (0)
+
+
+#define irq_disable_hazard() \
+do { \
+ __asm__ __volatile__( \
+ __stringify(__irq_disable_hazard) \
+ ); \
+} while (0)
+
+
+#define back_to_back_c0_hazard() \
+do { \
+ __asm__ __volatile__( \
+ __stringify(__back_to_back_c0_hazard) \
+ ); \
+} while (0)
+
+
+#define enable_fpu_hazard() \
+do { \
+ __asm__ __volatile__( \
+ __stringify(__enable_fpu_hazard) \
+ ); \
+} while (0)
+
+
+#define disable_fpu_hazard() \
+do { \
+ __asm__ __volatile__( \
+ __stringify(__disable_fpu_hazard) \
+ ); \
+} while (0)
+
+/*
+ * MIPS R2 instruction hazard barrier. Needs to be called as a subroutine.
+ */
+extern void mips_ihb(void);
+
+#endif /* __ASSEMBLY__ */
+
#endif /* _ASM_HAZARDS_H */
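
Usage note (illustrative, not part of the patch): on the C side the hazard
macros keep their old function-like form; each now expands to a single inline
asm statement whose body is the stringified per-CPU sequence selected above
(ehb on MIPS R2, ssnops or plain nops on older cores). A minimal sketch of a
caller, assuming the usual <asm/mipsregs.h> accessors (write_c0_entryhi(),
tlb_write_indexed()); the function name is made up, and real call sites such
as arch/mips/mm/tlb-r4k.c may differ in detail:

	#include <asm/hazards.h>
	#include <asm/mipsregs.h>

	/* Hypothetical helper: write one indexed TLB entry. */
	static void example_tlb_write(unsigned long entryhi)
	{
		write_c0_entryhi(entryhi);
		mtc0_tlbw_hazard();	/* cover the mtc0 -> tlbwi hazard */
		tlb_write_indexed();
		tlbw_use_hazard();	/* cover the tlbwi -> use hazard */
	}

On the assembly side the same names are now plain preprocessor macros, so .S
code that writes e.g. "mtc0_tlbw_hazard" without parentheses should continue
to assemble as before, just without gas .macro definitions coming from C code.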