/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2003, 04, 07 Ralf Baechle <ralf@linux-mips.org>
 * Copyright (C) MIPS Technologies, Inc.
 *   written by Ralf Baechle <ralf@linux-mips.org>
 */
  10. #ifndef _ASM_HAZARDS_H
  11. #define _ASM_HAZARDS_H
  12. #ifdef __ASSEMBLY__
  13. #define ASMMACRO(name, code...) .macro name; code; .endm
  14. #else
  15. #define ASMMACRO(name, code...) \
  16. __asm__(".macro " #name "; " #code "; .endm"); \
  17. \
  18. static inline void name(void) \
  19. { \
  20. __asm__ __volatile__ (#name); \
  21. }
  22. /*
  23. * MIPS R2 instruction hazard barrier. Needs to be called as a subroutine.
  24. */
  25. extern void mips_ihb(void);
  26. #endif
  27. ASMMACRO(_ssnop,
  28. sll $0, $0, 1
  29. )
  30. ASMMACRO(_ehb,
  31. sll $0, $0, 3
  32. )
  33. /*
  34. * TLB hazards
  35. */
  36. #if defined(CONFIG_CPU_MIPSR2)
  37. /*
  38. * MIPSR2 defines ehb for hazard avoidance
  39. */
  40. ASMMACRO(mtc0_tlbw_hazard,
  41. _ehb
  42. )
  43. ASMMACRO(tlbw_use_hazard,
  44. _ehb
  45. )
  46. ASMMACRO(tlb_probe_hazard,
  47. _ehb
  48. )
  49. ASMMACRO(irq_enable_hazard,
  50. _ehb
  51. )
  52. ASMMACRO(irq_disable_hazard,
  53. _ehb
  54. )
  55. ASMMACRO(back_to_back_c0_hazard,
  56. _ehb
  57. )
  58. /*
  59. * gcc has a tradition of misscompiling the previous construct using the
  60. * address of a label as argument to inline assembler. Gas otoh has the
  61. * annoying difference between la and dla which are only usable for 32-bit
  62. * rsp. 64-bit code, so can't be used without conditional compilation.
  63. * The alterantive is switching the assembler to 64-bit code which happens
  64. * to work right even for 32-bit code ...
  65. */
  66. #define instruction_hazard() \
  67. do { \
  68. unsigned long tmp; \
  69. \
  70. __asm__ __volatile__( \
  71. " .set mips64r2 \n" \
  72. " dla %0, 1f \n" \
  73. " jr.hb %0 \n" \
  74. " .set mips0 \n" \
  75. "1: \n" \
  76. : "=r" (tmp)); \
  77. } while (0)
  78. #elif defined(CONFIG_CPU_R10000)
  79. /*
  80. * R10000 rocks - all hazards handled in hardware, so this becomes a nobrainer.
  81. */
  82. ASMMACRO(mtc0_tlbw_hazard,
  83. )
  84. ASMMACRO(tlbw_use_hazard,
  85. )
  86. ASMMACRO(tlb_probe_hazard,
  87. )
  88. ASMMACRO(irq_enable_hazard,
  89. )
  90. ASMMACRO(irq_disable_hazard,
  91. )
  92. ASMMACRO(back_to_back_c0_hazard,
  93. )
  94. #define instruction_hazard() do { } while (0)
  95. #elif defined(CONFIG_CPU_RM9000)
  96. /*
  97. * RM9000 hazards. When the JTLB is updated by tlbwi or tlbwr, a subsequent
  98. * use of the JTLB for instructions should not occur for 4 cpu cycles and use
  99. * for data translations should not occur for 3 cpu cycles.
  100. */
  101. ASMMACRO(mtc0_tlbw_hazard,
  102. _ssnop; _ssnop; _ssnop; _ssnop
  103. )
  104. ASMMACRO(tlbw_use_hazard,
  105. _ssnop; _ssnop; _ssnop; _ssnop
  106. )
  107. ASMMACRO(tlb_probe_hazard,
  108. _ssnop; _ssnop; _ssnop; _ssnop
  109. )
  110. ASMMACRO(irq_enable_hazard,
  111. )
  112. ASMMACRO(irq_disable_hazard,
  113. )
  114. ASMMACRO(back_to_back_c0_hazard,
  115. )
  116. #define instruction_hazard() do { } while (0)
  117. #elif defined(CONFIG_CPU_SB1)
  118. /*
  119. * Mostly like R4000 for historic reasons
  120. */
  121. ASMMACRO(mtc0_tlbw_hazard,
  122. )
  123. ASMMACRO(tlbw_use_hazard,
  124. )
  125. ASMMACRO(tlb_probe_hazard,
  126. )
  127. ASMMACRO(irq_enable_hazard,
  128. )
  129. ASMMACRO(irq_disable_hazard,
  130. _ssnop; _ssnop; _ssnop
  131. )
  132. ASMMACRO(back_to_back_c0_hazard,
  133. )
  134. #define instruction_hazard() do { } while (0)
  135. #else
  136. /*
  137. * Finally the catchall case for all other processors including R4000, R4400,
  138. * R4600, R4700, R5000, RM7000, NEC VR41xx etc.
  139. *
  140. * The taken branch will result in a two cycle penalty for the two killed
  141. * instructions on R4000 / R4400. Other processors only have a single cycle
  142. * hazard so this is nice trick to have an optimal code for a range of
  143. * processors.
  144. */
  145. ASMMACRO(mtc0_tlbw_hazard,
  146. nop; nop
  147. )
  148. ASMMACRO(tlbw_use_hazard,
  149. nop; nop; nop
  150. )
  151. ASMMACRO(tlb_probe_hazard,
  152. nop; nop; nop
  153. )
  154. ASMMACRO(irq_enable_hazard,
  155. _ssnop; _ssnop; _ssnop;
  156. )
  157. ASMMACRO(irq_disable_hazard,
  158. nop; nop; nop
  159. )
  160. ASMMACRO(back_to_back_c0_hazard,
  161. _ssnop; _ssnop; _ssnop;
  162. )
  163. #define instruction_hazard() do { } while (0)
  164. #endif
  165. /* FPU hazards */
  166. #if defined(CONFIG_CPU_SB1)
  167. ASMMACRO(enable_fpu_hazard,
  168. .set push;
  169. .set mips64;
  170. .set noreorder;
  171. _ssnop;
  172. bnezl $0,.+4;
  173. _ssnop;
  174. .set pop
  175. )
  176. ASMMACRO(disable_fpu_hazard,
  177. )
  178. #elif defined(CONFIG_CPU_MIPSR2)
  179. ASMMACRO(enable_fpu_hazard,
  180. _ehb
  181. )
  182. ASMMACRO(disable_fpu_hazard,
  183. _ehb
  184. )
  185. #else
  186. ASMMACRO(enable_fpu_hazard,
  187. nop; nop; nop; nop
  188. )
  189. ASMMACRO(disable_fpu_hazard,
  190. _ehb
  191. )
  192. #endif
  193. #endif /* _ASM_HAZARDS_H */