/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2003, 2004 Ralf Baechle <ralf@linux-mips.org>
 * Copyright (C) MIPS Technologies, Inc.
 *   written by Ralf Baechle <ralf@linux-mips.org>
 */
#ifndef _ASM_HAZARDS_H
#define _ASM_HAZARDS_H

/*
 * ASMMACRO(name, code) makes one hazard-barrier primitive usable from both
 * assembler and C translation units:
 *
 *  - when included from a .S file (__ASSEMBLY__) it expands to a plain gas
 *    .macro definition, so assembler code can simply write "name";
 *
 *  - when included from C it emits the very same gas .macro into the
 *    assembler output via a top-level __asm__(), then wraps it in a
 *    static inline function of the same name whose body is just the macro
 *    invocation, so C callers get the identical instruction sequence.
 *
 * The variadic "code..." lets the instruction list contain commas.
 */
#ifdef __ASSEMBLY__
#define ASMMACRO(name, code...) .macro name; code; .endm
#else
/* __volatile__ keeps the (possibly empty) barrier from being optimized away. */
#define ASMMACRO(name, code...) \
__asm__(".macro " #name "; " #code "; .endm"); \
\
static inline void name(void) \
{ \
__asm__ __volatile__ (#name); \
}
#endif
/*
 * _ssnop: SSNOP, encoded as "sll $0, $0, 1".  Architecturally a no-op, but
 * guaranteed to occupy an issue slot on its own on superscalar parts, which
 * makes the nop-count based delays below reliable.
 */
ASMMACRO(_ssnop,
sll $0, $0, 1
)

/*
 * _ehb: encoded as "sll $0, $0, 3".  On MIPS32/64 R2 this is the EHB
 * (execution hazard barrier) instruction; pre-R2 CPUs execute it as a
 * harmless no-op, so it is always safe to emit.
 */
ASMMACRO(_ehb,
sll $0, $0, 3
)
/*
 * TLB hazards
 */
#if defined(CONFIG_CPU_MIPSR2)

/*
 * MIPSR2 defines ehb for hazard avoidance, so every hazard barrier below
 * collapses to a single _ehb.
 */
ASMMACRO(mtc0_tlbw_hazard,
_ehb
)
ASMMACRO(tlbw_use_hazard,
_ehb
)
ASMMACRO(tlb_probe_hazard,
_ehb
)
ASMMACRO(irq_enable_hazard,
_ehb
)
ASMMACRO(irq_disable_hazard,
_ehb
)
ASMMACRO(back_to_back_c0_hazard,
_ehb
)
/*
 * gcc has a tradition of miscompiling the previous construct using the
 * address of a label as argument to inline assembler.  Gas otoh has the
 * annoying difference between la and dla which are only usable for 32-bit
 * resp. 64-bit code, so can't be used without conditional compilation.
 * The alternative is switching the assembler to 64-bit code which happens
 * to work right even for 32-bit code ...
 */
/*
 * instruction_hazard(): clears instruction hazards by loading the address
 * of the local label 1: and performing a hazard-barrier jump (jr.hb) to it.
 * .set mips64r2 is needed so gas accepts dla/jr.hb regardless of the
 * compile-time ISA; .set mips0 restores the previous ISA afterwards.
 */
#define instruction_hazard() \
do { \
unsigned long tmp; \
\
__asm__ __volatile__( \
" .set mips64r2 \n" \
" dla %0, 1f \n" \
" jr.hb %0 \n" \
" .set mips0 \n" \
"1: \n" \
: "=r" (tmp)); \
} while (0)
#elif defined(CONFIG_CPU_R10000)
/*
 * R10000 rocks - all hazards handled in hardware, so this becomes a nobrainer.
 * Every barrier expands to an empty instruction sequence.
 */
ASMMACRO(mtc0_tlbw_hazard,
)
ASMMACRO(tlbw_use_hazard,
)
ASMMACRO(tlb_probe_hazard,
)
ASMMACRO(irq_enable_hazard,
)
ASMMACRO(irq_disable_hazard,
)
ASMMACRO(back_to_back_c0_hazard,
)
#define instruction_hazard() do { } while (0)
#elif defined(CONFIG_CPU_RM9000)
/*
 * RM9000 hazards.  When the JTLB is updated by tlbwi or tlbwr, a subsequent
 * use of the JTLB for instructions should not occur for 4 cpu cycles and use
 * for data translations should not occur for 3 cpu cycles.  Four SSNOPs
 * cover the worst case for all three TLB barriers.
 */
ASMMACRO(mtc0_tlbw_hazard,
_ssnop; _ssnop; _ssnop; _ssnop
)
ASMMACRO(tlbw_use_hazard,
_ssnop; _ssnop; _ssnop; _ssnop
)
ASMMACRO(tlb_probe_hazard,
_ssnop; _ssnop; _ssnop; _ssnop
)
/* No interrupt or back-to-back c0 hazards on RM9000 - barriers are empty. */
ASMMACRO(irq_enable_hazard,
)
ASMMACRO(irq_disable_hazard,
)
ASMMACRO(back_to_back_c0_hazard,
)
#define instruction_hazard() do { } while (0)
#elif defined(CONFIG_CPU_SB1)
/*
 * Mostly like R4000 for historic reasons.  Only irq_disable needs an
 * explicit delay (three SSNOPs); all other barriers are empty.
 */
ASMMACRO(mtc0_tlbw_hazard,
)
ASMMACRO(tlbw_use_hazard,
)
ASMMACRO(tlb_probe_hazard,
)
ASMMACRO(irq_enable_hazard,
)
ASMMACRO(irq_disable_hazard,
_ssnop; _ssnop; _ssnop
)
ASMMACRO(back_to_back_c0_hazard,
)
#define instruction_hazard() do { } while (0)
#else
/*
 * Finally the catchall case for all other processors including R4000, R4400,
 * R4600, R4700, R5000, RM7000, NEC VR41xx etc.
 *
 * The taken branch will result in a two cycle penalty for the two killed
 * instructions on R4000 / R4400.  Other processors only have a single cycle
 * hazard so this is nice trick to have an optimal code for a range of
 * processors.
 */
ASMMACRO(mtc0_tlbw_hazard,
nop; nop
)
ASMMACRO(tlbw_use_hazard,
nop; nop; nop
)
ASMMACRO(tlb_probe_hazard,
nop; nop; nop
)
ASMMACRO(irq_enable_hazard,
)
ASMMACRO(irq_disable_hazard,
nop; nop; nop
)
/* SSNOPs rather than plain nops so the delay holds on superscalar parts. */
ASMMACRO(back_to_back_c0_hazard,
_ssnop; _ssnop; _ssnop;
)
#define instruction_hazard() do { } while (0)
#endif
/* FPU hazards */

#if defined(CONFIG_CPU_SB1)
/*
 * SB1: after enabling the FPU, issue an SSNOP and a never-taken
 * branch-likely (bnezl $0 nullifies its delay-slot SSNOP).
 * NOTE(review): presumably this drains the pipe so the freshly enabled
 * FPU is usable by the next instruction - confirm against the SB1 manual.
 * .set mips64/noreorder are needed so gas accepts and does not reorder
 * the sequence; push/pop restore the previous assembler options.
 */
ASMMACRO(enable_fpu_hazard,
.set push;
.set mips64;
.set noreorder;
_ssnop;
bnezl $0,.+4;
_ssnop;
.set pop
)
ASMMACRO(disable_fpu_hazard,
)

#elif defined(CONFIG_CPU_MIPSR2)
/* R2 and later: a plain execution hazard barrier either way. */
ASMMACRO(enable_fpu_hazard,
_ehb
)
ASMMACRO(disable_fpu_hazard,
_ehb
)
#else
/* Catchall: four nops after enabling; _ehb (a nop pre-R2) after disabling. */
ASMMACRO(enable_fpu_hazard,
nop; nop; nop; nop
)
ASMMACRO(disable_fpu_hazard,
_ehb
)
#endif

#endif /* _ASM_HAZARDS_H */