  1. /*
  2. * This file is subject to the terms and conditions of the GNU General Public
  3. * License. See the file "COPYING" in the main directory of this archive
  4. * for more details.
  5. *
  6. * Copyright (C) 2003, 04, 07 Ralf Baechle <ralf@linux-mips.org>
  7. * Copyright (C) MIPS Technologies, Inc.
  8. * written by Ralf Baechle <ralf@linux-mips.org>
  9. */
#ifndef _ASM_HAZARDS_H
#define _ASM_HAZARDS_H

#ifdef __ASSEMBLY__

/*
 * Assembly version: ASMMACRO() defines a plain gas macro with the given
 * body, which .S files can then invoke by name.
 */
#define ASMMACRO(name, code...) .macro name; code; .endm

#else

#include <asm/cpu-features.h>

/*
 * C version: ASMMACRO() does double duty.  It emits the same gas macro
 * at file scope (so other inline asm in the translation unit can use
 * it), and it additionally wraps a single invocation of that macro in a
 * same-named static inline function, so C code can issue the hazard
 * barrier as an ordinary call.
 */
#define ASMMACRO(name, code...) \
__asm__(".macro " #name "; " #code "; .endm"); \
\
static inline void name(void) \
{ \
	__asm__ __volatile__ (#name); \
}

/*
 * MIPS R2 instruction hazard barrier. Needs to be called as a subroutine.
 */
extern void mips_ihb(void);

#endif
/*
 * _ssnop: superscalar no-op.  Encoded as "sll $0, $0, 1" per the MIPS32
 * architecture spec; unlike a plain nop it cannot be dual-issued away
 * on superscalar cores, so it reliably consumes an issue slot.
 */
ASMMACRO(_ssnop,
	 sll	$0, $0, 1
	)

/*
 * _ehb: execution hazard barrier, encoded as "sll $0, $0, 3".  A real
 * barrier on MIPS R2; on pre-R2 CPUs the encoding executes as a
 * harmless no-op (which is why the R1 sequences below can append it
 * unconditionally).
 */
ASMMACRO(_ehb,
	 sll	$0, $0, 3
	)
/*
 * TLB hazards
 */
#if defined(CONFIG_CPU_MIPSR2)

/*
 * MIPSR2 defines ehb for hazard avoidance: a single architected barrier
 * covers every CP0/TLB hazard below.
 */
ASMMACRO(mtc0_tlbw_hazard,
	 _ehb
	)
ASMMACRO(tlbw_use_hazard,
	 _ehb
	)
ASMMACRO(tlb_probe_hazard,
	 _ehb
	)
ASMMACRO(irq_enable_hazard,
	 _ehb
	)
ASMMACRO(irq_disable_hazard,
	 _ehb
	)
ASMMACRO(back_to_back_c0_hazard,
	 _ehb
	)

/*
 * gcc has a tradition of miscompiling the previous construct using the
 * address of a label as argument to inline assembler.  Gas on the other
 * hand has the annoying difference between la and dla which are only
 * usable for 32-bit resp. 64-bit code, so they can't be used without
 * conditional compilation.  The alternative is switching the assembler
 * to 64-bit code which happens to work right even for 32-bit code ...
 */
#define instruction_hazard()						\
do {									\
	unsigned long tmp;						\
									\
	/* jr.hb to the very next instruction clears the hazard */	\
	__asm__ __volatile__(						\
	" .set mips64r2 \n"						\
	" dla %0, 1f \n"						\
	" jr.hb %0 \n"							\
	" .set mips0 \n"						\
	"1: \n"								\
	: "=r" (tmp));							\
} while (0)
#elif defined(CONFIG_CPU_MIPSR1)

/*
 * These are slightly complicated by the fact that we guarantee R1 kernels to
 * run fine on R2 processors: the _ssnop padding handles the R1 pipeline
 * hazards, and the trailing _ehb (a no-op encoding on R1) closes the
 * hazard when the same kernel runs on an R2 core.
 */
ASMMACRO(mtc0_tlbw_hazard,
	 _ssnop; _ssnop; _ehb
	)
ASMMACRO(tlbw_use_hazard,
	 _ssnop; _ssnop; _ssnop; _ehb
	)
ASMMACRO(tlb_probe_hazard,
	 _ssnop; _ssnop; _ssnop; _ehb
	)
ASMMACRO(irq_enable_hazard,
	 _ssnop; _ssnop; _ssnop; _ehb
	)
ASMMACRO(irq_disable_hazard,
	 _ssnop; _ssnop; _ssnop; _ehb
	)
ASMMACRO(back_to_back_c0_hazard,
	 _ssnop; _ssnop; _ssnop; _ehb
	)

/*
 * gcc has a tradition of miscompiling the previous construct using the
 * address of a label as argument to inline assembler.  Gas on the other
 * hand has the annoying difference between la and dla which are only
 * usable for 32-bit resp. 64-bit code, so they can't be used without
 * conditional compilation.  The alternative is switching the assembler
 * to 64-bit code which happens to work right even for 32-bit code ...
 */
#define __instruction_hazard()						\
do {									\
	unsigned long tmp;						\
									\
	__asm__ __volatile__(						\
	" .set mips64r2 \n"						\
	" dla %0, 1f \n"						\
	" jr.hb %0 \n"							\
	" .set mips0 \n"						\
	"1: \n"								\
	: "=r" (tmp));							\
} while (0)

/*
 * jr.hb is an R2 instruction, so only issue the barrier when the R1
 * kernel is actually running on an R2 core (runtime check).
 */
#define instruction_hazard()						\
do {									\
	if (cpu_has_mips_r2)						\
		__instruction_hazard();					\
} while (0)
#elif defined(CONFIG_CPU_R10000)

/*
 * R10000 rocks - all hazards handled in hardware, so this becomes a nobrainer:
 * every barrier is empty.
 */
ASMMACRO(mtc0_tlbw_hazard,
	)
ASMMACRO(tlbw_use_hazard,
	)
ASMMACRO(tlb_probe_hazard,
	)
ASMMACRO(irq_enable_hazard,
	)
ASMMACRO(irq_disable_hazard,
	)
ASMMACRO(back_to_back_c0_hazard,
	)
#define instruction_hazard() do { } while (0)
#elif defined(CONFIG_CPU_RM9000)

/*
 * RM9000 hazards.  When the JTLB is updated by tlbwi or tlbwr, a subsequent
 * use of the JTLB for instructions should not occur for 4 cpu cycles and use
 * for data translations should not occur for 3 cpu cycles.  Hence the four
 * _ssnops on each TLB barrier; the IRQ and back-to-back CP0 barriers need
 * no padding on this core.
 */
ASMMACRO(mtc0_tlbw_hazard,
	 _ssnop; _ssnop; _ssnop; _ssnop
	)
ASMMACRO(tlbw_use_hazard,
	 _ssnop; _ssnop; _ssnop; _ssnop
	)
ASMMACRO(tlb_probe_hazard,
	 _ssnop; _ssnop; _ssnop; _ssnop
	)
ASMMACRO(irq_enable_hazard,
	)
ASMMACRO(irq_disable_hazard,
	)
ASMMACRO(back_to_back_c0_hazard,
	)
#define instruction_hazard() do { } while (0)
#elif defined(CONFIG_CPU_SB1)

/*
 * Mostly like R4000 for historic reasons: everything is empty except
 * irq_disable_hazard, which needs three _ssnops.
 */
ASMMACRO(mtc0_tlbw_hazard,
	)
ASMMACRO(tlbw_use_hazard,
	)
ASMMACRO(tlb_probe_hazard,
	)
ASMMACRO(irq_enable_hazard,
	)
ASMMACRO(irq_disable_hazard,
	 _ssnop; _ssnop; _ssnop
	)
ASMMACRO(back_to_back_c0_hazard,
	)
#define instruction_hazard() do { } while (0)
  184. #else
  185. /*
  186. * Finally the catchall case for all other processors including R4000, R4400,
  187. * R4600, R4700, R5000, RM7000, NEC VR41xx etc.
  188. *
  189. * The taken branch will result in a two cycle penalty for the two killed
  190. * instructions on R4000 / R4400. Other processors only have a single cycle
  191. * hazard so this is nice trick to have an optimal code for a range of
  192. * processors.
  193. */
  194. ASMMACRO(mtc0_tlbw_hazard,
  195. nop; nop
  196. )
  197. ASMMACRO(tlbw_use_hazard,
  198. nop; nop; nop
  199. )
  200. ASMMACRO(tlb_probe_hazard,
  201. nop; nop; nop
  202. )
  203. ASMMACRO(irq_enable_hazard,
  204. _ssnop; _ssnop; _ssnop;
  205. )
  206. ASMMACRO(irq_disable_hazard,
  207. nop; nop; nop
  208. )
  209. ASMMACRO(back_to_back_c0_hazard,
  210. _ssnop; _ssnop; _ssnop;
  211. )
  212. #define instruction_hazard() do { } while (0)
  213. #endif
/* FPU hazards */

#if defined(CONFIG_CPU_SB1)

/*
 * SB1 FPU-enable workaround: bnezl on $0 is never taken, and as a
 * branch-likely it nullifies the _ssnop in its delay slot; the
 * surrounding .set push/pop keeps the mips64/noreorder directives
 * local to this sequence.  NOTE(review): presumably this forces the
 * CP1-enable to resolve before the first FPU instruction per SB1
 * errata — confirm against the SB1 core documentation.
 */
ASMMACRO(enable_fpu_hazard,
	 .set push;
	 .set mips64;
	 .set noreorder;
	 _ssnop;
	 bnezl $0, .+4;
	 _ssnop;
	 .set pop
	)
ASMMACRO(disable_fpu_hazard,
	)

#elif defined(CONFIG_CPU_MIPSR2)

/* R2 has an architected barrier: a single ehb suffices in both directions. */
ASMMACRO(enable_fpu_hazard,
	 _ehb
	)
ASMMACRO(disable_fpu_hazard,
	 _ehb
	)

#else

/* Legacy CPUs: pad the FPU enable with plain nops. */
ASMMACRO(enable_fpu_hazard,
	 nop; nop; nop; nop
	)
/* _ehb's encoding executes as a harmless no-op on pre-R2 CPUs. */
ASMMACRO(disable_fpu_hazard,
	 _ehb
	)
#endif

#endif /* _ASM_HAZARDS_H */