/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2003, 04, 07 Ralf Baechle <ralf@linux-mips.org>
 * Copyright (C) MIPS Technologies, Inc.
 *   written by Ralf Baechle <ralf@linux-mips.org>
 */
#ifndef _ASM_HAZARDS_H
#define _ASM_HAZARDS_H
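
/*
 * ASMMACRO() defines each hazard barrier twice: as a gas .macro for use
 * from assembly sources and, when this header is included from C, as a
 * small inline function that simply emits the macro by name.  The same
 * names therefore work in both .S and .c files.
 */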

#ifdef __ASSEMBLY__
#define ASMMACRO(name, code...) .macro name; code; .endm
#else

#include <asm/cpu-features.h>

#define ASMMACRO(name, code...)					\
__asm__(".macro " #name "; " #code "; .endm");			\
								\
static inline void name(void)					\
{								\
	__asm__ __volatile__ (#name);				\
}

/*
 * MIPS R2 instruction hazard barrier. Needs to be called as a subroutine.
 */
extern void mips_ihb(void);

#endif
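
/*
 * sll $0, $0, 1 is the encoding of SSNOP and sll $0, $0, 3 is the
 * encoding of EHB; both are variants of nop, so they execute harmlessly
 * on processors that predate these definitions.
 */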
ASMMACRO(_ssnop,
	 sll	$0, $0, 1
	)

ASMMACRO(_ehb,
	 sll	$0, $0, 3
	)

/*
 * TLB hazards
 */
#if defined(CONFIG_CPU_MIPSR2) && !defined(CONFIG_CPU_CAVIUM_OCTEON)

/*
 * MIPSR2 defines ehb for hazard avoidance
 */
ASMMACRO(mtc0_tlbw_hazard,
	 _ehb
	)
ASMMACRO(tlbw_use_hazard,
	 _ehb
	)
ASMMACRO(tlb_probe_hazard,
	 _ehb
	)
ASMMACRO(irq_enable_hazard,
	 _ehb
	)
ASMMACRO(irq_disable_hazard,
	 _ehb
	)
ASMMACRO(back_to_back_c0_hazard,
	 _ehb
	)

/*
 * gcc has a tradition of miscompiling the previous construct using the
 * address of a label as argument to inline assembler.  Gas, on the other
 * hand, has the annoying difference between la and dla, which are only
 * usable for 32-bit resp. 64-bit code, so neither can be used without
 * conditional compilation.  The alternative is switching the assembler to
 * 64-bit code, which happens to work right even for 32-bit code ...
 */
#define instruction_hazard()					\
do {								\
	unsigned long tmp;					\
								\
	__asm__ __volatile__(					\
	"	.set	mips64r2			\n"	\
	"	dla	%0, 1f				\n"	\
	"	jr.hb	%0				\n"	\
	"	.set	mips0				\n"	\
	"1:						\n"	\
	: "=r" (tmp));						\
} while (0)
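
/*
 * Illustrative only, not part of this header: a TLB update typically
 * pairs each step with its hazard barrier, e.g.
 *
 *	write_c0_entryhi(entryhi);
 *	mtc0_tlbw_hazard();
 *	tlb_write_indexed();
 *	tlbw_use_hazard();
 */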

#elif (defined(CONFIG_CPU_MIPSR1) && !defined(CONFIG_MIPS_ALCHEMY)) || \
	defined(CONFIG_CPU_BMIPS)

/*
 * These are slightly complicated by the fact that we guarantee R1 kernels to
 * run fine on R2 processors.
 */
ASMMACRO(mtc0_tlbw_hazard,
	 _ssnop; _ssnop; _ehb
	)
ASMMACRO(tlbw_use_hazard,
	 _ssnop; _ssnop; _ssnop; _ehb
	)
ASMMACRO(tlb_probe_hazard,
	 _ssnop; _ssnop; _ssnop; _ehb
	)
ASMMACRO(irq_enable_hazard,
	 _ssnop; _ssnop; _ssnop; _ehb
	)
ASMMACRO(irq_disable_hazard,
	 _ssnop; _ssnop; _ssnop; _ehb
	)
ASMMACRO(back_to_back_c0_hazard,
	 _ssnop; _ssnop; _ssnop; _ehb
	)

/*
 * gcc has a tradition of miscompiling the previous construct using the
 * address of a label as argument to inline assembler.  Gas, on the other
 * hand, has the annoying difference between la and dla, which are only
 * usable for 32-bit resp. 64-bit code, so neither can be used without
 * conditional compilation.  The alternative is switching the assembler to
 * 64-bit code, which happens to work right even for 32-bit code ...
 */
#define __instruction_hazard()					\
do {								\
	unsigned long tmp;					\
								\
	__asm__ __volatile__(					\
	"	.set	mips64r2			\n"	\
	"	dla	%0, 1f				\n"	\
	"	jr.hb	%0				\n"	\
	"	.set	mips0				\n"	\
	"1:						\n"	\
	: "=r" (tmp));						\
} while (0)

#define instruction_hazard()					\
do {								\
	if (cpu_has_mips_r2)					\
		__instruction_hazard();				\
} while (0)
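
/*
 * Note that cpu_has_mips_r2 is tested at run time: an R1 kernel may be
 * running on an R2 processor (see the comment above), but jr.hb is only
 * defined from MIPSR2 on, so it must not be issued on a genuine R1 CPU.
 */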

#elif defined(CONFIG_MIPS_ALCHEMY) || defined(CONFIG_CPU_CAVIUM_OCTEON) || \
	defined(CONFIG_CPU_LOONGSON2) || defined(CONFIG_CPU_R10000) || \
	defined(CONFIG_CPU_R5500)

/*
 * R10000 rocks - all hazards handled in hardware, so this becomes a
 * no-brainer.
 */
ASMMACRO(mtc0_tlbw_hazard,
	)
ASMMACRO(tlbw_use_hazard,
	)
ASMMACRO(tlb_probe_hazard,
	)
ASMMACRO(irq_enable_hazard,
	)
ASMMACRO(irq_disable_hazard,
	)
ASMMACRO(back_to_back_c0_hazard,
	)
#define instruction_hazard() do { } while (0)

#elif defined(CONFIG_CPU_RM9000)

/*
 * RM9000 hazards. When the JTLB is updated by tlbwi or tlbwr, a subsequent
 * use of the JTLB for instructions should not occur for 4 cpu cycles and use
 * for data translations should not occur for 3 cpu cycles.
 */
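/*
 * Four _ssnops therefore cover the worst case quoted above: four cycles
 * before the JTLB may be used again for instruction fetches.
 */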
ASMMACRO(mtc0_tlbw_hazard,
	 _ssnop; _ssnop; _ssnop; _ssnop
	)
ASMMACRO(tlbw_use_hazard,
	 _ssnop; _ssnop; _ssnop; _ssnop
	)
ASMMACRO(tlb_probe_hazard,
	 _ssnop; _ssnop; _ssnop; _ssnop
	)
ASMMACRO(irq_enable_hazard,
	)
ASMMACRO(irq_disable_hazard,
	)
ASMMACRO(back_to_back_c0_hazard,
	)
#define instruction_hazard() do { } while (0)

#elif defined(CONFIG_CPU_SB1)

/*
 * Mostly like R4000 for historical reasons
 */
ASMMACRO(mtc0_tlbw_hazard,
	)
ASMMACRO(tlbw_use_hazard,
	)
ASMMACRO(tlb_probe_hazard,
	)
ASMMACRO(irq_enable_hazard,
	)
ASMMACRO(irq_disable_hazard,
	 _ssnop; _ssnop; _ssnop
	)
ASMMACRO(back_to_back_c0_hazard,
	)
#define instruction_hazard() do { } while (0)

#else

/*
 * Finally the catchall case for all other processors including R4000, R4400,
 * R4600, R4700, R5000, RM7000, NEC VR41xx etc.
 *
 * The taken branch will result in a two cycle penalty for the two killed
 * instructions on R4000 / R4400. Other processors only have a single cycle
 * hazard, so this is a nice trick to get optimal code for a range of
 * processors.
 */
ASMMACRO(mtc0_tlbw_hazard,
	 nop; nop
	)
ASMMACRO(tlbw_use_hazard,
	 nop; nop; nop
	)
ASMMACRO(tlb_probe_hazard,
	 nop; nop; nop
	)
ASMMACRO(irq_enable_hazard,
	 _ssnop; _ssnop; _ssnop
	)
ASMMACRO(irq_disable_hazard,
	 nop; nop; nop
	)
ASMMACRO(back_to_back_c0_hazard,
	 _ssnop; _ssnop; _ssnop
	)
#define instruction_hazard() do { } while (0)

#endif

/* FPU hazards */

#if defined(CONFIG_CPU_SB1)
ASMMACRO(enable_fpu_hazard,
	 .set	push;
	 .set	mips64;
	 .set	noreorder;
	 _ssnop;
	 bnezl	$0, .+4;
	 _ssnop;
	 .set	pop
	)
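/*
 * The bnezl above compares $0 and so is never taken; presumably the
 * branch-likely form forces the SB1 pipeline to drain, which is what
 * turns this sequence into an effective hazard barrier.
 */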
ASMMACRO(disable_fpu_hazard,
	)

#elif defined(CONFIG_CPU_MIPSR2)
ASMMACRO(enable_fpu_hazard,
	 _ehb
	)
ASMMACRO(disable_fpu_hazard,
	 _ehb
	)

#else
ASMMACRO(enable_fpu_hazard,
	 nop; nop; nop; nop
	)
ASMMACRO(disable_fpu_hazard,
	 _ehb
	)
#endif
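
/*
 * Illustrative only: enable_fpu_hazard() is meant to follow a write that
 * sets the CU1 bit in the CP0 Status register, before the first
 * coprocessor 1 instruction is issued; disable_fpu_hazard() covers the
 * opposite transition.
 */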

#endif /* _ASM_HAZARDS_H */