hazards.h

/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2003, 04, 07 Ralf Baechle <ralf@linux-mips.org>
 * Copyright (C) MIPS Technologies, Inc.
 *   written by Ralf Baechle <ralf@linux-mips.org>
 */
#ifndef _ASM_HAZARDS_H
#define _ASM_HAZARDS_H

#ifdef __ASSEMBLY__
#define ASMMACRO(name, code...) .macro name; code; .endm
#else

#include <asm/cpu-features.h>

#define ASMMACRO(name, code...)						\
__asm__(".macro " #name "; " #code "; .endm");				\
									\
static inline void name(void)						\
{									\
	__asm__ __volatile__ (#name);					\
}
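
/*
 * Illustrative expansion (a sketch, not part of this header): in the C
 * case, ASMMACRO(_ehb, sll $0, $0, 3) produces both a gas macro and an
 * inline wrapper, roughly:
 *
 *	__asm__(".macro _ehb; sll $0, $0, 3; .endm");
 *	static inline void _ehb(void) { __asm__ __volatile__("_ehb"); }
 *
 * so .S files can use the assembler macro while C code calls _ehb().
 */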

/*
 * MIPS R2 instruction hazard barrier.  Needs to be called as a subroutine.
 */
extern void mips_ihb(void);

#endif /* __ASSEMBLY__ */

ASMMACRO(_ssnop,
	 sll	$0, $0, 1
	)

ASMMACRO(_ehb,
	 sll	$0, $0, 3
	)
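
/*
 * Added note on the encodings: sll $0, $0, 1 is the SSNOP ("superscalar
 * no-op") encoding and sll $0, $0, 3 is the MIPS R2 EHB ("execution
 * hazard barrier") encoding.  Both only write to $0 and therefore behave
 * as ordinary no-ops on processors that predate them.
 */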

/*
 * TLB hazards
 */
#if defined(CONFIG_CPU_MIPSR2) && !defined(CONFIG_CPU_CAVIUM_OCTEON)

/*
 * MIPSR2 defines ehb for hazard avoidance
 */

ASMMACRO(mtc0_tlbw_hazard,
	 _ehb
	)
ASMMACRO(tlbw_use_hazard,
	 _ehb
	)
ASMMACRO(tlb_probe_hazard,
	 _ehb
	)
ASMMACRO(irq_enable_hazard,
	 _ehb
	)
ASMMACRO(irq_disable_hazard,
	 _ehb
	)
ASMMACRO(back_to_back_c0_hazard,
	 _ehb
	)
/*
 * gcc has a tradition of miscompiling the previous construct using the
 * address of a label as argument to inline assembler.  Gas, on the other
 * hand, has the annoying difference between la and dla which are only
 * usable for 32-bit resp. 64-bit code, so they can't be used without
 * conditional compilation.  The alternative is switching the assembler to
 * 64-bit code which happens to work right even for 32-bit code ...
 */
#define instruction_hazard()						\
do {									\
	unsigned long tmp;						\
									\
	__asm__ __volatile__(						\
	"	.set	mips64r2				\n"	\
	"	dla	%0, 1f					\n"	\
	"	jr.hb	%0					\n"	\
	"	.set	mips0					\n"	\
	"1:							\n"	\
	: "=r" (tmp));							\
} while (0)
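
/*
 * Usage sketch (hypothetical call site, not taken from this file):
 * instruction_hazard() is placed after code has been modified and the
 * caches synchronized, before the new instructions are executed, e.g.:
 *
 *	memcpy(dst, src, len);		// patch in new instructions
 *	flush_icache_range((unsigned long)dst,
 *			   (unsigned long)dst + len);
 *	instruction_hazard();		// discard stale, already-fetched
 *					// instructions before jumping in
 */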

#elif defined(CONFIG_CPU_MIPSR1) && !defined(CONFIG_MACH_ALCHEMY)

/*
 * These are slightly complicated by the fact that we guarantee R1 kernels to
 * run fine on R2 processors.  The trailing _ehb is an ordinary no-op on R1
 * hardware but resolves the hazard when the same kernel runs on an R2 core.
 */
ASMMACRO(mtc0_tlbw_hazard,
	 _ssnop; _ssnop; _ehb
	)
ASMMACRO(tlbw_use_hazard,
	 _ssnop; _ssnop; _ssnop; _ehb
	)
ASMMACRO(tlb_probe_hazard,
	 _ssnop; _ssnop; _ssnop; _ehb
	)
ASMMACRO(irq_enable_hazard,
	 _ssnop; _ssnop; _ssnop; _ehb
	)
ASMMACRO(irq_disable_hazard,
	 _ssnop; _ssnop; _ssnop; _ehb
	)
ASMMACRO(back_to_back_c0_hazard,
	 _ssnop; _ssnop; _ssnop; _ehb
	)
/*
 * gcc has a tradition of miscompiling the previous construct using the
 * address of a label as argument to inline assembler.  Gas, on the other
 * hand, has the annoying difference between la and dla which are only
 * usable for 32-bit resp. 64-bit code, so they can't be used without
 * conditional compilation.  The alternative is switching the assembler to
 * 64-bit code which happens to work right even for 32-bit code ...
 */
#define __instruction_hazard()						\
do {									\
	unsigned long tmp;						\
									\
	__asm__ __volatile__(						\
	"	.set	mips64r2				\n"	\
	"	dla	%0, 1f					\n"	\
	"	jr.hb	%0					\n"	\
	"	.set	mips0					\n"	\
	"1:							\n"	\
	: "=r" (tmp));							\
} while (0)

#define instruction_hazard()						\
do {									\
	if (cpu_has_mips_r2)						\
		__instruction_hazard();					\
} while (0)
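
/*
 * Added note: cpu_has_mips_r2 is a run-time check, so a kernel built for
 * R1 only executes the jr.hb sequence when it finds itself running on an
 * R2 core, where the instruction is actually defined; on a genuine R1
 * core the macro degenerates to a no-op.
 */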

#elif defined(CONFIG_MACH_ALCHEMY) || defined(CONFIG_CPU_CAVIUM_OCTEON) || \
	defined(CONFIG_CPU_LOONGSON2) || defined(CONFIG_CPU_R10000) || \
	defined(CONFIG_CPU_R5500)

/*
 * R10000 rocks - all hazards handled in hardware, so this becomes a
 * no-brainer.
 */
ASMMACRO(mtc0_tlbw_hazard,
	)
ASMMACRO(tlbw_use_hazard,
	)
ASMMACRO(tlb_probe_hazard,
	)
ASMMACRO(irq_enable_hazard,
	)
ASMMACRO(irq_disable_hazard,
	)
ASMMACRO(back_to_back_c0_hazard,
	)
#define instruction_hazard() do { } while (0)

#elif defined(CONFIG_CPU_RM9000)

/*
 * RM9000 hazards.  When the JTLB is updated by tlbwi or tlbwr, a subsequent
 * use of the JTLB for instructions should not occur for 4 cpu cycles and use
 * for data translations should not occur for 3 cpu cycles.
 */
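
/*
 * Added note: four _ssnops are used below because SSNOP is guaranteed to
 * consume an issue cycle of its own even on a superscalar pipeline, so
 * four of them cover the worst case of 4 cpu cycles quoted above.
 */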
ASMMACRO(mtc0_tlbw_hazard,
	 _ssnop; _ssnop; _ssnop; _ssnop
	)
ASMMACRO(tlbw_use_hazard,
	 _ssnop; _ssnop; _ssnop; _ssnop
	)
ASMMACRO(tlb_probe_hazard,
	 _ssnop; _ssnop; _ssnop; _ssnop
	)
ASMMACRO(irq_enable_hazard,
	)
ASMMACRO(irq_disable_hazard,
	)
ASMMACRO(back_to_back_c0_hazard,
	)
#define instruction_hazard() do { } while (0)

#elif defined(CONFIG_CPU_SB1)

/*
 * Mostly like R4000 for historic reasons
 */
ASMMACRO(mtc0_tlbw_hazard,
	)
ASMMACRO(tlbw_use_hazard,
	)
ASMMACRO(tlb_probe_hazard,
	)
ASMMACRO(irq_enable_hazard,
	)
ASMMACRO(irq_disable_hazard,
	 _ssnop; _ssnop; _ssnop
	)
ASMMACRO(back_to_back_c0_hazard,
	)
#define instruction_hazard() do { } while (0)

#else

/*
 * Finally the catchall case for all other processors including R4000, R4400,
 * R4600, R4700, R5000, RM7000, NEC VR41xx etc.
 *
 * The taken branch will result in a two cycle penalty for the two killed
 * instructions on R4000 / R4400.  Other processors only have a single cycle
 * hazard so this is a nice trick to get optimal code for a range of
 * processors.
 */
ASMMACRO(mtc0_tlbw_hazard,
	 nop; nop
	)
ASMMACRO(tlbw_use_hazard,
	 nop; nop; nop
	)
ASMMACRO(tlb_probe_hazard,
	 nop; nop; nop
	)
ASMMACRO(irq_enable_hazard,
	 _ssnop; _ssnop; _ssnop;
	)
ASMMACRO(irq_disable_hazard,
	 nop; nop; nop
	)
ASMMACRO(back_to_back_c0_hazard,
	 _ssnop; _ssnop; _ssnop;
	)
#define instruction_hazard() do { } while (0)

#endif

/*
 * FPU hazards
 */
#if defined(CONFIG_CPU_SB1)
ASMMACRO(enable_fpu_hazard,
	 .set	push;
	 .set	mips64;
	 .set	noreorder;
	 _ssnop;
	 bnezl	$0, .+4;
	 _ssnop;
	 .set	pop
	)
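/*
 * Added note on the sequence above: bnezl with $0 as its operand can never
 * be taken, and because it is a branch-likely instruction the _ssnop in
 * its delay slot is annulled.  The purpose of the never-taken
 * branch-likely is, presumably, to force the SB1 pipeline to drain before
 * the first FPU instruction issues.
 */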
ASMMACRO(disable_fpu_hazard,
	)

#elif defined(CONFIG_CPU_MIPSR2)

ASMMACRO(enable_fpu_hazard,
	 _ehb
	)
ASMMACRO(disable_fpu_hazard,
	 _ehb
	)
#else
ASMMACRO(enable_fpu_hazard,
	 nop; nop; nop; nop
	)
ASMMACRO(disable_fpu_hazard,
	 _ehb
	)
#endif
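
/*
 * Usage sketch (hypothetical call site, not taken from this file): the
 * enable barrier separates the write that makes coprocessor 1 usable from
 * the first FPU instruction, e.g.:
 *
 *	set_c0_status(ST0_CU1);		// set Status.CU1: FPU usable
 *	enable_fpu_hazard();		// wait out the CP0 hazard
 *	// ... first CP1 (FPU) instruction may issue safely here ...
 */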

#endif /* _ASM_HAZARDS_H */