
/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2003, 04, 07 Ralf Baechle <ralf@linux-mips.org>
 * Copyright (C) MIPS Technologies, Inc.
 *   written by Ralf Baechle <ralf@linux-mips.org>
 */
#ifndef _ASM_HAZARDS_H
#define _ASM_HAZARDS_H

#ifdef __ASSEMBLY__
#define ASMMACRO(name, code...) .macro name; code; .endm
#else

#include <asm/cpu-features.h>
#define ASMMACRO(name, code...)					\
__asm__(".macro " #name "; " #code "; .endm");			\
								\
static inline void name(void)					\
{								\
	__asm__ __volatile__ (#name);				\
}
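/*
 * Illustrative sketch (not part of the original header): in the C case,
 * ASMMACRO(_ehb, sll $0, $0, 3) expands to roughly
 *
 *	__asm__(".macro _ehb; sll $0, $0, 3; .endm");
 *	static inline void _ehb(void)
 *	{
 *		__asm__ __volatile__("_ehb");
 *	}
 *
 * so the same barrier is usable by name from both C and assembly sources.
 */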
/*
 * MIPS R2 instruction hazard barrier. Needs to be called as a subroutine.
 */
extern void mips_ihb(void);

#endif
ASMMACRO(_ssnop,
	 sll	$0, $0, 1
	)

ASMMACRO(_ehb,
	 sll	$0, $0, 3
	)
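/*
 * Note (added for clarity): SSNOP and EHB are encoded as shifts of the
 * zero register, so on processors that do not implement them they execute
 * as ordinary no-ops and are therefore always safe to emit.
 */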
/*
 * TLB hazards
 */
#if defined(CONFIG_CPU_MIPSR2) && !defined(CONFIG_CPU_CAVIUM_OCTEON)

/*
 * MIPSR2 defines ehb for hazard avoidance
 */
ASMMACRO(mtc0_tlbw_hazard,
	 _ehb
	)
ASMMACRO(tlbw_use_hazard,
	 _ehb
	)
ASMMACRO(tlb_probe_hazard,
	 _ehb
	)
ASMMACRO(irq_enable_hazard,
	 _ehb
	)
ASMMACRO(irq_disable_hazard,
	 _ehb
	)
ASMMACRO(back_to_back_c0_hazard,
	 _ehb
	)
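/*
 * Hypothetical usage sketch (not from the original file): these barriers
 * sit between a CP0 write and an instruction that depends on its effect,
 * e.g.
 *
 *	write_c0_status(status);	// mtc0
 *	back_to_back_c0_hazard();
 *	status = read_c0_status();	// mfc0 of the same register
 */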
/*
 * gcc has a tradition of miscompiling the previous construct when the
 * address of a label is passed as an argument to inline assembler. GAS,
 * on the other hand, has the annoying difference between la and dla,
 * which are only usable for 32-bit resp. 64-bit code, so they can't be
 * used without conditional compilation. The alternative is switching the
 * assembler to 64-bit code, which happens to work right even for 32-bit
 * code ...
 */
#define instruction_hazard()					\
do {								\
	unsigned long tmp;					\
								\
	__asm__ __volatile__(					\
	"	.set	mips64r2	\n"			\
	"	dla	%0, 1f		\n"			\
	"	jr.hb	%0		\n"			\
	"	.set	mips0		\n"			\
	"1:				\n"			\
	: "=r" (tmp));						\
} while (0)
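/*
 * Note (added for clarity): jr.hb is the R2 "jump register with hazard
 * barrier"; jumping through it to the very next instruction (label 1:)
 * clears any pending instruction hazards before execution continues.
 */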
#elif defined(CONFIG_CPU_MIPSR1) && !defined(CONFIG_MACH_ALCHEMY)

/*
 * These are slightly complicated by the fact that we guarantee that R1
 * kernels will run fine on R2 processors.
 */
ASMMACRO(mtc0_tlbw_hazard,
	 _ssnop; _ssnop; _ehb
	)
ASMMACRO(tlbw_use_hazard,
	 _ssnop; _ssnop; _ssnop; _ehb
	)
ASMMACRO(tlb_probe_hazard,
	 _ssnop; _ssnop; _ssnop; _ehb
	)
ASMMACRO(irq_enable_hazard,
	 _ssnop; _ssnop; _ssnop; _ehb
	)
ASMMACRO(irq_disable_hazard,
	 _ssnop; _ssnop; _ssnop; _ehb
	)
ASMMACRO(back_to_back_c0_hazard,
	 _ssnop; _ssnop; _ssnop; _ehb
	)
/*
 * gcc has a tradition of miscompiling the previous construct when the
 * address of a label is passed as an argument to inline assembler. GAS,
 * on the other hand, has the annoying difference between la and dla,
 * which are only usable for 32-bit resp. 64-bit code, so they can't be
 * used without conditional compilation. The alternative is switching the
 * assembler to 64-bit code, which happens to work right even for 32-bit
 * code ...
 */
#define __instruction_hazard()					\
do {								\
	unsigned long tmp;					\
								\
	__asm__ __volatile__(					\
	"	.set	mips64r2	\n"			\
	"	dla	%0, 1f		\n"			\
	"	jr.hb	%0		\n"			\
	"	.set	mips0		\n"			\
	"1:				\n"			\
	: "=r" (tmp));						\
} while (0)

#define instruction_hazard()					\
do {								\
	if (cpu_has_mips_r2)					\
		__instruction_hazard();				\
} while (0)
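/*
 * Note (added for clarity): cpu_has_mips_r2 is evaluated at run time, so
 * an R1 kernel executes the jr.hb sequence only when it actually finds
 * itself running on an R2 core.
 */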
#elif defined(CONFIG_CPU_R10000) || defined(CONFIG_CPU_CAVIUM_OCTEON) || \
	defined(CONFIG_CPU_R5500) || defined(CONFIG_MACH_ALCHEMY)

/*
 * R10000 rocks - all hazards handled in hardware, so this becomes a
 * no-brainer.
 */
ASMMACRO(mtc0_tlbw_hazard,
	)
ASMMACRO(tlbw_use_hazard,
	)
ASMMACRO(tlb_probe_hazard,
	)
ASMMACRO(irq_enable_hazard,
	)
ASMMACRO(irq_disable_hazard,
	)
ASMMACRO(back_to_back_c0_hazard,
	)
#define instruction_hazard() do { } while (0)
#elif defined(CONFIG_CPU_RM9000)

/*
 * RM9000 hazards. When the JTLB is updated by tlbwi or tlbwr, a subsequent
 * use of the JTLB for instructions should not occur for 4 cpu cycles and use
 * for data translations should not occur for 3 cpu cycles.
 */
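/*
 * (Added note: the four back-to-back _ssnops below give at least the four
 * idle cycles the instruction-side case requires, which also covers the
 * three-cycle data-side requirement.)
 */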
ASMMACRO(mtc0_tlbw_hazard,
	 _ssnop; _ssnop; _ssnop; _ssnop
	)
ASMMACRO(tlbw_use_hazard,
	 _ssnop; _ssnop; _ssnop; _ssnop
	)
ASMMACRO(tlb_probe_hazard,
	 _ssnop; _ssnop; _ssnop; _ssnop
	)
ASMMACRO(irq_enable_hazard,
	)
ASMMACRO(irq_disable_hazard,
	)
ASMMACRO(back_to_back_c0_hazard,
	)
#define instruction_hazard() do { } while (0)
#elif defined(CONFIG_CPU_SB1)

/*
 * Mostly like R4000 for historical reasons
 */
ASMMACRO(mtc0_tlbw_hazard,
	)
ASMMACRO(tlbw_use_hazard,
	)
ASMMACRO(tlb_probe_hazard,
	)
ASMMACRO(irq_enable_hazard,
	)
ASMMACRO(irq_disable_hazard,
	 _ssnop; _ssnop; _ssnop
	)
ASMMACRO(back_to_back_c0_hazard,
	)
#define instruction_hazard() do { } while (0)
#else

/*
 * Finally the catchall case for all other processors including R4000, R4400,
 * R4600, R4700, R5000, RM7000, NEC VR41xx etc.
 *
 * The taken branch will result in a two cycle penalty for the two killed
 * instructions on R4000 / R4400. Other processors only have a single cycle
 * hazard so this is a nice trick to get optimal code for a range of
 * processors.
 */
ASMMACRO(mtc0_tlbw_hazard,
	 nop; nop
	)
ASMMACRO(tlbw_use_hazard,
	 nop; nop; nop
	)
ASMMACRO(tlb_probe_hazard,
	 nop; nop; nop
	)
ASMMACRO(irq_enable_hazard,
	 _ssnop; _ssnop; _ssnop
	)
ASMMACRO(irq_disable_hazard,
	 nop; nop; nop
	)
ASMMACRO(back_to_back_c0_hazard,
	 _ssnop; _ssnop; _ssnop
	)
#define instruction_hazard() do { } while (0)

#endif
/*
 * FPU hazards
 */
#if defined(CONFIG_CPU_SB1)
ASMMACRO(enable_fpu_hazard,
	 .set	push;
	 .set	mips64;
	 .set	noreorder;
	 _ssnop;
	 bnezl	$0, .+4;
	 _ssnop;
	 .set	pop
	)
ASMMACRO(disable_fpu_hazard,
	)
#elif defined(CONFIG_CPU_MIPSR2)
ASMMACRO(enable_fpu_hazard,
	 _ehb
	)
ASMMACRO(disable_fpu_hazard,
	 _ehb
	)
#else
ASMMACRO(enable_fpu_hazard,
	 nop; nop; nop; nop
	)
ASMMACRO(disable_fpu_hazard,
	 _ehb
	)
#endif
#endif /* _ASM_HAZARDS_H */