/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2003, 04, 07 Ralf Baechle <ralf@linux-mips.org>
 * Copyright (C) MIPS Technologies, Inc.
 *   written by Ralf Baechle <ralf@linux-mips.org>
 */
#ifndef _ASM_HAZARDS_H
#define _ASM_HAZARDS_H

#include <linux/stringify.h>

/*
 * Base hazard-barrier instructions, spelled as shift-by-immediate forms of
 * "sll $0, $0, n" so that pre-R2 assemblers accept them:
 *
 *   sll $0, $0, 1  is the SSNOP (superscalar no-op) encoding;
 *   sll $0, $0, 3  is the EHB (execution hazard barrier) encoding,
 *                  a plain nop on CPUs that predate MIPS R2.
 */
#define ___ssnop							\
	sll	$0, $0, 1

#define ___ehb								\
	sll	$0, $0, 3
/*
 * TLB hazards
 *
 * One branch per CPU family below; each selects the instruction sequence
 * needed to separate a hazard-producing instruction (mtc0, tlbw, tlbp,
 * interrupt enable/disable, back-to-back CP0 accesses) from the
 * instruction that consumes its result.
 */
#if defined(CONFIG_CPU_MIPSR2) && !defined(CONFIG_CPU_CAVIUM_OCTEON)

/*
 * MIPSR2 defines ehb for hazard avoidance
 */

/* Barrier between an mtc0 and a following TLB write. */
#define __mtc0_tlbw_hazard						\
	___ehb

/* Barrier between a TLB write and first use of the new entry. */
#define __tlbw_use_hazard						\
	___ehb

/* Barrier after tlbp, before reading Index/EntryHi etc. */
#define __tlb_probe_hazard						\
	___ehb

#define __irq_enable_hazard						\
	___ehb

#define __irq_disable_hazard						\
	___ehb

/* Barrier between two consecutive CP0 accesses. */
#define __back_to_back_c0_hazard					\
	___ehb

/*
 * gcc has a tradition of misscompiling the previous construct using the
 * address of a label as argument to inline assembler. Gas otoh has the
 * annoying difference between la and dla which are only usable for 32-bit
 * rsp. 64-bit code, so can't be used without conditional compilation.
 * The alterantive is switching the assembler to 64-bit code which happens
 * to work right even for 32-bit code ...
 */
/* Instruction-fetch hazard barrier: jr.hb to the next instruction. */
#define instruction_hazard()						\
do {									\
	unsigned long tmp;						\
									\
	__asm__ __volatile__(						\
	"	.set	mips64r2				\n"	\
	"	dla	%0, 1f					\n"	\
	"	jr.hb	%0					\n"	\
	"	.set	mips0					\n"	\
	"1:							\n"	\
	: "=r" (tmp));							\
} while (0)

#elif (defined(CONFIG_CPU_MIPSR1) && !defined(CONFIG_MIPS_ALCHEMY)) || \
	defined(CONFIG_CPU_BMIPS)

/*
 * These are slightly complicated by the fact that we guarantee R1 kernels to
 * run fine on R2 processors.
 *
 * Hence each sequence pads with ssnops (enough for R1 pipelines) and ends
 * in ___ehb, which is an architectural barrier on R2 and a harmless nop
 * on R1.
 */
#define __mtc0_tlbw_hazard						\
	___ssnop;							\
	___ssnop;							\
	___ehb

#define __tlbw_use_hazard						\
	___ssnop;							\
	___ssnop;							\
	___ssnop;							\
	___ehb

#define __tlb_probe_hazard						\
	___ssnop;							\
	___ssnop;							\
	___ssnop;							\
	___ehb

#define __irq_enable_hazard						\
	___ssnop;							\
	___ssnop;							\
	___ssnop;							\
	___ehb

#define __irq_disable_hazard						\
	___ssnop;							\
	___ssnop;							\
	___ssnop;							\
	___ehb

#define __back_to_back_c0_hazard					\
	___ssnop;							\
	___ssnop;							\
	___ssnop;							\
	___ehb

/*
 * gcc has a tradition of misscompiling the previous construct using the
 * address of a label as argument to inline assembler. Gas otoh has the
 * annoying difference between la and dla which are only usable for 32-bit
 * rsp. 64-bit code, so can't be used without conditional compilation.
 * The alterantive is switching the assembler to 64-bit code which happens
 * to work right even for 32-bit code ...
 */
#define __instruction_hazard()						\
do {									\
	unsigned long tmp;						\
									\
	__asm__ __volatile__(						\
	"	.set	mips64r2				\n"	\
	"	dla	%0, 1f					\n"	\
	"	jr.hb	%0					\n"	\
	"	.set	mips0					\n"	\
	"1:							\n"	\
	: "=r" (tmp)); 							\
} while (0)

/* jr.hb only exists on R2; emit it only when actually running on one. */
#define instruction_hazard()						\
do {									\
	if (cpu_has_mips_r2)						\
		__instruction_hazard();					\
} while (0)

#elif defined(CONFIG_MIPS_ALCHEMY) || defined(CONFIG_CPU_CAVIUM_OCTEON) || \
	defined(CONFIG_CPU_LOONGSON2) || defined(CONFIG_CPU_R10000) || \
	defined(CONFIG_CPU_R5500) || defined(CONFIG_CPU_XLR)

/*
 * R10000 rocks - all hazards handled in hardware, so this becomes a nobrainer.
 */
#define __mtc0_tlbw_hazard
#define __tlbw_use_hazard
#define __tlb_probe_hazard
#define __irq_enable_hazard
#define __irq_disable_hazard
#define __back_to_back_c0_hazard
#define instruction_hazard() do { } while (0)

#elif defined(CONFIG_CPU_SB1)

/*
 * Mostly like R4000 for historic reasons
 */
#define __mtc0_tlbw_hazard
#define __tlbw_use_hazard
#define __tlb_probe_hazard
#define __irq_enable_hazard
/* Only the interrupt-disable path needs padding on SB1. */
#define __irq_disable_hazard						\
	___ssnop;							\
	___ssnop;							\
	___ssnop
#define __back_to_back_c0_hazard
#define instruction_hazard() do { } while (0)

#else

/*
 * Finally the catchall case for all other processors including R4000, R4400,
 * R4600, R4700, R5000, RM7000, NEC VR41xx etc.
 *
 * The taken branch will result in a two cycle penalty for the two killed
 * instructions on R4000 / R4400. Other processors only have a single cycle
 * hazard so this is nice trick to have an optimal code for a range of
 * processors.
 */
#define __mtc0_tlbw_hazard						\
	nop;								\
	nop

#define __tlbw_use_hazard						\
	nop;								\
	nop;								\
	nop

#define __tlb_probe_hazard						\
	nop;								\
	nop;								\
	nop

#define __irq_enable_hazard						\
	___ssnop;							\
	___ssnop;							\
	___ssnop

#define __irq_disable_hazard						\
	nop;								\
	nop;								\
	nop

#define __back_to_back_c0_hazard					\
	___ssnop;							\
	___ssnop;							\
	___ssnop

#define instruction_hazard() do { } while (0)

#endif
/*
 * FPU hazards
 *
 * Barriers for the window between toggling Status.CU1 (coprocessor 1
 * enable) and the first/last FPU instruction.
 */
#if defined(CONFIG_CPU_SB1)

/*
 * NOTE(review): bnezl on $0 is never taken, and branch-likely annuls its
 * delay slot when not taken -- this appears to be used purely as a
 * pipeline serializer on SB1; confirm against SB1 core documentation.
 */
#define __enable_fpu_hazard						\
	.set	push;							\
	.set	mips64;							\
	.set	noreorder;						\
	___ssnop;							\
	bnezl	$0, .+4;						\
	___ssnop;							\
	.set	pop

#define __disable_fpu_hazard

#elif defined(CONFIG_CPU_MIPSR2)

#define __enable_fpu_hazard						\
	___ehb

#define __disable_fpu_hazard						\
	___ehb

#else

/* Pre-R2 catchall: pad with plain nops after enabling the FPU. */
#define __enable_fpu_hazard						\
	nop;								\
	nop;								\
	nop;								\
	nop

/* ___ehb degrades to a nop on pre-R2 CPUs, so this is safe everywhere. */
#define __disable_fpu_hazard						\
	___ehb

#endif
#ifdef __ASSEMBLY__

/*
 * In assembly files the hazard macros expand directly to the raw
 * instruction sequences selected above.
 */
#define _ssnop ___ssnop
#define _ehb ___ehb
#define mtc0_tlbw_hazard __mtc0_tlbw_hazard
#define tlbw_use_hazard __tlbw_use_hazard
#define tlb_probe_hazard __tlb_probe_hazard
#define irq_enable_hazard __irq_enable_hazard
#define irq_disable_hazard __irq_disable_hazard
#define back_to_back_c0_hazard __back_to_back_c0_hazard
#define enable_fpu_hazard __enable_fpu_hazard
#define disable_fpu_hazard __disable_fpu_hazard

#else

/*
 * In C each barrier becomes an inline-asm statement; the selected
 * instruction sequence is pasted in as text via __stringify().
 */
#define _ssnop()							\
do {									\
	__asm__ __volatile__(						\
	__stringify(___ssnop)						\
	);								\
} while (0)

#define _ehb()								\
do {									\
	__asm__ __volatile__(						\
	__stringify(___ehb)						\
	);								\
} while (0)

#define mtc0_tlbw_hazard()						\
do {									\
	__asm__ __volatile__(						\
	__stringify(__mtc0_tlbw_hazard)					\
	);								\
} while (0)

#define tlbw_use_hazard()						\
do {									\
	__asm__ __volatile__(						\
	__stringify(__tlbw_use_hazard)					\
	);								\
} while (0)

#define tlb_probe_hazard()						\
do {									\
	__asm__ __volatile__(						\
	__stringify(__tlb_probe_hazard)					\
	);								\
} while (0)

#define irq_enable_hazard()						\
do {									\
	__asm__ __volatile__(						\
	__stringify(__irq_enable_hazard)				\
	);								\
} while (0)

#define irq_disable_hazard()						\
do {									\
	__asm__ __volatile__(						\
	__stringify(__irq_disable_hazard)				\
	);								\
} while (0)

#define back_to_back_c0_hazard()					\
do {									\
	__asm__ __volatile__(						\
	__stringify(__back_to_back_c0_hazard)				\
	);								\
} while (0)

#define enable_fpu_hazard()						\
do {									\
	__asm__ __volatile__(						\
	__stringify(__enable_fpu_hazard)				\
	);								\
} while (0)

#define disable_fpu_hazard()						\
do {									\
	__asm__ __volatile__(						\
	__stringify(__disable_fpu_hazard)				\
	);								\
} while (0)

/*
 * MIPS R2 instruction hazard barrier.	Needs to be called as a subroutine.
 */
extern void mips_ihb(void);

#endif /* __ASSEMBLY__ */

#endif /* _ASM_HAZARDS_H */