/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 2003, 2004 Ralf Baechle <ralf@linux-mips.org>
 * Copyright (C) MIPS Technologies, Inc.
 *   written by Ralf Baechle <ralf@linux-mips.org>
 */
#ifndef _ASM_HAZARDS_H
#define _ASM_HAZARDS_H

#ifdef __ASSEMBLY__
#define ASMMACRO(name, code...) .macro name; code; .endm
#else

#define ASMMACRO(name, code...)						\
__asm__(".macro " #name "; " #code "; .endm");				\
									\
static inline void name(void)						\
{									\
	__asm__ __volatile__ (#name);					\
}
#endif
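
/*
 * For reference, a sketch of what the C-side ASMMACRO() expands to,
 * written out by hand from the macro above (illustrative only):
 *
 *	ASMMACRO(_ehb, sll $0, $0, 3)
 *
 * becomes
 *
 *	__asm__(".macro _ehb; sll $0, $0, 3; .endm");
 *	static inline void _ehb(void) { __asm__ __volatile__("_ehb"); }
 *
 * i.e. each hazard barrier is both an assembler macro (usable from .S
 * files) and a C inline wrapper that emits one use of that macro.
 */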

ASMMACRO(_ssnop,
	 sll	$0, $0, 1
	)

ASMMACRO(_ehb,
	 sll	$0, $0, 3
	)
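
/*
 * Note: _ssnop (sll $0, $0, 1) is the superscalar no-op and _ehb
 * (sll $0, $0, 3) is the MIPS32/64 R2 execution hazard barrier.  Both
 * encodings decode as a plain shift of $0, i.e. a nop, on processors
 * that do not implement them, so they are safe to emit everywhere.
 */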

/*
 * TLB hazards
 */
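
/*
 * A sketch of how these barriers are meant to be used (illustrative
 * only; the CP0 accessors are those from <asm/mipsregs.h>): a TLB
 * update is expected to look roughly like
 *
 *	write_c0_entryhi(...);
 *	write_c0_entrylo0(...);
 *	write_c0_entrylo1(...);
 *	mtc0_tlbw_hazard();	   make the mtc0s visible to tlbwi
 *	tlb_write_indexed();
 *	tlbw_use_hazard();	   make the new entry visible to uses
 */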

#if defined(CONFIG_CPU_MIPSR2)

/*
 * MIPSR2 defines ehb for hazard avoidance
 */

ASMMACRO(mtc0_tlbw_hazard,
	 _ehb
	)
ASMMACRO(tlbw_use_hazard,
	 _ehb
	)
ASMMACRO(tlb_probe_hazard,
	 _ehb
	)
ASMMACRO(irq_enable_hazard,
	)
ASMMACRO(irq_disable_hazard,
	 _ehb
	)
ASMMACRO(back_to_back_c0_hazard,
	 _ehb
	)

/*
 * gcc has a tradition of miscompiling the previous construct using the
 * address of a label as an argument to inline assembler.  Gas, on the
 * other hand, has the annoying distinction between la and dla, which are
 * usable only for 32-bit resp. 64-bit code, so neither can be used
 * without conditional compilation.  The alternative is switching the
 * assembler to 64-bit code, which happens to work right even for 32-bit
 * code ...
 */
#define instruction_hazard()						\
do {									\
	unsigned long tmp;						\
									\
	__asm__ __volatile__(						\
	"	.set	mips64r2				\n"	\
	"	dla	%0, 1f					\n"	\
	"	jr.hb	%0					\n"	\
	"	.set	mips0					\n"	\
	"1:							\n"	\
	: "=r" (tmp));							\
} while (0)
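
/*
 * Illustrative use of instruction_hazard() (a sketch, not a specific
 * in-tree caller): after writing new instructions to memory and
 * flushing the caches, clear the hazard before jumping to the modified
 * code so the pipeline does not execute stale instructions:
 *
 *	memcpy(dst, new_code, len);
 *	flush_icache_range((unsigned long)dst, (unsigned long)dst + len);
 *	instruction_hazard();
 */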

#elif defined(CONFIG_CPU_R10000)

/*
 * R10000 rocks - all hazards are handled in hardware, so this becomes a
 * no-brainer.
 */
ASMMACRO(mtc0_tlbw_hazard,
	)
ASMMACRO(tlbw_use_hazard,
	)
ASMMACRO(tlb_probe_hazard,
	)
ASMMACRO(irq_enable_hazard,
	)
ASMMACRO(irq_disable_hazard,
	)
ASMMACRO(back_to_back_c0_hazard,
	)
#define instruction_hazard() do { } while (0)

#elif defined(CONFIG_CPU_RM9000)

/*
 * RM9000 hazards.  When the JTLB is updated by tlbwi or tlbwr, a subsequent
 * use of the JTLB for instructions should not occur for 4 cpu cycles and use
 * for data translations should not occur for 3 cpu cycles.
 */
ASMMACRO(mtc0_tlbw_hazard,
	 _ssnop; _ssnop; _ssnop; _ssnop
	)
ASMMACRO(tlbw_use_hazard,
	 _ssnop; _ssnop; _ssnop; _ssnop
	)
ASMMACRO(tlb_probe_hazard,
	 _ssnop; _ssnop; _ssnop; _ssnop
	)
ASMMACRO(irq_enable_hazard,
	)
ASMMACRO(irq_disable_hazard,
	)
ASMMACRO(back_to_back_c0_hazard,
	)
#define instruction_hazard() do { } while (0)

#elif defined(CONFIG_CPU_SB1)

/*
 * Mostly like the R4000 for historical reasons
 */
ASMMACRO(mtc0_tlbw_hazard,
	)
ASMMACRO(tlbw_use_hazard,
	)
ASMMACRO(tlb_probe_hazard,
	)
ASMMACRO(irq_enable_hazard,
	)
ASMMACRO(irq_disable_hazard,
	 _ssnop; _ssnop; _ssnop
	)
ASMMACRO(back_to_back_c0_hazard,
	)
#define instruction_hazard() do { } while (0)

#else

/*
 * Finally the catchall case for all other processors, including the R4000,
 * R4400, R4600, R4700, R5000, RM7000, NEC VR41xx etc.
 *
 * The taken branch will result in a two cycle penalty for the two killed
 * instructions on R4000 / R4400.  Other processors only have a single cycle
 * hazard, so this is a nice trick to get optimal code for a range of
 * processors.
 */
ASMMACRO(mtc0_tlbw_hazard,
	 nop
	)
ASMMACRO(tlbw_use_hazard,
	 nop; nop; nop
	)
ASMMACRO(tlb_probe_hazard,
	 nop; nop; nop
	)
ASMMACRO(irq_enable_hazard,
	)
ASMMACRO(irq_disable_hazard,
	 nop; nop; nop
	)
ASMMACRO(back_to_back_c0_hazard,
	 _ssnop; _ssnop; _ssnop;
	)
#define instruction_hazard() do { } while (0)

#endif
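
/*
 * Illustrative use of back_to_back_c0_hazard() (a sketch; the CP0
 * accessors are those from <asm/mipsregs.h>): separate two consecutive
 * CP0 accesses where the second must observe the effect of the first,
 * e.g. reading back a register that was just written:
 *
 *	write_c0_status(new);
 *	back_to_back_c0_hazard();
 *	tmp = read_c0_status();
 */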

#endif /* _ASM_HAZARDS_H */