/* mips-atomic.c */
/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1994, 95, 96, 97, 98, 99, 2003 by Ralf Baechle
 * Copyright (C) 1996 by Paul M. Antoine
 * Copyright (C) 1999 Silicon Graphics
 * Copyright (C) 2000 MIPS Technologies, Inc.
 */
  11. #include <asm/irqflags.h>
  12. #include <asm/hazards.h>
  13. #include <linux/compiler.h>
  14. #include <linux/preempt.h>
  15. #include <linux/export.h>
  16. #include <linux/stringify.h>
  17. #if !defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_MIPS_MT_SMTC)
/*
 * For cli() we have to insert nops to make sure that the new value
 * has actually arrived in the status register before the end of this
 * macro.
 * R4000/R4400 need three nops, the R4600 two nops and the R10000 needs
 * no nops at all.
 */
/*
 * For TX49, operating on only the IE bit is not enough.
 *
 * If mfc0 $12 follows a store and the mfc0 is the last instruction of a
 * page and fetching the next instruction causes a TLB miss, the result
 * of the mfc0 might wrongly contain the EXL bit.
 *
 * ERT-TX49H2-027, ERT-TX49H3-012, ERT-TX49HL3-006, ERT-TX49H4-008
 *
 * Workaround: mask the EXL bit of the result or place a nop before mfc0.
 */
/*
 * arch_local_irq_disable - hard-disable interrupts on the local CPU.
 *
 * SMTC: sets the IXMT "interrupt exempt" bit (0x400) in the CP0
 * TCStatus register ($2, sel 1) for the current thread context.
 * Non-R2, non-SMTC: clears the low five bits of CP0 Status ($12)
 * via the ori/xori 0x1f trick (IE among them; the others are zero
 * in kernel mode).  On plain MIPSR2 the inline version in
 * <asm/irqflags.h> is used instead, so this whole file is compiled
 * out (see the #if at the top).
 *
 * notrace: this runs inside the tracer itself, so it must not be
 * traced.  Marked preempt-safe by pinning to one CPU around the
 * CP0 read-modify-write.
 */
notrace void arch_local_irq_disable(void)
{
	preempt_disable();

	__asm__ __volatile__(
	/* $1 ($at) is used as scratch, hence .set noat. */
	" .set push \n"
	" .set noat \n"
#ifdef CONFIG_MIPS_MT_SMTC
	/* TCStatus |= IXMT (0x400): exempt this TC from interrupts. */
	" mfc0 $1, $2, 1 \n"
	" ori $1, 0x400 \n"
	" .set noreorder \n"
	" mtc0 $1, $2, 1 \n"
#elif defined(CONFIG_CPU_MIPSR2)
	/* see irqflags.h for inline function */
#else
	/* ori+xori 0x1f forces Status bits 0-4 (incl. IE) to zero. */
	" mfc0 $1,$12 \n"
	" ori $1,0x1f \n"
	" xori $1,0x1f \n"
	" .set noreorder \n"
	" mtc0 $1,$12 \n"
#endif
	/* Wait out the mtc0 hazard so interrupts are really off on return. */
	" " __stringify(__irq_disable_hazard) " \n"
	" .set pop \n"
	: /* no outputs */
	: /* no inputs */
	: "memory");

	preempt_enable();
}
EXPORT_SYMBOL(arch_local_irq_disable);
/*
 * arch_local_irq_save - disable interrupts and return the previous state.
 *
 * Return value (to be handed to arch_local_irq_restore() later):
 *   SMTC:     the old IXMT bit only (TCStatus & 0x400).
 *   non-R2:   the complete old CP0 Status word (IE bit is what
 *             restore actually uses).
 *
 * The disable itself mirrors arch_local_irq_disable(): set IXMT on
 * SMTC, clear Status bits 0-4 with ori/xori 0x1f otherwise.  Plain
 * MIPSR2 uses the inline version in <asm/irqflags.h>.
 */
notrace unsigned long arch_local_irq_save(void)
{
	unsigned long flags;

	preempt_disable();

	__asm__ __volatile__(
	" .set push \n"
	" .set reorder \n"
	" .set noat \n"
#ifdef CONFIG_MIPS_MT_SMTC
	/* Save TCStatus, set IXMT in a scratch copy, write it back,
	 * then reduce the return value to just the old IXMT bit. */
	" mfc0 %[flags], $2, 1 \n"
	" ori $1, %[flags], 0x400 \n"
	" .set noreorder \n"
	" mtc0 $1, $2, 1 \n"
	" andi %[flags], %[flags], 0x400 \n"
#elif defined(CONFIG_CPU_MIPSR2)
	/* see irqflags.h for inline function */
#else
	/* %[flags] keeps the unmodified old Status; the cleared copy
	 * goes through $1. */
	" mfc0 %[flags], $12 \n"
	" ori $1, %[flags], 0x1f \n"
	" xori $1, 0x1f \n"
	" .set noreorder \n"
	" mtc0 $1, $12 \n"
#endif
	/* CP0 write hazard barrier before relying on interrupts-off. */
	" " __stringify(__irq_disable_hazard) " \n"
	" .set pop \n"
	: [flags] "=r" (flags)
	: /* no inputs */
	: "memory");

	preempt_enable();

	return flags;
}
EXPORT_SYMBOL(arch_local_irq_save);
/*
 * arch_local_irq_restore - restore the interrupt state saved by
 * arch_local_irq_save().
 *
 * @flags: cookie from arch_local_irq_save() — old IXMT bit (SMTC)
 *         or old Status word (non-R2).
 *
 * Only the relevant enable bit of @flags is merged back into the
 * live register; all other live bits are preserved.  The inline asm
 * declares %[flags] as an output tied to the @flags input ("0"
 * constraint) because the andi/or sequence clobbers the register —
 * __tmp1 only exists to receive that clobbered value.
 */
notrace void arch_local_irq_restore(unsigned long flags)
{
	unsigned long __tmp1;

#ifdef CONFIG_MIPS_MT_SMTC
	/*
	 * SMTC kernel needs to do a software replay of queued
	 * IPIs, at the cost of branch and call overhead on each
	 * local_irq_restore()
	 */
	/* IXMT clear in @flags means interrupts are being re-enabled:
	 * deliver any IPIs queued while we were exempt. */
	if (unlikely(!(flags & 0x0400)))
		smtc_ipi_replay();
#endif
	preempt_disable();

	__asm__ __volatile__(
	" .set push \n"
	" .set noreorder \n"
	" .set noat \n"
#ifdef CONFIG_MIPS_MT_SMTC
	/* Keep only the saved IXMT bit, clear IXMT in the live
	 * TCStatus (ori+xori 0x400), then OR the saved bit back in. */
	" mfc0 $1, $2, 1 \n"
	" andi %[flags], 0x400 \n"
	" ori $1, 0x400 \n"
	" xori $1, 0x400 \n"
	" or %[flags], $1 \n"
	" mtc0 %[flags], $2, 1 \n"
#elif defined(CONFIG_CPU_MIPSR2) && defined(CONFIG_IRQ_CPU)
	/* see irqflags.h for inline function */
#elif defined(CONFIG_CPU_MIPSR2)
	/* see irqflags.h for inline function */
#else
	/* Keep only the saved IE bit (bit 0), clear the live low
	 * Status bits (ori+xori 0x1f), merge, and write back. */
	" mfc0 $1, $12 \n"
	" andi %[flags], 1 \n"
	" ori $1, 0x1f \n"
	" xori $1, 0x1f \n"
	" or %[flags], $1 \n"
	" mtc0 %[flags], $12 \n"
#endif
	/* Hazard barrier after the CP0 write. */
	" " __stringify(__irq_disable_hazard) " \n"
	" .set pop \n"
	: [flags] "=r" (__tmp1)
	: "0" (flags)
	: "memory");

	preempt_enable();
}
EXPORT_SYMBOL(arch_local_irq_restore);
/*
 * __arch_local_irq_restore - restore saved interrupt state WITHOUT
 * the SMTC IPI replay that arch_local_irq_restore() performs.
 *
 * @flags: cookie from arch_local_irq_save().
 *
 * The asm body is identical to arch_local_irq_restore(): merge the
 * saved enable bit (IXMT on SMTC, IE otherwise) into the live CP0
 * register, preserving all other bits.  %[flags]/__tmp1 use a tied
 * "0" constraint because the sequence clobbers the input register.
 *
 * NOTE(review): presumably for callers that must not re-enter the
 * IPI replay path (e.g. from within SMTC internals) — confirm
 * against the call sites; not visible from this file.
 */
notrace void __arch_local_irq_restore(unsigned long flags)
{
	unsigned long __tmp1;

	preempt_disable();

	__asm__ __volatile__(
	" .set push \n"
	" .set noreorder \n"
	" .set noat \n"
#ifdef CONFIG_MIPS_MT_SMTC
	/* Keep saved IXMT, clear live IXMT (ori+xori 0x400), merge. */
	" mfc0 $1, $2, 1 \n"
	" andi %[flags], 0x400 \n"
	" ori $1, 0x400 \n"
	" xori $1, 0x400 \n"
	" or %[flags], $1 \n"
	" mtc0 %[flags], $2, 1 \n"
#elif defined(CONFIG_CPU_MIPSR2) && defined(CONFIG_IRQ_CPU)
	/* see irqflags.h for inline function */
#elif defined(CONFIG_CPU_MIPSR2)
	/* see irqflags.h for inline function */
#else
	/* Keep saved IE (bit 0), clear live Status bits 0-4, merge. */
	" mfc0 $1, $12 \n"
	" andi %[flags], 1 \n"
	" ori $1, 0x1f \n"
	" xori $1, 0x1f \n"
	" or %[flags], $1 \n"
	" mtc0 %[flags], $12 \n"
#endif
	/* Hazard barrier after the CP0 write. */
	" " __stringify(__irq_disable_hazard) " \n"
	" .set pop \n"
	: [flags] "=r" (__tmp1)
	: "0" (flags)
	: "memory");

	preempt_enable();
}
EXPORT_SYMBOL(__arch_local_irq_restore);
  175. #endif /* !defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_MIPS_MT_SMTC) */