/*
 * arch/sh: 32-bit system helpers — task switch (switch_to), DSP register
 * save/restore, and cached/uncached execution jumps.
 */
#ifndef __ASM_SH_SYSTEM_32_H
#define __ASM_SH_SYSTEM_32_H

#include <linux/types.h>
#ifdef CONFIG_SH_DSP

/* True iff the task's saved status word has the DSP enable bit set. */
#define is_dsp_enabled(tsk) \
	(!!(tsk->thread.dsp_status.status & SR_DSP))

/*
 * Reload the DSP register file from tsk->thread.dsp_status.
 *
 * r2 is pinned as the walk pointer and post-increments forward through
 * the save area; the load order here must be the exact mirror of the
 * pre-decrement store order in __save_dsp() below.
 */
#define __restore_dsp(tsk) \
do { \
	register u32 *__ts2 __asm__ ("r2") = \
		(u32 *)&tsk->thread.dsp_status; \
	__asm__ __volatile__ ( \
		".balign 4\n\t" \
		"movs.l @r2+, a1\n\t" \
		"movs.l @r2+, a0g\n\t" \
		"movs.l @r2+, a1g\n\t" \
		"movs.l @r2+, m0\n\t" \
		"movs.l @r2+, m1\n\t" \
		"movs.l @r2+, a0\n\t" \
		"movs.l @r2+, x0\n\t" \
		"movs.l @r2+, x1\n\t" \
		"movs.l @r2+, y0\n\t" \
		"movs.l @r2+, y1\n\t" \
		"lds.l @r2+, dsr\n\t" \
		"ldc.l @r2+, rs\n\t" \
		"ldc.l @r2+, re\n\t" \
		"ldc.l @r2+, mod\n\t" \
		: : "r" (__ts2)); \
} while (0)

/*
 * Store the DSP register file into tsk->thread.dsp_status.
 *
 * r2 starts one-past-the-end of the 14-word save area and pre-decrements,
 * so the memory layout ends up identical to what __restore_dsp() expects.
 *
 * The .word directives are hand-assembled movs.l opcodes; the trailing
 * "! ..." comment on each line gives the intended mnemonic.
 * NOTE(review): presumably raw opcodes are used because some assemblers
 * reject movs.l with these register operands — __restore_dsp() above uses
 * the mnemonic forms, so the toolchain requirement is asymmetric; confirm.
 */
#define __save_dsp(tsk) \
do { \
	register u32 *__ts2 __asm__ ("r2") = \
		(u32 *)&tsk->thread.dsp_status + 14; \
	\
	__asm__ __volatile__ ( \
		".balign 4\n\t" \
		"stc.l mod, @-r2\n\t" \
		"stc.l re, @-r2\n\t" \
		"stc.l rs, @-r2\n\t" \
		"sts.l dsr, @-r2\n\t" \
		"sts.l y1, @-r2\n\t" \
		"sts.l y0, @-r2\n\t" \
		"sts.l x1, @-r2\n\t" \
		"sts.l x0, @-r2\n\t" \
		"sts.l a0, @-r2\n\t" \
		".word 0xf653 ! movs.l a1, @-r2\n\t" \
		".word 0xf6f3 ! movs.l a0g, @-r2\n\t" \
		".word 0xf6d3 ! movs.l a1g, @-r2\n\t" \
		".word 0xf6c3 ! movs.l m0, @-r2\n\t" \
		".word 0xf6e3 ! movs.l m1, @-r2\n\t" \
		: : "r" (__ts2)); \
} while (0)
#else
/* No DSP hardware configured: all helpers compile away to nothing. */
#define is_dsp_enabled(tsk)	(0)
#define __save_dsp(tsk)		do { } while (0)
#define __restore_dsp(tsk)	do { } while (0)
#endif
/*
 * Low-level context-switch body, implemented in assembly; invoked from
 * the switch_to() macro below. Returns the previously-running task.
 */
struct task_struct *__switch_to(struct task_struct *prev,
				struct task_struct *next);

/*
 * switch_to() should switch tasks to task nr n, first
 */
/*
 * Switch from task 'prev' to task 'next'; 'last' receives the task we
 * actually switched away from (as returned by __switch_to in r0).
 *
 * Register bindings expected by the asm body and by __switch_to:
 *   r1 = &prev->thread.sp     (where the old stack pointer is saved)
 *   r2 = &prev->thread.pc     (where the resume PC is saved)
 *   r4 = prev                 (first C argument to __switch_to)
 *   r5 = next                 (second C argument to __switch_to)
 *   r6 = &next->thread.sp     (new stack pointer loaded from here)
 *   r7 = next->thread.pc      (new task resumes here, via pr)
 *
 * The asm saves gbr, pr and the callee-saved r8-r14 on the old stack,
 * stores sp/pc into prev->thread, switches to the new stack, and jumps
 * to __switch_to with pr set so that its return lands at the new task's
 * saved PC. Label 1 is the resume point where a task restarts and pops
 * its registers back off its own stack.
 */
#define switch_to(prev, next, last) \
do { \
	register u32 *__ts1 __asm__ ("r1"); \
	register u32 *__ts2 __asm__ ("r2"); \
	register u32 *__ts4 __asm__ ("r4"); \
	register u32 *__ts5 __asm__ ("r5"); \
	register u32 *__ts6 __asm__ ("r6"); \
	register u32 __ts7 __asm__ ("r7"); \
	struct task_struct *__last; \
	\
	/* DSP state lives outside the stack frame; save it first. */ \
	if (is_dsp_enabled(prev)) \
		__save_dsp(prev); \
	\
	__ts1 = (u32 *)&prev->thread.sp; \
	__ts2 = (u32 *)&prev->thread.pc; \
	__ts4 = (u32 *)prev; \
	__ts5 = (u32 *)next; \
	__ts6 = (u32 *)&next->thread.sp; \
	__ts7 = next->thread.pc; \
	\
	__asm__ __volatile__ ( \
		".balign 4\n\t" \
		"stc.l gbr, @-r15\n\t" \
		"sts.l pr, @-r15\n\t" \
		"mov.l r8, @-r15\n\t" \
		"mov.l r9, @-r15\n\t" \
		"mov.l r10, @-r15\n\t" \
		"mov.l r11, @-r15\n\t" \
		"mov.l r12, @-r15\n\t" \
		"mov.l r13, @-r15\n\t" \
		"mov.l r14, @-r15\n\t" \
		"mov.l r15, @r1\t! save SP\n\t" \
		"mov.l @r6, r15\t! change to new stack\n\t" \
		"mova 1f, %0\n\t" \
		"mov.l %0, @r2\t! save PC\n\t" \
		"mov.l 2f, %0\n\t" \
		"jmp @%0\t! call __switch_to\n\t" \
		" lds r7, pr\t! with return to new PC\n\t" \
		".balign 4\n" \
		"2:\n\t" \
		".long __switch_to\n" \
		"1:\n\t" \
		"mov.l @r15+, r14\n\t" \
		"mov.l @r15+, r13\n\t" \
		"mov.l @r15+, r12\n\t" \
		"mov.l @r15+, r11\n\t" \
		"mov.l @r15+, r10\n\t" \
		"mov.l @r15+, r9\n\t" \
		"mov.l @r15+, r8\n\t" \
		"lds.l @r15+, pr\n\t" \
		"ldc.l @r15+, gbr\n\t" \
		: "=z" (__last) \
		: "r" (__ts1), "r" (__ts2), "r" (__ts4), \
		  "r" (__ts5), "r" (__ts6), "r" (__ts7) \
		: "r3", "t"); \
	\
	last = __last; \
} while (0)
/*
 * Arch hook run after a context switch completes: bring the incoming
 * task's DSP register file back if it uses the DSP.
 */
#define finish_arch_switch(prev) \
do { \
	if (is_dsp_enabled(prev)) \
		__restore_dsp(prev); \
} while (0)
/*
 * Functions entered via jump_to_uncached() must live in the .uncached.text
 * section and must not be inlined into cached callers.
 */
#define __uses_jump_to_uncached \
	noinline __attribute__ ((__section__ (".uncached.text")))
/*
 * Jump to uncached area.
 * When handling TLB or caches, we need to do it from an uncached area.
 *
 * Takes the PC-relative address of the following instruction (mova needs
 * r0, hence the "z" constraint), adds the cached->uncached offset, and
 * jumps — execution continues at the uncached alias of the same code.
 */
#define jump_to_uncached() \
do { \
	unsigned long __dummy; \
	\
	__asm__ __volatile__( \
		"mova 1f, %0\n\t" \
		"add %1, %0\n\t" \
		"jmp @%0\n\t" \
		" nop\n\t" /* branch delay slot */ \
		".balign 4\n" \
		"1:" \
		: "=&z" (__dummy) \
		: "r" (cached_to_uncached)); \
} while (0)
/*
 * Back to cached area.
 *
 * Loads the link-time (cached) address of label 2 from an inline literal
 * pool and jumps to it, so execution leaves the uncached alias. The
 * ctrl_barrier() serializes outstanding control-register effects before
 * returning to cached execution.
 */
#define back_to_cached() \
do { \
	unsigned long __dummy; \
	ctrl_barrier(); \
	__asm__ __volatile__( \
		"mov.l 1f, %0\n\t" \
		"jmp @%0\n\t" \
		" nop\n\t" /* branch delay slot */ \
		".balign 4\n" \
		"1: .long 2f\n" \
		"2:" \
		: "=&r" (__dummy)); \
} while (0)
#ifdef CONFIG_CPU_HAS_SR_RB
/*
 * Fetch the current exception vector number.
 *
 * With SR.RB register banking, the exception entry path leaves the vector
 * in bank-1 r2; read it via stc r2_bank.
 * NOTE(review): relies on the entry code having stashed the vector there
 * and on no intervening exception — only valid early in handler context.
 */
#define lookup_exception_vector() \
({ \
	unsigned long _vec; \
	\
	__asm__ __volatile__ ( \
		"stc r2_bank, %0\n\t" \
		: "=r" (_vec) \
	); \
	\
	_vec; \
})
#else
/*
 * Without register banks the entry code passes the vector in r4
 * (the first argument register); read it directly.
 */
#define lookup_exception_vector() \
({ \
	unsigned long _vec; \
	__asm__ __volatile__ ( \
		"mov r4, %0\n\t" \
		: "=r" (_vec) \
	); \
	\
	_vec; \
})
#endif
/* Fix up (or signal) an unaligned memory access trap; 'ma' supplies the
 * access callbacks to use. */
int handle_unaligned_access(insn_size_t instruction, struct pt_regs *regs,
			    struct mem_access *ma);

/* Address-error trap entry. */
asmlinkage void do_address_error(struct pt_regs *regs,
				 unsigned long writeaccess,
				 unsigned long address);

/*
 * Trap handlers below follow the SH calling convention used by the
 * assembly entry code: the four argument registers r4-r7 are passed
 * explicitly, and the trap frame follows by value on the stack
 * (hence the unusual 'struct pt_regs __regs' parameter).
 */
asmlinkage void do_divide_error(unsigned long r4, unsigned long r5,
				unsigned long r6, unsigned long r7,
				struct pt_regs __regs);
asmlinkage void do_reserved_inst(unsigned long r4, unsigned long r5,
				 unsigned long r6, unsigned long r7,
				 struct pt_regs __regs);
asmlinkage void do_illegal_slot_inst(unsigned long r4, unsigned long r5,
				     unsigned long r6, unsigned long r7,
				     struct pt_regs __regs);
asmlinkage void do_exception_error(unsigned long r4, unsigned long r5,
				   unsigned long r6, unsigned long r7,
				   struct pt_regs __regs);

#endif /* __ASM_SH_SYSTEM_32_H */