/* entry.S (7.2 KB) */
/* -*- mode: asm -*-
 *
 * linux/arch/h8300/platform/h8s/entry.S
 *
 * Yoshinori Sato <ysato@users.sourceforge.jp>
 *
 * fairly heavy changes to fix syscall args and signal processing
 * by David McCullough <davidm@snapgear.com>
 */
/*
 * entry.S
 * include exception/interrupt gateway
 * system call entry
 */
  15. #include <linux/sys.h>
  16. #include <asm/unistd.h>
  17. #include <asm/setup.h>
  18. #include <asm/segment.h>
  19. #include <asm/linkage.h>
  20. #include <asm/asm-offsets.h>
  21. #include <asm/thread_info.h>
  22. #include <asm/errno.h>
  23. .h8300s
  24. /* CPU context save/restore macros. */
/*
 * SAVE_ALL - build the saved-register frame at exception/interrupt entry.
 *
 * CCR bit 4 is used by this port as the "kernel mode" flag: it is
 * tested on entry and set for the duration of kernel execution.
 *
 * User-mode entry: the user SP is parked in sw_usp, execution switches
 * to the kernel stack from sw_ksp, and the hardware exception frame on
 * the user stack is copied into the kernel frame (EXR at offset 8, the
 * CCR:PC return longword at offset 10 -- per the original comments;
 * confirm against the H8S exception-frame layout).
 *
 * Kernel-mode entry: stay on the current stack; only the saved CCR word
 * has to be re-formatted into the LCCR slot.
 *
 * Frame slot offsets (LER0..LRET, LCCR, LEXR, LORIG, LVEC) come from
 * asm-offsets.  er6..er4 are pushed last; they carry syscall
 * arguments #6..#4.  Clobbers er0-er3 (saved into the frame first).
 */
.macro SAVE_ALL
	mov.l	er0,@-sp			/* free er0 as scratch */
	stc	ccr,r0l				/* check kernel mode */
	orc	#0x10,ccr			/* mark: now running in kernel */
	btst	#4,r0l
	bne	5f				/* already kernel -> 5f */

	mov.l	sp,@SYMBOL_NAME(sw_usp)		/* user mode: park user SP */
	mov.l	@sp,er0				/* recover caller's er0 */
	mov.l	@SYMBOL_NAME(sw_ksp),sp		/* switch to kernel stack */
	sub.l	#(LRET-LORIG),sp		/* allocate LORIG - LRET */
	stm.l	er0-er3,@-sp
	mov.l	@SYMBOL_NAME(sw_usp),er0
	mov.l	@(10:16,er0),er1		/* copy the RET addr */
	mov.l	er1,@(LRET-LER3:16,sp)
	mov.w	@(8:16,er0),r1
	mov.w	r1,@(LEXR-LER3:16,sp)		/* copy EXR */
	mov.w	e1,r1				/* e1 highbyte = ccr */
	and	#0xef,r1h			/* clear kernel-mode flag in the saved CCR */
	sub.w	r0,r0
	mov.b	r1h,r0l
	mov.w	r0,@(LCCR-LER3:16,sp)		/* copy ccr */
	mov.l	@(LORIG-LER3:16,sp),er0
	mov.l	er0,@(LER0-LER3:16,sp)		/* copy ER0 */
	bra	6f
5:
	mov.l	@sp,er0				/* kernel mode: recover er0 */
	subs	#2,sp				/* dummy ccr slot */
	stm.l	er0-er3,@-sp
	mov.w	@(LRET-LER3:16,sp),r1		/* re-format old ccr into LCCR */
	mov.b	r1h,r1l
	mov.b	#0,r1h
	mov.w	r1,@(LCCR-LER3:16,sp)
6:
	mov.l	er6,@-sp			/* syscall arg #6 */
	mov.l	er5,@-sp			/* syscall arg #5 */
	mov.l	er4,@-sp			/* syscall arg #4 */
.endm
/*
 * RESTORE_ALL - unwind the SAVE_ALL frame and return with rte.
 *
 * Pops the frame in the exact reverse order SAVE_ALL built it.
 * LCCR bit 4 (this port's kernel-mode flag) selects the path:
 *  - return to user: rebuild the hardware exception frame (saved ER0,
 *    EXR, CCR:PC longword) on the user stack, remember the kernel SP
 *    in sw_ksp for the next entry, then switch back to the user stack;
 *  - return to kernel: simply discard the extra frame slots.
 */
.macro RESTORE_ALL
	mov.l	@sp+,er4			/* pop syscall args #4..#6 */
	mov.l	@sp+,er5
	mov.l	@sp+,er6
	ldm.l	@sp+,er2-er3
	mov.w	@(LCCR-LER1:16,sp),r0		/* check kernel mode */
	btst	#4,r0l
	bne	7f				/* returning to kernel -> 7f */

	orc	#0x80,ccr			/* irqs off while switching stacks */
	mov.l	@SYMBOL_NAME(sw_usp),er0	/* er0 = user SP */
	mov.l	@(LER0-LER1:16,sp),er1		/* restore ER0 into user frame */
	mov.l	er1,@er0
	mov.w	@(LEXR-LER1:16,sp),r1		/* restore EXR */
	mov.b	r1l,r1h
	mov.w	r1,@(8:16,er0)
	mov.w	@(LCCR-LER1:16,sp),r1		/* rebuild CCR:PC longword for rte */
	mov.b	r1l,r1h
	mov.b	@(LRET+1-LER1:16,sp),r1l
	mov.w	r1,e1
	mov.w	@(LRET+2-LER1:16,sp),r1
	mov.l	er1,@(10:16,er0)		/* ...into the user hardware frame */
	mov.l	@sp+,er1
	add.l	#(LRET-LER1),sp			/* remove LORIG - LRET */
	mov.l	sp,@SYMBOL_NAME(sw_ksp)		/* save kernel SP for next entry */
	mov.l	er0,sp				/* back onto the user stack */
	bra	8f
7:
	mov.l	@sp+,er1			/* kernel return path */
	adds	#4,sp				/* skip LORIG slot */
	adds	#2,sp				/* skip the dummy ccr slot */
8:
	mov.l	@sp+,er0
	adds	#4,sp				/* remove the sw created LVEC */
	rte
.endm
/* Symbols shared with C code and with the vector redirect table. */
.globl SYMBOL_NAME(system_call)
.globl SYMBOL_NAME(ret_from_exception)
.globl SYMBOL_NAME(ret_from_fork)
.globl SYMBOL_NAME(ret_from_interrupt)
.globl SYMBOL_NAME(interrupt_redirect_table)
.globl SYMBOL_NAME(sw_ksp),SYMBOL_NAME(sw_usp)
.globl SYMBOL_NAME(resume)
.globl SYMBOL_NAME(trace_break)
.globl SYMBOL_NAME(interrupt_entry)

/* Total number of vector slots covered by the redirect table below. */
INTERRUPTS = 128
#if defined(CONFIG_ROMKERNEL)
/*
 * ROM kernel: the redirect table is assembled in place, one 4-byte
 * slot per vector.  Slots 0-6 (reset/reserved) are empty; NMI and the
 * remaining interrupts funnel through interrupt_entry via jsr (the
 * pushed return address encodes the vector number); TRAPA #0 and
 * TRAPA #3 jump straight to their handlers.
 */
.section .int_redirect,"ax"
SYMBOL_NAME_LABEL(interrupt_redirect_table)
	.rept	7
	.long	0
	.endr
	jsr	@SYMBOL_NAME(interrupt_entry)	/* NMI */
	jmp	@SYMBOL_NAME(system_call)	/* TRAPA #0 (System call) */
	.long	0
	.long	0
	jmp	@SYMBOL_NAME(trace_break)	/* TRAPA #3 (breakpoint) */
	.rept	INTERRUPTS-12
	jsr	@SYMBOL_NAME(interrupt_entry)
	.endr
#endif
#if defined(CONFIG_RAMKERNEL)
/*
 * RAM kernel: the table lives elsewhere; reserve one pointer-sized
 * slot holding its base address (read by interrupt_entry below to
 * recover the vector number).
 */
.globl SYMBOL_NAME(interrupt_redirect_table)
.section .bss
SYMBOL_NAME_LABEL(interrupt_redirect_table)
	.space	4
#endif
.section .text
.align	2
/*
 * interrupt_entry - common interrupt trampoline.
 *
 * Reached by the `jsr` planted in the redirect table slot, so the
 * return address the jsr pushed identifies the vector:
 *     vector = (retaddr - table_base)/4 - 1
 * The push landed on whichever stack was active, hence the mode check.
 * Calls process_int(vector, frame), runs softirqs if any are pending,
 * then takes the common exception-return path.
 */
SYMBOL_NAME_LABEL(interrupt_entry)
	SAVE_ALL
	mov.w	@(LCCR,sp),r0			/* interrupted user or kernel? */
	btst	#4,r0l
	bne	1f
	mov.l	@SYMBOL_NAME(sw_usp),er0	/* user: jsr retaddr on user stack */
	mov.l	@(4:16,er0),er0
	bra	2f
1:
	mov.l	@(LVEC:16,sp),er0		/* kernel: retaddr saved as LVEC */
2:
#if defined(CONFIG_ROMKERNEL)
	sub.l	#SYMBOL_NAME(interrupt_redirect_table),er0
#endif
#if defined(CONFIG_RAMKERNEL)
	mov.l	@SYMBOL_NAME(interrupt_redirect_table),er1
	sub.l	er1,er0
#endif
	shlr.l	#2,er0				/* byte offset -> slot index */
	dec.l	#1,er0				/* jsr pushed the *next* slot's addr */
	mov.l	sp,er1
	subs	#4,er1				/* adjust ret_pc */
	jsr	@SYMBOL_NAME(process_int)
	mov.l	@SYMBOL_NAME(irq_stat)+CPUSTAT_SOFTIRQ_PENDING,er0
	beq	1f				/* no softirqs pending */
	jsr	@SYMBOL_NAME(do_softirq)
1:
	jmp	@SYMBOL_NAME(ret_from_exception)
  158. SYMBOL_NAME_LABEL(system_call)
  159. subs #4,sp /* dummy LVEC */
  160. SAVE_ALL
  161. mov.w @(LCCR:16,sp),r1
  162. bset #4,r1l
  163. ldc r1l,ccr /* restore ccr */
  164. mov.l er0,er4
  165. mov.l #-ENOSYS,er0
  166. mov.l er0,@(LER0:16,sp)
  167. /* save top of frame */
  168. mov.l sp,er0
  169. jsr @SYMBOL_NAME(set_esp0)
  170. cmp.l #NR_syscalls,er4
  171. bcc SYMBOL_NAME(ret_from_exception):16
  172. shll.l #2,er4
  173. mov.l #SYMBOL_NAME(sys_call_table),er0
  174. add.l er4,er0
  175. mov.l @er0,er0
  176. mov.l er0,er4
  177. beq SYMBOL_NAME(ret_from_exception):16
  178. mov.l sp,er2
  179. and.w #0xe000,r2
  180. mov.b @((TASK_FLAGS+3-(TIF_SYSCALL_TRACE >> 3)):16,er2),r2l
  181. btst #(TIF_SYSCALL_TRACE & 7),r2l
  182. mov.l @(LER1:16,sp),er0
  183. mov.l @(LER2:16,sp),er1
  184. mov.l @(LER3:16,sp),er2
  185. jsr @er4
  186. mov.l er0,@(LER0:16,sp) /* save the return value */
  187. #if defined(CONFIG_SYSCALL_PRINT)
  188. jsr @SYMBOL_NAME(syscall_print)
  189. #endif
  190. bra SYMBOL_NAME(ret_from_exception):8
  191. 1:
  192. jsr SYMBOL_NAME(syscall_trace)
  193. mov.l @(LER1:16,sp),er0
  194. mov.l @(LER2:16,sp),er1
  195. mov.l @(LER3:16,sp),er2
  196. jsr @er4
  197. mov.l er0,@(LER0:16,sp) /* save the return value */
  198. jsr @SYMBOL_NAME(syscall_trace)
  199. bra SYMBOL_NAME(ret_from_exception):8
/*
 * ret_from_fork - first code executed by a newly forked task.
 * er2 is handed to schedule_tail() in er0 (presumably the previous
 * task -- resume returns `last` in er2; confirm against switch_to),
 * then the normal exception-return path runs.
 */
SYMBOL_NAME_LABEL(ret_from_fork)
	mov.l	er2,er0
	jsr	@SYMBOL_NAME(schedule_tail)
	bra	SYMBOL_NAME(ret_from_exception):8
/*
 * reschedule - record the current frame for signal/ptrace use and
 * call the scheduler; execution falls through into ret_from_exception,
 * so the work-pending checks are re-run after the switch back.
 */
SYMBOL_NAME_LABEL(reschedule)
	/* save top of frame */
	mov.l	sp,er0
	jsr	@SYMBOL_NAME(set_esp0)
	jsr	@SYMBOL_NAME(schedule)
/*
 * ret_from_exception / ret_from_interrupt - common return path.
 *
 * Returning to kernel mode: go straight to `done` (frame unwound by
 * RESTORE_ALL).  Returning to user mode: re-enable interrupts, locate
 * thread_info by masking the stack pointer (8 KB-aligned stacks), and
 * handle TIF work: reschedule if TIF_NEED_RESCHED, otherwise deliver
 * pending signals via do_signal().
 */
SYMBOL_NAME_LABEL(ret_from_exception)
#if defined(CONFIG_PREEMPT)
	orc	#0x80,ccr			/* preempt: run the checks with irqs off */
#endif
SYMBOL_NAME_LABEL(ret_from_interrupt)
	mov.b	@(LCCR+1:16,sp),r0l
	btst	#4,r0l				/* check if returning to kernel */
	bne	done:8				/* if so, skip resched, signals */
	andc	#0x7f,ccr			/* returning to user: irqs back on */
	mov.l	sp,er4				/* er4 = thread_info */
	and.w	#0xe000,r4
	mov.l	@(TI_FLAGS:16,er4),er1
	and.l	#_TIF_WORK_MASK,er1		/* any work pending at all? */
	beq	done:8
1:
	mov.l	@(TI_FLAGS:16,er4),er1
	btst	#TIF_NEED_RESCHED,r1l
	bne	SYMBOL_NAME(reschedule):16
	mov.l	sp,er0				/* do_signal(regs, ...) */
	subs	#4,er0				/* adjust retpc */
	mov.l	er2,er1
	jsr	@SYMBOL_NAME(do_signal)
#if defined(CONFIG_PREEMPT)
	bra	done:8				/* userspace through */
	/*
	 * Kernel-preemption path.  NOTE(review): label 3: is not
	 * referenced from the code visible here -- confirm whether it
	 * is entered from elsewhere or is dead.
	 */
3:
	btst	#4,r0l
	beq	done:8				/* userspace through */
4:
	mov.l	@(TI_PRE_COUNT:16,er4),er1
	bne	done:8				/* preemption disabled */
	mov.l	@(TI_FLAGS:16,er4),er1
	btst	#TIF_NEED_RESCHED,r1l
	beq	done:8				/* no resched requested */
	mov.b	r0l,r0l				/* retest saved CCR sign bit */
	bpl	done:8				/* interrupt off (exception path?) */
	mov.l	#PREEMPT_ACTIVE,er1
	mov.l	er1,@(TI_PRE_COUNT:16,er4)	/* mark preemption in progress */
	andc	#0x7f,ccr			/* irqs on around schedule() */
	jsr	@SYMBOL_NAME(schedule)
	sub.l	er1,er1
	mov.l	er1,@(TI_PRE_COUNT:16,er4)	/* clear preempt count */
	orc	#0x80,ccr
	bra	4b:8				/* recheck for more work */
#endif
done:
	RESTORE_ALL				/* Does RTE */
/*
 * resume - the switch_to() context switch.
 *	er0 = prev
 *	er1 = next
 *	return last in er2
 *
 * Saves CCR/EXR, the parked user SP (sw_usp) and the kernel SP into
 * prev's thread struct, then loads the same three from next.  The rts
 * therefore returns on next's kernel stack.
 */
SYMBOL_NAME_LABEL(resume)
	/* save status register (CCR low byte, EXR high byte of r3) */
	sub.w	r3,r3
	stc	ccr,r3l
	stc	exr,r3h
	mov.w	r3,@(THREAD_CCR+2:16,er0)
	/* disable interrupts for the stack switch */
	orc	#0x80,ccr
	mov.l	@SYMBOL_NAME(sw_usp),er3
	mov.l	er3,@(THREAD_USP:16,er0)	/* stash prev's user SP */
	mov.l	sp,@(THREAD_KSP:16,er0)		/* ...and prev's kernel SP */
	/* Skip address space switching if they are the same. */
	/* FIXME: what did we hack out of here, this does nothing! */
	mov.l	@(THREAD_USP:16,er1),er0
	mov.l	er0,@SYMBOL_NAME(sw_usp)	/* install next's user SP */
	mov.l	@(THREAD_KSP:16,er1),sp		/* switch kernel stacks */
	/* restore status register (re-enables irqs if next had them on) */
	mov.w	@(THREAD_CCR+2:16,er1),r3
	ldc	r3l,ccr
	ldc	r3h,exr
	rts
/*
 * trace_break - TRAPA #3 (breakpoint) handler.
 *
 * Marks the frame's LORIG slot with -1 (not a syscall), rewinds the
 * longword at the top of the parked user stack by 2 (presumably
 * stepping the saved PC back over the 2-byte trap instruction --
 * confirm against the user-frame layout), and passes the resulting
 * address to trace_trap().
 */
SYMBOL_NAME_LABEL(trace_break)
	subs	#4,sp				/* dummy LVEC */
	SAVE_ALL
	sub.l	er1,er1
	dec.l	#1,er1
	mov.l	er1,@(LORIG,sp)			/* LORIG = -1 */
	mov.l	sp,er0
	jsr	@SYMBOL_NAME(set_esp0)
	mov.l	@SYMBOL_NAME(sw_usp),er0
	mov.l	@er0,er1			/* top longword of user stack */
	subs	#2,er1
	mov.l	er1,@er0			/* write the rewound value back */
	and.w	#0xff,e1			/* NOTE(review): strips bits 24-31
						   (CCR byte packed above a 24-bit
						   PC? confirm) */
	mov.l	er1,er0
	jsr	@SYMBOL_NAME(trace_trap)
	jmp	@SYMBOL_NAME(ret_from_exception)
.section .bss
/*
 * Saved stack pointers used by SAVE_ALL/RESTORE_ALL when crossing the
 * user/kernel boundary: sw_ksp holds the kernel SP to adopt on entry
 * from user mode, sw_usp the parked user SP to return to.
 */
SYMBOL_NAME_LABEL(sw_ksp)
	.space	4
SYMBOL_NAME_LABEL(sw_usp)
	.space	4