
/* -*- mode: asm -*-
 *
 * linux/arch/h8300/platform/h8300h/entry.S
 *
 * Yoshinori Sato <ysato@users.sourceforge.jp>
 * David McCullough <davidm@snapgear.com>
 *
 */

/*
 * entry.S
 * include exception/interrupt gateway
 *         system call entry
 */

#include <linux/sys.h>
#include <asm/unistd.h>
#include <asm/setup.h>
#include <asm/segment.h>
#include <asm/linkage.h>
#include <asm/asm-offsets.h>
#include <asm/thread_info.h>
#include <asm/errno.h>

#if defined(CONFIG_CPU_H8300H)
#define USERRET 8
INTERRUPTS = 64
	.h8300h
	.macro	SHLL2 reg
	shll.l	\reg
	shll.l	\reg
	.endm
	.macro	SHLR2 reg
	shlr.l	\reg
	shlr.l	\reg
	.endm
	.macro	SAVEREGS
	mov.l	er0,@-sp
	mov.l	er1,@-sp
	mov.l	er2,@-sp
	mov.l	er3,@-sp
	.endm
	.macro	RESTOREREGS
	mov.l	@sp+,er3
	mov.l	@sp+,er2
	.endm
	.macro	SAVEEXR
	.endm
	.macro	RESTOREEXR
	.endm
#endif
#if defined(CONFIG_CPU_H8S)
#define USERRET 10
#define USEREXR 8
INTERRUPTS = 128
	.h8300s
	.macro	SHLL2 reg
	shll.l	#2,\reg
	.endm
	.macro	SHLR2 reg
	shlr.l	#2,\reg
	.endm
	.macro	SAVEREGS
	stm.l	er0-er3,@-sp
	.endm
	.macro	RESTOREREGS
	ldm.l	@sp+,er2-er3
	.endm
	.macro	SAVEEXR
	mov.w	@(USEREXR:16,er0),r1
	mov.w	r1,@(LEXR-LER3:16,sp)		/* copy EXR */
	.endm
	.macro	RESTOREEXR
	mov.w	@(LEXR-LER1:16,sp),r1		/* restore EXR */
	mov.b	r1l,r1h
	mov.w	r1,@(USEREXR:16,er0)
	.endm
#endif

/* CPU context save/restore macros. */
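/*
 * SAVE_ALL builds the exception frame: it tests CCR bit 4 to tell a trap
 * from user mode apart from one taken in kernel mode, switches from the
 * user stack (recorded in sw_usp) to the kernel stack (sw_ksp) when
 * needed, and saves er0-er6, the return address and CCR (plus EXR on
 * H8S).  On exit r1 holds the saved CCR.  RESTORE_ALL unwinds the frame
 * and returns with rte.
 */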
	.macro	SAVE_ALL
	mov.l	er0,@-sp
	stc	ccr,r0l				/* check kernel mode */
	btst	#4,r0l
	bne	5f

	/* user mode */
	mov.l	sp,@SYMBOL_NAME(sw_usp)
	mov.l	@sp,er0				/* restore saved er0 */
	orc	#0x10,ccr			/* switch kernel stack */
	mov.l	@SYMBOL_NAME(sw_ksp),sp
	sub.l	#(LRET-LORIG),sp		/* allocate LORIG - LRET */
	SAVEREGS
	mov.l	@SYMBOL_NAME(sw_usp),er0
	mov.l	@(USERRET:16,er0),er1		/* copy the RET addr */
	mov.l	er1,@(LRET-LER3:16,sp)
	SAVEEXR
	mov.l	@(LORIG-LER3:16,sp),er0
	mov.l	er0,@(LER0-LER3:16,sp)		/* copy ER0 */
	mov.w	e1,r1				/* e1 highbyte = ccr */
	and	#0xef,r1h			/* mask mode? flag */
	bra	6f
5:
	/* kernel mode */
	mov.l	@sp,er0				/* restore saved er0 */
	subs	#2,sp				/* set dummy ccr */
	SAVEREGS
	mov.w	@(LRET-LER3:16,sp),r1		/* copy old ccr */
6:
	mov.b	r1h,r1l
	mov.b	#0,r1h
	mov.w	r1,@(LCCR-LER3:16,sp)		/* set ccr */
	mov.l	er6,@-sp			/* syscall arg #6 */
	mov.l	er5,@-sp			/* syscall arg #5 */
	mov.l	er4,@-sp			/* syscall arg #4 */
	.endm					/* r1 = ccr */

	.macro	RESTORE_ALL
	mov.l	@sp+,er4
	mov.l	@sp+,er5
	mov.l	@sp+,er6
	RESTOREREGS
	mov.w	@(LCCR-LER1:16,sp),r0		/* check kernel mode */
	btst	#4,r0l
	bne	7f

	orc	#0x80,ccr
	mov.l	@SYMBOL_NAME(sw_usp),er0
	mov.l	@(LER0-LER1:16,sp),er1		/* restore ER0 */
	mov.l	er1,@er0
	RESTOREEXR
	mov.w	@(LCCR-LER1:16,sp),r1		/* restore the RET addr */
	mov.b	r1l,r1h
	mov.b	@(LRET+1-LER1:16,sp),r1l
	mov.w	r1,e1
	mov.w	@(LRET+2-LER1:16,sp),r1
	mov.l	er1,@(USERRET:16,er0)

	mov.l	@sp+,er1
	add.l	#(LRET-LER1),sp			/* remove LORIG - LRET */
	mov.l	sp,@SYMBOL_NAME(sw_ksp)
	andc	#0xef,ccr			/* switch to user mode */
	mov.l	er0,sp
	bra	8f
7:
	mov.l	@sp+,er1
	adds	#4,sp
	adds	#2,sp
8:
	mov.l	@sp+,er0
	adds	#4,sp				/* remove the sw created LVEC */
	rte
	.endm
.globl SYMBOL_NAME(system_call)
.globl SYMBOL_NAME(ret_from_exception)
.globl SYMBOL_NAME(ret_from_fork)
.globl SYMBOL_NAME(ret_from_interrupt)
.globl SYMBOL_NAME(interrupt_redirect_table)
.globl SYMBOL_NAME(sw_ksp),SYMBOL_NAME(sw_usp)
.globl SYMBOL_NAME(resume)
.globl SYMBOL_NAME(interrupt_entry)
.globl SYMBOL_NAME(trace_break)

#if defined(CONFIG_ROMKERNEL)
	.section .int_redirect,"ax"
SYMBOL_NAME_LABEL(interrupt_redirect_table)
#if defined(CONFIG_CPU_H8300H)
	.rept	7
	.long	0
	.endr
#endif
#if defined(CONFIG_CPU_H8S)
	.rept	5
	.long	0
	.endr
	jmp	@SYMBOL_NAME(trace_break)
	.long	0
#endif

	jsr	@SYMBOL_NAME(interrupt_entry)	/* NMI */
	jmp	@SYMBOL_NAME(system_call)	/* TRAPA #0 (System call) */
	.long	0
	.long	0
	jmp	@SYMBOL_NAME(trace_break)	/* TRAPA #3 (breakpoint) */
	.rept	INTERRUPTS-12
	jsr	@SYMBOL_NAME(interrupt_entry)
	.endr
#endif
#if defined(CONFIG_RAMKERNEL)
.globl SYMBOL_NAME(interrupt_redirect_table)
	.section .bss
SYMBOL_NAME_LABEL(interrupt_redirect_table)
	.space	4
#endif

	.section .text
	.align	2
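/*
 * interrupt_entry: common handler behind the redirect-table slots.  It
 * fetches the vector address saved in the LVEC slot (from the user stack
 * when the interrupt came from user mode), subtracts the base of
 * interrupt_redirect_table, divides by 4 and subtracts one to get the IRQ
 * number, then calls do_IRQ() with the IRQ number in er0 and a pt_regs
 * pointer in er1, leaving through ret_from_interrupt.
 */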
SYMBOL_NAME_LABEL(interrupt_entry)
	SAVE_ALL
	mov.l	sp,er0
	add.l	#LVEC,er0
	btst	#4,r1l
	bne	1f
	/* user LVEC */
	mov.l	@SYMBOL_NAME(sw_usp),er0
	adds	#4,er0
1:
	mov.l	@er0,er0			/* LVEC address */
#if defined(CONFIG_ROMKERNEL)
	sub.l	#SYMBOL_NAME(interrupt_redirect_table),er0
#endif
#if defined(CONFIG_RAMKERNEL)
	mov.l	@SYMBOL_NAME(interrupt_redirect_table),er1
	sub.l	er1,er0
#endif
	SHLR2	er0
	dec.l	#1,er0
	mov.l	sp,er1
	subs	#4,er1				/* adjust ret_pc */
	jsr	@SYMBOL_NAME(do_IRQ)
	jmp	@SYMBOL_NAME(ret_from_interrupt)
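
/*
 * system_call: entry for TRAPA #0.  The system call number arrives in
 * er0; arguments 1-3 are taken from the saved er1-er3 and arguments 4-6
 * were already pushed by SAVE_ALL.  The current thread_info is located by
 * masking the kernel stack pointer with 0xe000, and TIF_SYSCALL_TRACE is
 * checked on both entry and exit.
 */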
SYMBOL_NAME_LABEL(system_call)
	subs	#4,sp				/* dummy LVEC */
	SAVE_ALL
	andc	#0x7f,ccr
	mov.l	er0,er4

	/* save top of frame */
	mov.l	sp,er0
	jsr	@SYMBOL_NAME(set_esp0)
	mov.l	sp,er2
	and.w	#0xe000,r2
	mov.b	@((TI_FLAGS+3-(TIF_SYSCALL_TRACE >> 3)):16,er2),r2l
	btst	#(TIF_SYSCALL_TRACE & 7),r2l
	beq	1f
	jsr	@SYMBOL_NAME(do_syscall_trace)
1:
	cmp.l	#NR_syscalls,er4
	bcc	badsys
	SHLL2	er4
	mov.l	#SYMBOL_NAME(sys_call_table),er0
	add.l	er4,er0
	mov.l	@er0,er4
	beq	SYMBOL_NAME(ret_from_exception):16
	mov.l	@(LER1:16,sp),er0
	mov.l	@(LER2:16,sp),er1
	mov.l	@(LER3:16,sp),er2
	jsr	@er4
	mov.l	er0,@(LER0:16,sp)		/* save the return value */
	mov.l	sp,er2
	and.w	#0xe000,r2
	mov.b	@((TI_FLAGS+3-(TIF_SYSCALL_TRACE >> 3)):16,er2),r2l
	btst	#(TIF_SYSCALL_TRACE & 7),r2l
	beq	2f
	jsr	@SYMBOL_NAME(do_syscall_trace)
2:
#if defined(CONFIG_SYSCALL_PRINT)
	jsr	@SYMBOL_NAME(syscall_print)
#endif
	orc	#0x80,ccr
	bra	resume_userspace
badsys:
	mov.l	#-ENOSYS,er0
	mov.l	er0,@(LER0:16,sp)
	bra	resume_userspace

#if !defined(CONFIG_PREEMPT)
#define resume_kernel restore_all
#endif
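
/*
 * Return path: ret_from_exception / ret_from_interrupt test the saved CCR
 * to decide whether we are going back to kernel or user mode.  On the way
 * back to user space, TI_FLAGS is checked for pending work (reschedule,
 * notify-resume/signal work) before the frame is unwound by RESTORE_ALL.
 */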
SYMBOL_NAME_LABEL(ret_from_exception)
#if defined(CONFIG_PREEMPT)
	orc	#0x80,ccr
#endif
SYMBOL_NAME_LABEL(ret_from_interrupt)
	mov.b	@(LCCR+1:16,sp),r0l
	btst	#4,r0l
	bne	resume_kernel:8			/* return from kernel */
resume_userspace:
	andc	#0x7f,ccr
	mov.l	sp,er4
	and.w	#0xe000,r4			/* er4 <- current thread info */
	mov.l	@(TI_FLAGS:16,er4),er1
	and.l	#_TIF_WORK_MASK,er1
	beq	restore_all:8
work_pending:
	btst	#TIF_NEED_RESCHED,r1l
	bne	work_resched:8
	/* work notifysig */
	mov.l	sp,er0
	subs	#4,er0				/* er0: pt_regs */
	jsr	@SYMBOL_NAME(do_notify_resume)
	bra	restore_all:8
work_resched:
	mov.l	sp,er0
	jsr	@SYMBOL_NAME(set_esp0)
	jsr	@SYMBOL_NAME(schedule)
	bra	resume_userspace:8
restore_all:
	RESTORE_ALL				/* Does RTE */

#if defined(CONFIG_PREEMPT)
resume_kernel:
	mov.l	@(TI_PRE_COUNT:16,er4),er0
	bne	restore_all:8
need_resched:
	mov.l	@(TI_FLAGS:16,er4),er0
	btst	#TIF_NEED_RESCHED,r0l
	beq	restore_all:8
	mov.b	@(LCCR+1:16,sp),r0l		/* Interrupt Enabled? */
	bmi	restore_all:8
	mov.l	#PREEMPT_ACTIVE,er0
	mov.l	er0,@(TI_PRE_COUNT:16,er4)
	andc	#0x7f,ccr
	mov.l	sp,er0
	jsr	@SYMBOL_NAME(set_esp0)
	jsr	@SYMBOL_NAME(schedule)
	orc	#0x80,ccr
	bra	need_resched:8
#endif
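
/*
 * ret_from_fork: a newly created task starts executing here.  The value
 * set up in er2 at fork time is handed to schedule_tail(), and the task
 * then leaves through the normal exception return path.
 */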
SYMBOL_NAME_LABEL(ret_from_fork)
	mov.l	er2,er0
	jsr	@SYMBOL_NAME(schedule_tail)
	jmp	@SYMBOL_NAME(ret_from_exception)

SYMBOL_NAME_LABEL(resume)
	/*
	 * Beware - when entering resume, er0 points at the previous
	 * task's thread struct and er1 at the next task's (all the
	 * THREAD_* accesses below are relative to these), so don't
	 * change these registers until their contents are no longer
	 * needed.
	 */
	/* save sr */
	sub.w	r3,r3
	stc	ccr,r3l
	mov.w	r3,@(THREAD_CCR+2:16,er0)

	/* disable interrupts */
	orc	#0x80,ccr
	mov.l	@SYMBOL_NAME(sw_usp),er3
	mov.l	er3,@(THREAD_USP:16,er0)
	mov.l	sp,@(THREAD_KSP:16,er0)

	/* Skip address space switching if they are the same. */
	/* FIXME: what did we hack out of here, this does nothing! */

	mov.l	@(THREAD_USP:16,er1),er0
	mov.l	er0,@SYMBOL_NAME(sw_usp)
	mov.l	@(THREAD_KSP:16,er1),sp

	/* restore status register */
	mov.w	@(THREAD_CCR+2:16,er1),r3
	ldc	r3l,ccr
	rts
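
/*
 * trace_break: entry for the breakpoint trap (TRAPA #3, see the vector
 * table above).  It stores -1 in the LORIG slot of the frame, points
 * esp0 at the new frame via set_esp0(), checks the word in front of the
 * address taken from the user stack against the trapa #3 opcode (0x5730)
 * and backs that address up by two bytes when the opcode is absent, then
 * reports the event through trace_trap().
 */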
SYMBOL_NAME_LABEL(trace_break)
	subs	#4,sp
	SAVE_ALL
	sub.l	er1,er1
	dec.l	#1,er1
	mov.l	er1,@(LORIG,sp)
	mov.l	sp,er0
	jsr	@SYMBOL_NAME(set_esp0)
	mov.l	@SYMBOL_NAME(sw_usp),er0
	mov.l	@er0,er1
	mov.w	@(-2:16,er1),r2
	cmp.w	#0x5730,r2
	beq	1f
	subs	#2,er1
	mov.l	er1,@er0
1:
	and.w	#0xff,e1
	mov.l	er1,er0
	jsr	@SYMBOL_NAME(trace_trap)
	jmp	@SYMBOL_NAME(ret_from_exception)

	.section	.bss
SYMBOL_NAME_LABEL(sw_ksp)
	.space	4
SYMBOL_NAME_LABEL(sw_usp)
	.space	4

	.end