/* entry_no.h: m68knommu exception entry/exit and switch-stack macros */
  1. #ifndef __M68KNOMMU_ENTRY_H
  2. #define __M68KNOMMU_ENTRY_H
  3. #include <asm/setup.h>
  4. #include <asm/page.h>
  5. /*
  6. * Stack layout in 'ret_from_exception':
  7. *
  8. * This allows access to the syscall arguments in registers d1-d5
  9. *
  10. * 0(sp) - d1
  11. * 4(sp) - d2
  12. * 8(sp) - d3
  13. * C(sp) - d4
  14. * 10(sp) - d5
  15. * 14(sp) - a0
  16. * 18(sp) - a1
  17. * 1C(sp) - a2
  18. * 20(sp) - d0
  19. * 24(sp) - orig_d0
  20. * 28(sp) - stack adjustment
  21. * 2C(sp) - [ sr ] [ format & vector ]
  22. * 2E(sp) - [ pc-hiword ] [ sr ]
  23. * 30(sp) - [ pc-loword ] [ pc-hiword ]
  24. * 32(sp) - [ format & vector ] [ pc-loword ]
  25. * ^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^
  26. * M68K COLDFIRE
  27. */
  28. #define ALLOWINT (~0x700)
  29. #ifdef __ASSEMBLY__
  30. #define SWITCH_STACK_SIZE (6*4+4) /* Includes return address */
  31. /*
  32. * This defines the normal kernel pt-regs layout.
  33. *
  34. * regs are a2-a6 and d6-d7 preserved by C code
  35. * the kernel doesn't mess with usp unless it needs to
  36. */
  37. #ifdef CONFIG_COLDFIRE
  38. #ifdef CONFIG_COLDFIRE_SW_A7
  39. /*
  40. * This is made a little more tricky on older ColdFires. There is no
  41. * separate supervisor and user stack pointers. Need to artificially
  42. * construct a usp in software... When doing this we need to disable
  43. * interrupts, otherwise bad things will happen.
  44. */
  45. .globl sw_usp
  46. .globl sw_ksp
  47. .macro SAVE_ALL
  48. move #0x2700,%sr /* disable intrs */
  49. btst #5,%sp@(2) /* from user? */
  50. bnes 6f /* no, skip */
  51. movel %sp,sw_usp /* save user sp */
  52. addql #8,sw_usp /* remove exception */
  53. movel sw_ksp,%sp /* kernel sp */
  54. subql #8,%sp /* room for exception */
  55. clrl %sp@- /* stkadj */
  56. movel %d0,%sp@- /* orig d0 */
  57. movel %d0,%sp@- /* d0 */
  58. lea %sp@(-32),%sp /* space for 8 regs */
  59. moveml %d1-%d5/%a0-%a2,%sp@
  60. movel sw_usp,%a0 /* get usp */
  61. movel %a0@-,%sp@(PT_OFF_PC) /* copy exception program counter */
  62. movel %a0@-,%sp@(PT_OFF_FORMATVEC)/*copy exception format/vector/sr */
  63. bra 7f
  64. 6:
  65. clrl %sp@- /* stkadj */
  66. movel %d0,%sp@- /* orig d0 */
  67. movel %d0,%sp@- /* d0 */
  68. lea %sp@(-32),%sp /* space for 8 regs */
  69. moveml %d1-%d5/%a0-%a2,%sp@
  70. 7:
  71. .endm
  72. .macro RESTORE_USER
  73. move #0x2700,%sr /* disable intrs */
  74. movel sw_usp,%a0 /* get usp */
  75. movel %sp@(PT_OFF_PC),%a0@- /* copy exception program counter */
  76. movel %sp@(PT_OFF_FORMATVEC),%a0@-/*copy exception format/vector/sr */
  77. moveml %sp@,%d1-%d5/%a0-%a2
  78. lea %sp@(32),%sp /* space for 8 regs */
  79. movel %sp@+,%d0
  80. addql #4,%sp /* orig d0 */
  81. addl %sp@+,%sp /* stkadj */
  82. addql #8,%sp /* remove exception */
  83. movel %sp,sw_ksp /* save ksp */
  84. subql #8,sw_usp /* set exception */
  85. movel sw_usp,%sp /* restore usp */
  86. rte
  87. .endm
  88. .macro RDUSP
  89. movel sw_usp,%a3
  90. .endm
  91. .macro WRUSP
  92. movel %a3,sw_usp
  93. .endm
  94. #else /* !CONFIG_COLDFIRE_SW_A7 */
  95. /*
  96. * Modern ColdFire parts have separate supervisor and user stack
  97. * pointers. Simple load and restore macros for this case.
  98. */
  99. .macro SAVE_ALL
  100. move #0x2700,%sr /* disable intrs */
  101. clrl %sp@- /* stkadj */
  102. movel %d0,%sp@- /* orig d0 */
  103. movel %d0,%sp@- /* d0 */
  104. lea %sp@(-32),%sp /* space for 8 regs */
  105. moveml %d1-%d5/%a0-%a2,%sp@
  106. .endm
  107. .macro RESTORE_USER
  108. moveml %sp@,%d1-%d5/%a0-%a2
  109. lea %sp@(32),%sp /* space for 8 regs */
  110. movel %sp@+,%d0
  111. addql #4,%sp /* orig d0 */
  112. addl %sp@+,%sp /* stkadj */
  113. rte
  114. .endm
  115. .macro RDUSP
  116. /*move %usp,%a3*/
  117. .word 0x4e6b
  118. .endm
  119. .macro WRUSP
  120. /*move %a3,%usp*/
  121. .word 0x4e63
  122. .endm
  123. #endif /* !CONFIG_COLDFIRE_SW_A7 */
  124. .macro SAVE_SWITCH_STACK
  125. lea %sp@(-24),%sp /* 6 regs */
  126. moveml %a3-%a6/%d6-%d7,%sp@
  127. .endm
  128. .macro RESTORE_SWITCH_STACK
  129. moveml %sp@,%a3-%a6/%d6-%d7
  130. lea %sp@(24),%sp /* 6 regs */
  131. .endm
  132. #else /* !CONFIG_COLDFIRE */
  133. /*
  134. * Standard 68k interrupt entry and exit macros.
  135. */
  136. .macro SAVE_ALL
  137. clrl %sp@- /* stkadj */
  138. movel %d0,%sp@- /* orig d0 */
  139. movel %d0,%sp@- /* d0 */
  140. moveml %d1-%d5/%a0-%a2,%sp@-
  141. .endm
  142. .macro RESTORE_ALL
  143. moveml %sp@+,%a0-%a2/%d1-%d5
  144. movel %sp@+,%d0
  145. addql #4,%sp /* orig d0 */
  146. addl %sp@+,%sp /* stkadj */
  147. rte
  148. .endm
  149. .macro SAVE_SWITCH_STACK
  150. moveml %a3-%a6/%d6-%d7,%sp@-
  151. .endm
  152. .macro RESTORE_SWITCH_STACK
  153. moveml %sp@+,%a3-%a6/%d6-%d7
  154. .endm
  155. #endif /* !COLDFIRE_SW_A7 */
  156. #endif /* __ASSEMBLY__ */
  157. #endif /* __M68KNOMMU_ENTRY_H */