/*
 * entry_no.h -- m68knommu exception entry/exit support definitions
 * (non-MMU m68k / ColdFire kernel entry macros)
 */
  1. #ifndef __M68KNOMMU_ENTRY_H
  2. #define __M68KNOMMU_ENTRY_H
  3. #include <asm/setup.h>
  4. #include <asm/page.h>
  5. /*
  6. * Stack layout in 'ret_from_exception':
  7. *
  8. * This allows access to the syscall arguments in registers d1-d5
  9. *
  10. * 0(sp) - d1
  11. * 4(sp) - d2
  12. * 8(sp) - d3
  13. * C(sp) - d4
  14. * 10(sp) - d5
  15. * 14(sp) - a0
  16. * 18(sp) - a1
  17. * 1C(sp) - a2
  18. * 20(sp) - d0
  19. * 24(sp) - orig_d0
  20. * 28(sp) - stack adjustment
  21. * 2C(sp) - [ sr ] [ format & vector ]
  22. * 2E(sp) - [ pc-hiword ] [ sr ]
  23. * 30(sp) - [ pc-loword ] [ pc-hiword ]
  24. * 32(sp) - [ format & vector ] [ pc-loword ]
  25. * ^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^
  26. * M68K COLDFIRE
  27. */
  28. #define ALLOWINT (~0x700)
  29. #ifdef __ASSEMBLY__
  30. #define SWITCH_STACK_SIZE (6*4+4) /* Includes return address */
  31. /*
  32. * This defines the normal kernel pt-regs layout.
  33. *
  34. * regs are a2-a6 and d6-d7 preserved by C code
  35. * the kernel doesn't mess with usp unless it needs to
  36. */
  37. #ifdef CONFIG_COLDFIRE
  38. /*
  39. * This is made a little more tricky on the ColdFire. There is no
  40. * separate kernel and user stack pointers. Need to artificially
  41. * construct a usp in software... When doing this we need to disable
  42. * interrupts, otherwise bad things could happen.
  43. */
  44. .macro SAVE_ALL
  45. move #0x2700,%sr /* disable intrs */
  46. btst #5,%sp@(2) /* from user? */
  47. bnes 6f /* no, skip */
  48. movel %sp,sw_usp /* save user sp */
  49. addql #8,sw_usp /* remove exception */
  50. movel sw_ksp,%sp /* kernel sp */
  51. subql #8,%sp /* room for exception */
  52. clrl %sp@- /* stkadj */
  53. movel %d0,%sp@- /* orig d0 */
  54. movel %d0,%sp@- /* d0 */
  55. lea %sp@(-32),%sp /* space for 8 regs */
  56. moveml %d1-%d5/%a0-%a2,%sp@
  57. movel sw_usp,%a0 /* get usp */
  58. movel %a0@-,%sp@(PT_OFF_PC) /* copy exception program counter */
  59. movel %a0@-,%sp@(PT_OFF_FORMATVEC)/*copy exception format/vector/sr */
  60. bra 7f
  61. 6:
  62. clrl %sp@- /* stkadj */
  63. movel %d0,%sp@- /* orig d0 */
  64. movel %d0,%sp@- /* d0 */
  65. lea %sp@(-32),%sp /* space for 8 regs */
  66. moveml %d1-%d5/%a0-%a2,%sp@
  67. 7:
  68. .endm
  69. .macro RESTORE_ALL
  70. btst #5,%sp@(PT_SR) /* going user? */
  71. bnes 8f /* no, skip */
  72. move #0x2700,%sr /* disable intrs */
  73. movel sw_usp,%a0 /* get usp */
  74. movel %sp@(PT_OFF_PC),%a0@- /* copy exception program counter */
  75. movel %sp@(PT_OFF_FORMATVEC),%a0@-/*copy exception format/vector/sr */
  76. moveml %sp@,%d1-%d5/%a0-%a2
  77. lea %sp@(32),%sp /* space for 8 regs */
  78. movel %sp@+,%d0
  79. addql #4,%sp /* orig d0 */
  80. addl %sp@+,%sp /* stkadj */
  81. addql #8,%sp /* remove exception */
  82. movel %sp,sw_ksp /* save ksp */
  83. subql #8,sw_usp /* set exception */
  84. movel sw_usp,%sp /* restore usp */
  85. rte
  86. 8:
  87. moveml %sp@,%d1-%d5/%a0-%a2
  88. lea %sp@(32),%sp /* space for 8 regs */
  89. movel %sp@+,%d0
  90. addql #4,%sp /* orig d0 */
  91. addl %sp@+,%sp /* stkadj */
  92. rte
  93. .endm
  94. /*
  95. * Quick exception save, use current stack only.
  96. */
  97. .macro SAVE_LOCAL
  98. move #0x2700,%sr /* disable intrs */
  99. clrl %sp@- /* stkadj */
  100. movel %d0,%sp@- /* orig d0 */
  101. movel %d0,%sp@- /* d0 */
  102. lea %sp@(-32),%sp /* space for 8 regs */
  103. moveml %d1-%d5/%a0-%a2,%sp@
  104. .endm
  105. .macro RESTORE_LOCAL
  106. moveml %sp@,%d1-%d5/%a0-%a2
  107. lea %sp@(32),%sp /* space for 8 regs */
  108. movel %sp@+,%d0
  109. addql #4,%sp /* orig d0 */
  110. addl %sp@+,%sp /* stkadj */
  111. rte
  112. .endm
  113. .macro SAVE_SWITCH_STACK
  114. lea %sp@(-24),%sp /* 6 regs */
  115. moveml %a3-%a6/%d6-%d7,%sp@
  116. .endm
  117. .macro RESTORE_SWITCH_STACK
  118. moveml %sp@,%a3-%a6/%d6-%d7
  119. lea %sp@(24),%sp /* 6 regs */
  120. .endm
  121. /*
  122. * Software copy of the user and kernel stack pointers... Ugh...
  123. * Need these to get around ColdFire not having separate kernel
  124. * and user stack pointers.
  125. */
  126. .globl sw_usp
  127. .globl sw_ksp
  128. #else /* !CONFIG_COLDFIRE */
  129. /*
  130. * Standard 68k interrupt entry and exit macros.
  131. */
  132. .macro SAVE_ALL
  133. clrl %sp@- /* stkadj */
  134. movel %d0,%sp@- /* orig d0 */
  135. movel %d0,%sp@- /* d0 */
  136. moveml %d1-%d5/%a0-%a2,%sp@-
  137. .endm
  138. .macro RESTORE_ALL
  139. moveml %sp@+,%a0-%a2/%d1-%d5
  140. movel %sp@+,%d0
  141. addql #4,%sp /* orig d0 */
  142. addl %sp@+,%sp /* stkadj */
  143. rte
  144. .endm
  145. .macro SAVE_SWITCH_STACK
  146. moveml %a3-%a6/%d6-%d7,%sp@-
  147. .endm
  148. .macro RESTORE_SWITCH_STACK
  149. moveml %sp@+,%a3-%a6/%d6-%d7
  150. .endm
  151. #endif /* !CONFIG_COLDFIRE */
  152. #endif /* __ASSEMBLY__ */
  153. #endif /* __M68KNOMMU_ENTRY_H */