/*
 * entry_no.h -- exception entry/exit macros for m68k-nommu (68k and ColdFire).
 */
  1. #ifndef __M68KNOMMU_ENTRY_H
  2. #define __M68KNOMMU_ENTRY_H
  3. #include <asm/setup.h>
  4. #include <asm/page.h>
  5. /*
  6. * Stack layout in 'ret_from_exception':
  7. *
  8. * This allows access to the syscall arguments in registers d1-d5
  9. *
  10. * 0(sp) - d1
  11. * 4(sp) - d2
  12. * 8(sp) - d3
  13. * C(sp) - d4
  14. * 10(sp) - d5
  15. * 14(sp) - a0
  16. * 18(sp) - a1
  17. * 1C(sp) - a2
  18. * 20(sp) - d0
  19. * 24(sp) - orig_d0
  20. * 28(sp) - stack adjustment
  21. * 2C(sp) - [ sr ] [ format & vector ]
  22. * 2E(sp) - [ pc-hiword ] [ sr ]
  23. * 30(sp) - [ pc-loword ] [ pc-hiword ]
  24. * 32(sp) - [ format & vector ] [ pc-loword ]
  25. * ^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^
  26. * M68K COLDFIRE
  27. */
/*
 * SR mask with the interrupt-priority bits (8-10) clear; presumably
 * ANDed into %sr to re-enable interrupts -- usage not visible here.
 */
#define ALLOWINT 0xf8ff
#ifdef __ASSEMBLY__
/*
 * Process bits for task_struct.flags, expressed as a byte offset into
 * the flags word (*_OFF) plus a bit number within that byte (*_BIT),
 * for use with btst-style bit instructions from assembly.
 */
PF_TRACESYS_OFF = 3
PF_TRACESYS_BIT = 5
PF_PTRACED_OFF = 3
PF_PTRACED_BIT = 4
PF_DTRACE_OFF = 1
PF_DTRACE_BIT = 5
LENOSYS = 38				/* local copy of ENOSYS (38): invalid system call */
#define SWITCH_STACK_SIZE (6*4+4) /* a3-a6/d6-d7 plus the return address, in bytes */
  39. /*
  40. * This defines the normal kernel pt-regs layout.
  41. *
  42. * regs are a2-a6 and d6-d7 preserved by C code
  43. * the kernel doesn't mess with usp unless it needs to
  44. */
  45. #ifdef CONFIG_COLDFIRE
  46. /*
  47. * This is made a little more tricky on the ColdFire. There is no
  48. * separate kernel and user stack pointers. Need to artificially
  49. * construct a usp in software... When doing this we need to disable
  50. * interrupts, otherwise bad things could happen.
  51. */
/*
 * SAVE_ALL (ColdFire): build a full pt_regs frame on the kernel stack.
 * If the exception came from user mode, manually switch from the user
 * stack to the kernel stack (ColdFire has only one hardware SP) and
 * copy the 8-byte hardware exception frame across. Interrupts must be
 * off throughout, since sw_usp/sw_ksp are updated non-atomically.
 * Clobbers: %a0 (user-mode path), %sr, and the stack layout; assumes
 * %d0 still holds the value to record as both d0 and orig_d0.
 */
.macro SAVE_ALL
	move	#0x2700,%sr		/* disable intrs while juggling stacks */
	btst	#5,%sp@(2)		/* test S bit of saved SR: from user? */
	bnes	6f			/* S set => from kernel: no stack switch */
	movel	%sp,sw_usp		/* save user sp */
	addql	#8,sw_usp		/* step usp past the 8-byte exception frame */
	movel	sw_ksp,%sp		/* switch to kernel sp */
	subql	#8,%sp			/* room for copied exception frame */
	clrl	%sp@-			/* stkadj = 0 */
	movel	%d0,%sp@-		/* orig d0 (kept for syscall restart) */
	movel	%d0,%sp@-		/* d0 */
	lea	%sp@(-32),%sp		/* space for 8 regs; ColdFire moveml has no
					   predecrement mode, hence lea + store */
	moveml	%d1-%d5/%a0-%a2,%sp@
	movel	sw_usp,%a0		/* get usp */
	movel	%a0@-,%sp@(PT_PC)	/* copy exception program counter */
	movel	%a0@-,%sp@(PT_FORMATVEC)/* copy exception format/vector/sr */
	bra	7f
6:
	/* Came from kernel mode: already on the right stack. */
	clrl	%sp@-			/* stkadj = 0 */
	movel	%d0,%sp@-		/* orig d0 */
	movel	%d0,%sp@-		/* d0 */
	lea	%sp@(-32),%sp		/* space for 8 regs */
	moveml	%d1-%d5/%a0-%a2,%sp@
7:
.endm
/*
 * RESTORE_ALL (ColdFire): unwind the pt_regs frame built by SAVE_ALL
 * and return with rte. When returning to user mode, copy the exception
 * frame back onto the user stack, record the kernel sp in sw_ksp, and
 * switch %sp to the (adjusted) user stack before the rte.
 * Mirrors SAVE_ALL exactly; keep the two in sync.
 */
.macro RESTORE_ALL
	btst	#5,%sp@(PT_SR)		/* S bit of saved SR: going back to user? */
	bnes	8f			/* S set => returning to kernel: simple path */
	move	#0x2700,%sr		/* disable intrs around the stack switch */
	movel	sw_usp,%a0		/* get usp */
	movel	%sp@(PT_PC),%a0@-	/* copy exception program counter back */
	movel	%sp@(PT_FORMATVEC),%a0@-/* copy exception format/vector/sr back */
	moveml	%sp@,%d1-%d5/%a0-%a2	/* restore the 8 regs (overwrites %a0) */
	lea	%sp@(32),%sp		/* drop the 8-reg save area */
	movel	%sp@+,%d0		/* restore d0 */
	addql	#4,%sp			/* skip orig d0 */
	addl	%sp@+,%sp		/* apply stkadj (pop and add in one step) */
	addql	#8,%sp			/* drop the copied exception frame */
	movel	%sp,sw_ksp		/* save ksp for the next entry */
	subql	#8,sw_usp		/* usp points at the rebuilt exception frame */
	movel	sw_usp,%sp		/* restore usp */
	rte
8:
	/* Returning to kernel mode: no stack switch needed. */
	moveml	%sp@,%d1-%d5/%a0-%a2
	lea	%sp@(32),%sp		/* drop the 8-reg save area */
	movel	%sp@+,%d0
	addql	#4,%sp			/* skip orig d0 */
	addl	%sp@+,%sp		/* apply stkadj */
	rte
.endm
/*
 * Quick exception save, use current stack only.
 * Same register layout as SAVE_ALL but with no user/kernel stack
 * switch and no exception-frame copy -- for exceptions known to be
 * handled entirely on the current stack.
 */
.macro SAVE_LOCAL
	move	#0x2700,%sr		/* disable intrs */
	clrl	%sp@-			/* stkadj = 0 */
	movel	%d0,%sp@-		/* orig d0 */
	movel	%d0,%sp@-		/* d0 */
	lea	%sp@(-32),%sp		/* space for 8 regs (no predecrement moveml) */
	moveml	%d1-%d5/%a0-%a2,%sp@
.endm
/*
 * Counterpart of SAVE_LOCAL: pop the register frame from the current
 * stack and return via rte. No stack switching.
 */
.macro RESTORE_LOCAL
	moveml	%sp@,%d1-%d5/%a0-%a2
	lea	%sp@(32),%sp		/* drop the 8-reg save area */
	movel	%sp@+,%d0
	addql	#4,%sp			/* skip orig d0 */
	addl	%sp@+,%sp		/* apply stkadj */
	rte
.endm
/*
 * Push the C-callee-saved registers (a3-a6/d6-d7) for a context
 * switch. Uses lea + moveml because ColdFire moveml has no
 * predecrement addressing mode. Size must match SWITCH_STACK_SIZE.
 */
.macro SAVE_SWITCH_STACK
	lea	%sp@(-24),%sp		/* 6 regs */
	moveml	%a3-%a6/%d6-%d7,%sp@
.endm
/*
 * Pop the callee-saved registers pushed by SAVE_SWITCH_STACK.
 */
.macro RESTORE_SWITCH_STACK
	moveml	%sp@,%a3-%a6/%d6-%d7
	lea	%sp@(24),%sp		/* 6 regs */
.endm
/*
 * Software copy of the user and kernel stack pointers... Ugh...
 * Need these to get around ColdFire not having separate kernel
 * and user stack pointers. Declared here; presumably defined in
 * an assembly source elsewhere in the arch code.
 */
.globl sw_usp
.globl sw_ksp
  136. #else /* !CONFIG_COLDFIRE */
/*
 * Standard 68k interrupt entry macro: the CPU has separate user and
 * supervisor stack pointers, so no manual stack switch is needed and
 * predecrement moveml is available. Frame layout matches the ColdFire
 * SAVE_ALL (stkadj, orig d0, d0, then d1-d5/a0-a2).
 */
.macro SAVE_ALL
	clrl	%sp@-			/* stkadj = 0 */
	movel	%d0,%sp@-		/* orig d0 */
	movel	%d0,%sp@-		/* d0 */
	moveml	%d1-%d5/%a0-%a2,%sp@-	/* push the 8 scratch regs */
.endm
/*
 * Standard 68k interrupt exit macro: pop the SAVE_ALL frame and
 * return via rte (the CPU restores the correct stack pointer itself).
 */
.macro RESTORE_ALL
	moveml	%sp@+,%a0-%a2/%d1-%d5	/* postincrement pop of the 8 regs */
	movel	%sp@+,%d0
	addql	#4,%sp			/* skip orig d0 */
	addl	%sp@+,%sp		/* apply stkadj */
	rte
.endm
/*
 * Push the C-callee-saved registers for a context switch
 * (predecrement form; size must match SWITCH_STACK_SIZE).
 */
.macro SAVE_SWITCH_STACK
	moveml	%a3-%a6/%d6-%d7,%sp@-
.endm
/*
 * Pop the callee-saved registers pushed by SAVE_SWITCH_STACK.
 */
.macro RESTORE_SWITCH_STACK
	moveml	%sp@+,%a3-%a6/%d6-%d7
.endm
  159. #endif /* !CONFIG_COLDFIRE */
  160. #endif /* __ASSEMBLY__ */
  161. #endif /* __M68KNOMMU_ENTRY_H */