entry.h — 4.3 KB
  1. #ifndef __M68KNOMMU_ENTRY_H
  2. #define __M68KNOMMU_ENTRY_H
  3. #include <linux/config.h>
  4. #include <asm/setup.h>
  5. #include <asm/page.h>
  6. /*
  7. * Stack layout in 'ret_from_exception':
  8. *
  9. * This allows access to the syscall arguments in registers d1-d5
  10. *
  11. * 0(sp) - d1
  12. * 4(sp) - d2
  13. * 8(sp) - d3
  14. * C(sp) - d4
  15. * 10(sp) - d5
  16. * 14(sp) - a0
  17. * 18(sp) - a1
  18. * 1C(sp) - a2
  19. * 20(sp) - d0
  20. * 24(sp) - orig_d0
  21. * 28(sp) - stack adjustment
  22. * 2C(sp) - [ sr ] [ format & vector ]
  23. * 2E(sp) - [ pc-hiword ] [ sr ]
  24. * 30(sp) - [ pc-loword ] [ pc-hiword ]
  25. * 32(sp) - [ format & vector ] [ pc-loword ]
  26. * ^^^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^
  27. * M68K COLDFIRE
  28. */
  29. #define ALLOWINT 0xf8ff
  30. #ifdef __ASSEMBLY__
  31. /* process bits for task_struct.flags */
  32. PF_TRACESYS_OFF = 3
  33. PF_TRACESYS_BIT = 5
  34. PF_PTRACED_OFF = 3
  35. PF_PTRACED_BIT = 4
  36. PF_DTRACE_OFF = 1
  37. PF_DTRACE_BIT = 5
  38. LENOSYS = 38
  39. #define SWITCH_STACK_SIZE (6*4+4) /* Includes return address */
  40. /*
  41. * This defines the normal kernel pt-regs layout.
  42. *
  43. * regs are a2-a6 and d6-d7 preserved by C code
  44. * the kernel doesn't mess with usp unless it needs to
  45. */
  46. #ifdef CONFIG_COLDFIRE
  47. /*
  48. * This is made a little more tricky on the ColdFire. There is no
  49. * separate kernel and user stack pointers. Need to artificially
  50. * construct a usp in software... When doing this we need to disable
  51. * interrupts, otherwise bad things could happen.
  52. */
  53. .macro SAVE_ALL
  54. move #0x2700,%sr /* disable intrs */
  55. btst #5,%sp@(2) /* from user? */
  56. bnes 6f /* no, skip */
  57. movel %sp,sw_usp /* save user sp */
  58. addql #8,sw_usp /* remove exception */
  59. movel sw_ksp,%sp /* kernel sp */
  60. subql #8,%sp /* room for exception */
  61. clrl %sp@- /* stkadj */
  62. movel %d0,%sp@- /* orig d0 */
  63. movel %d0,%sp@- /* d0 */
  64. lea %sp@(-32),%sp /* space for 8 regs */
  65. moveml %d1-%d5/%a0-%a2,%sp@
  66. movel sw_usp,%a0 /* get usp */
  67. movel %a0@-,%sp@(PT_PC) /* copy exception program counter */
  68. movel %a0@-,%sp@(PT_FORMATVEC)/* copy exception format/vector/sr */
  69. bra 7f
  70. 6:
  71. clrl %sp@- /* stkadj */
  72. movel %d0,%sp@- /* orig d0 */
  73. movel %d0,%sp@- /* d0 */
  74. lea %sp@(-32),%sp /* space for 8 regs */
  75. moveml %d1-%d5/%a0-%a2,%sp@
  76. 7:
  77. .endm
  78. .macro RESTORE_ALL
  79. btst #5,%sp@(PT_SR) /* going user? */
  80. bnes 8f /* no, skip */
  81. move #0x2700,%sr /* disable intrs */
  82. movel sw_usp,%a0 /* get usp */
  83. movel %sp@(PT_PC),%a0@- /* copy exception program counter */
  84. movel %sp@(PT_FORMATVEC),%a0@-/* copy exception format/vector/sr */
  85. moveml %sp@,%d1-%d5/%a0-%a2
  86. lea %sp@(32),%sp /* space for 8 regs */
  87. movel %sp@+,%d0
  88. addql #4,%sp /* orig d0 */
  89. addl %sp@+,%sp /* stkadj */
  90. addql #8,%sp /* remove exception */
  91. movel %sp,sw_ksp /* save ksp */
  92. subql #8,sw_usp /* set exception */
  93. movel sw_usp,%sp /* restore usp */
  94. rte
  95. 8:
  96. moveml %sp@,%d1-%d5/%a0-%a2
  97. lea %sp@(32),%sp /* space for 8 regs */
  98. movel %sp@+,%d0
  99. addql #4,%sp /* orig d0 */
  100. addl %sp@+,%sp /* stkadj */
  101. rte
  102. .endm
  103. /*
  104. * Quick exception save, use current stack only.
  105. */
  106. .macro SAVE_LOCAL
  107. move #0x2700,%sr /* disable intrs */
  108. clrl %sp@- /* stkadj */
  109. movel %d0,%sp@- /* orig d0 */
  110. movel %d0,%sp@- /* d0 */
  111. lea %sp@(-32),%sp /* space for 8 regs */
  112. moveml %d1-%d5/%a0-%a2,%sp@
  113. .endm
  114. .macro RESTORE_LOCAL
  115. moveml %sp@,%d1-%d5/%a0-%a2
  116. lea %sp@(32),%sp /* space for 8 regs */
  117. movel %sp@+,%d0
  118. addql #4,%sp /* orig d0 */
  119. addl %sp@+,%sp /* stkadj */
  120. rte
  121. .endm
  122. .macro SAVE_SWITCH_STACK
  123. lea %sp@(-24),%sp /* 6 regs */
  124. moveml %a3-%a6/%d6-%d7,%sp@
  125. .endm
  126. .macro RESTORE_SWITCH_STACK
  127. moveml %sp@,%a3-%a6/%d6-%d7
  128. lea %sp@(24),%sp /* 6 regs */
  129. .endm
  130. /*
  131. * Software copy of the user and kernel stack pointers... Ugh...
  132. * Need these to get around ColdFire not having separate kernel
  133. * and user stack pointers.
  134. */
  135. .globl sw_usp
  136. .globl sw_ksp
  137. #else /* !CONFIG_COLDFIRE */
  138. /*
  139. * Standard 68k interrupt entry and exit macros.
  140. */
  141. .macro SAVE_ALL
  142. clrl %sp@- /* stkadj */
  143. movel %d0,%sp@- /* orig d0 */
  144. movel %d0,%sp@- /* d0 */
  145. moveml %d1-%d5/%a0-%a2,%sp@-
  146. .endm
  147. .macro RESTORE_ALL
  148. moveml %sp@+,%a0-%a2/%d1-%d5
  149. movel %sp@+,%d0
  150. addql #4,%sp /* orig d0 */
  151. addl %sp@+,%sp /* stkadj */
  152. rte
  153. .endm
  154. .macro SAVE_SWITCH_STACK
  155. moveml %a3-%a6/%d6-%d7,%sp@-
  156. .endm
  157. .macro RESTORE_SWITCH_STACK
  158. moveml %sp@+,%a3-%a6/%d6-%d7
  159. .endm
  160. #endif /* !CONFIG_COLDFIRE */
  161. #endif /* __ASSEMBLY__ */
  162. #endif /* __M68KNOMMU_ENTRY_H */