/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1994 - 2000, 2001, 2003 Ralf Baechle
 * Copyright (C) 1999, 2000 Silicon Graphics, Inc.
 * Copyright (C) 2001 MIPS Technologies, Inc.
 */
#include <linux/config.h>
#include <asm/asm.h>
#include <asm/asmmacro.h>
#include <asm/regdef.h>
#include <asm/mipsregs.h>
#include <asm/stackframe.h>
#include <asm/isadep.h>
#include <asm/thread_info.h>
#include <asm/war.h>
#ifdef CONFIG_PREEMPT
/*
 * Preemptible kernel: leave interrupts as they are so the
 * resume_kernel preemption check can observe them; nothing to do here.
 */
	.macro	preempt_stop
	.endm
#else
/*
 * Non-preemptible kernel: mask interrupts before the work-flag
 * sampling below so a need_resched/signal update cannot slip in
 * between the test and the return to the interrupted context.
 */
	.macro	preempt_stop
	local_irq_disable
	.endm
/* Without CONFIG_PREEMPT a return to kernel mode never reschedules. */
#define resume_kernel	restore_all
#endif
  28. .text
  29. .align 5
  30. FEXPORT(ret_from_exception)
  31. preempt_stop
  32. FEXPORT(ret_from_irq)
  33. LONG_L t0, PT_STATUS(sp) # returning to kernel mode?
  34. andi t0, t0, KU_USER
  35. beqz t0, resume_kernel
  36. resume_userspace:
  37. local_irq_disable # make sure we dont miss an
  38. # interrupt setting need_resched
  39. # between sampling and return
  40. LONG_L a2, TI_FLAGS($28) # current->work
  41. andi t0, a2, _TIF_WORK_MASK # (ignoring syscall_trace)
  42. bnez t0, work_pending
  43. j restore_all
  44. #ifdef CONFIG_PREEMPT
  45. resume_kernel:
  46. local_irq_disable
  47. lw t0, TI_PRE_COUNT($28)
  48. bnez t0, restore_all
  49. need_resched:
  50. LONG_L t0, TI_FLAGS($28)
  51. andi t1, t0, _TIF_NEED_RESCHED
  52. beqz t1, restore_all
  53. LONG_L t0, PT_STATUS(sp) # Interrupts off?
  54. andi t0, 1
  55. beqz t0, restore_all
  56. li t0, PREEMPT_ACTIVE
  57. sw t0, TI_PRE_COUNT($28)
  58. jal preempt_schedule_irq
  59. #endif
/*
 * First code run by a freshly forked task; finishes the context
 * switch bookkeeping, then falls through into the common syscall
 * exit path.
 */
FEXPORT(ret_from_fork)
	jal	schedule_tail		# a0 = task_t *prev
FEXPORT(syscall_exit)
	local_irq_disable		# make sure need_resched and
					# signals dont change between
					# sampling and return
	LONG_L	a2, TI_FLAGS($28)	# current->work
	li	t0, _TIF_ALLWORK_MASK	# includes syscall-trace bits
	and	t0, a2, t0
	bnez	t0, syscall_exit_work
	/* no pending work: fall through into restore_all */
/*
 * Unwind the saved register frame and return to the interrupted
 * context.  restore_all reloads the full frame (temporaries, $at,
 * callee-saved); restore_partial reloads only the registers saved
 * by SAVE_SOME and then returns via RESTORE_SP_AND_RET.
 */
FEXPORT(restore_all)			# restore full frame
	.set	noat			# RESTORE_AT writes $at directly
	RESTORE_TEMP
	RESTORE_AT
	RESTORE_STATIC
FEXPORT(restore_partial)		# restore partial frame
	RESTORE_SOME
	RESTORE_SP_AND_RET		# reloads sp and erets
	.set	at
/*
 * Userspace-return work loop: reschedule while _TIF_NEED_RESCHED is
 * set, then deliver pending signals / notify-resume requests.
 * Entered with a2 = current thread_info flags and interrupts off.
 */
work_pending:
	andi	t0, a2, _TIF_NEED_RESCHED # a2 is preloaded with TI_FLAGS
	beqz	t0, work_notifysig
work_resched:
	jal	schedule
	local_irq_disable		# make sure need_resched and
					# signals dont change between
					# sampling and return
	LONG_L	a2, TI_FLAGS($28)	# resample after schedule()
	andi	t0, a2, _TIF_WORK_MASK	# is there any work to be done
					# other than syscall tracing?
	beqz	t0, restore_all
	andi	t0, a2, _TIF_NEED_RESCHED
	bnez	t0, work_resched

work_notifysig:				# deal with pending signals and
					# notify-resume requests
	move	a0, sp			# a0 = pt_regs
	li	a1, 0			# a1 = saved sigmask flag (none)
	jal	do_notify_resume	# a2 already loaded
	j	restore_all
/*
 * Slow syscall-exit path, taken when any _TIF_ALLWORK_MASK bit is
 * set.  The _partial entry first saves the callee-saved registers
 * that the fast path left untouched.  Entered with a2 = TI_FLAGS.
 */
FEXPORT(syscall_exit_work_partial)
	SAVE_STATIC
syscall_exit_work:
	li	t0, _TIF_SYSCALL_TRACE | _TIF_SYSCALL_AUDIT
	and	t0, a2			# a2 is preloaded with TI_FLAGS
	beqz	t0, work_pending	# trace bit set?
	local_irq_enable		# could let do_syscall_trace()
					# call schedule() instead
	move	a0, sp			# a0 = pt_regs
	li	a1, 1			# a1 = 1: this is syscall exit
	jal	do_syscall_trace
	b	resume_userspace	# re-run the full exit checks
/*
 * Common spurious interrupt handler.
 * Bumps irq_err_count (atomically via LL/SC on SMP) and rejoins the
 * normal interrupt-return path.
 */
LEAF(spurious_interrupt)
	/*
	 * Someone tried to fool us by sending an interrupt but we
	 * couldn't find a cause for it.
	 */
	PTR_LA	t1, irq_err_count
#ifdef CONFIG_SMP
1:	ll	t0, (t1)		# load-linked current count
	addiu	t0, 1
	sc	t0, (t1)		# t0 = 1 on success, 0 on failure
#if R10000_LLSC_WAR
	beqzl	t0, 1b			# branch-likely form works around
					# the R10000 LL/SC errata
#else
	beqz	t0, 1b			# sc failed: retry the increment
#endif
#else
	/* UP: plain load/increment/store is sufficient */
	lw	t0, (t1)
	addiu	t0, 1
	sw	t0, (t1)
#endif
	j	ret_from_irq
	END(spurious_interrupt)