/* arch/mips/kernel/entry.S — low-level exception, interrupt and syscall
 * return paths for the MIPS port. */
/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1994 - 2000, 2001, 2003 Ralf Baechle
 * Copyright (C) 1999, 2000 Silicon Graphics, Inc.
 * Copyright (C) 2001 MIPS Technologies, Inc.
 */
#include <linux/config.h>
#include <asm/asm.h>
#include <asm/asmmacro.h>
#include <asm/regdef.h>
#include <asm/mipsregs.h>
#include <asm/stackframe.h>
#include <asm/isadep.h>
#include <asm/thread_info.h>
#include <asm/war.h>
  19. #ifdef CONFIG_PREEMPT
  20. .macro preempt_stop
  21. .endm
  22. #else
  23. .macro preempt_stop
  24. local_irq_disable
  25. .endm
  26. #define resume_kernel restore_all
  27. #endif
  28. .text
  29. .align 5
  30. FEXPORT(ret_from_exception)
  31. preempt_stop
  32. FEXPORT(ret_from_irq)
  33. LONG_L t0, PT_STATUS(sp) # returning to kernel mode?
  34. andi t0, t0, KU_USER
  35. beqz t0, resume_kernel
  36. resume_userspace:
  37. local_irq_disable # make sure we dont miss an
  38. # interrupt setting need_resched
  39. # between sampling and return
  40. LONG_L a2, TI_FLAGS($28) # current->work
  41. andi t0, a2, _TIF_WORK_MASK # (ignoring syscall_trace)
  42. bnez t0, work_pending
  43. j restore_all
  44. #ifdef CONFIG_PREEMPT
  45. resume_kernel:
  46. local_irq_disable
  47. lw t0, TI_PRE_COUNT($28)
  48. bnez t0, restore_all
  49. need_resched:
  50. LONG_L t0, TI_FLAGS($28)
  51. andi t1, t0, _TIF_NEED_RESCHED
  52. beqz t1, restore_all
  53. LONG_L t0, PT_STATUS(sp) # Interrupts off?
  54. andi t0, 1
  55. beqz t0, restore_all
  56. jal preempt_schedule_irq
  57. b need_resched
  58. #endif
  59. FEXPORT(ret_from_fork)
  60. jal schedule_tail # a0 = task_t *prev
  61. FEXPORT(syscall_exit)
  62. local_irq_disable # make sure need_resched and
  63. # signals dont change between
  64. # sampling and return
  65. LONG_L a2, TI_FLAGS($28) # current->work
  66. li t0, _TIF_ALLWORK_MASK
  67. and t0, a2, t0
  68. bnez t0, syscall_exit_work
  69. FEXPORT(restore_all) # restore full frame
  70. .set noat
  71. RESTORE_TEMP
  72. RESTORE_AT
  73. RESTORE_STATIC
  74. FEXPORT(restore_partial) # restore partial frame
  75. RESTORE_SOME
  76. RESTORE_SP_AND_RET
  77. .set at
  78. work_pending:
  79. andi t0, a2, _TIF_NEED_RESCHED # a2 is preloaded with TI_FLAGS
  80. beqz t0, work_notifysig
  81. work_resched:
  82. jal schedule
  83. local_irq_disable # make sure need_resched and
  84. # signals dont change between
  85. # sampling and return
  86. LONG_L a2, TI_FLAGS($28)
  87. andi t0, a2, _TIF_WORK_MASK # is there any work to be done
  88. # other than syscall tracing?
  89. beqz t0, restore_all
  90. andi t0, a2, _TIF_NEED_RESCHED
  91. bnez t0, work_resched
  92. work_notifysig: # deal with pending signals and
  93. # notify-resume requests
  94. move a0, sp
  95. li a1, 0
  96. jal do_notify_resume # a2 already loaded
  97. j resume_userspace
  98. FEXPORT(syscall_exit_work_partial)
  99. SAVE_STATIC
  100. syscall_exit_work:
  101. li t0, _TIF_SYSCALL_TRACE | _TIF_SYSCALL_AUDIT
  102. and t0, a2 # a2 is preloaded with TI_FLAGS
  103. beqz t0, work_pending # trace bit set?
  104. local_irq_enable # could let do_syscall_trace()
  105. # call schedule() instead
  106. move a0, sp
  107. li a1, 1
  108. jal do_syscall_trace
  109. b resume_userspace
  110. /*
  111. * Common spurious interrupt handler.
  112. */
  113. LEAF(spurious_interrupt)
  114. /*
  115. * Someone tried to fool us by sending an interrupt but we
  116. * couldn't find a cause for it.
  117. */
  118. PTR_LA t1, irq_err_count
  119. #ifdef CONFIG_SMP
  120. 1: ll t0, (t1)
  121. addiu t0, 1
  122. sc t0, (t1)
  123. #if R10000_LLSC_WAR
  124. beqzl t0, 1b
  125. #else
  126. beqz t0, 1b
  127. #endif
  128. #else
  129. lw t0, (t1)
  130. addiu t0, 1
  131. sw t0, (t1)
  132. #endif
  133. j ret_from_irq
  134. END(spurious_interrupt)