fpu.S

/*
 * FPU support code, moved here from head.S so that it can be used
 * by chips which use other head-whatever.S files.
 *
 * Copyright (C) 1995-1996 Gary Thomas (gdt@linuxppc.org)
 * Copyright (C) 1996 Cort Dougan <cort@cs.nmt.edu>
 * Copyright (C) 1996 Paul Mackerras.
 * Copyright (C) 1997 Dan Malek (dmalek@jlc.net).
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version
 * 2 of the License, or (at your option) any later version.
 *
 */
#include <asm/reg.h>
#include <asm/page.h>
#include <asm/mmu.h>
#include <asm/pgtable.h>
#include <asm/cputable.h>
#include <asm/cache.h>
#include <asm/thread_info.h>
#include <asm/ppc_asm.h>
#include <asm/asm-offsets.h>

/*
 * This task wants to use the FPU now.
 * On UP, disable FP for the task which had the FPU previously,
 * and save its floating-point registers in its thread_struct.
 * Load up this task's FP registers from its thread_struct,
 * enable the FPU for the current task and return to the task.
 */
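/*
 * A rough C-level sketch of the UP lazy-switch logic implemented below
 * (illustrative only, not a compiled function; the helper names here
 * are invented and just restate the assembly that follows):
 *
 *	if (last_task_used_math) {
 *		struct thread_struct *t = &last_task_used_math->thread;
 *		save_32fprs(t);			// SAVE_32FPRS
 *		t->fpscr = read_fpscr();	// mffs/stfd
 *		t->regs->msr &= ~(MSR_FP | MSR_FE0 | MSR_FE1);
 *	}
 *	restore current->thread FP state, set MSR_FP in the return MSR,
 *	and record last_task_used_math = current.
 */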
_GLOBAL(load_up_fpu)
	mfmsr	r5
	ori	r5,r5,MSR_FP
	SYNC
	MTMSRD(r5)			/* enable use of fpu now */
	isync
/*
 * For SMP, we don't do lazy FPU switching because it just gets too
 * horrendously complex, especially when a task switches from one CPU
 * to another. Instead we call giveup_fpu in switch_to.
 */
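/*
 * (On SMP the flush is thus expected to happen in __switch_to() via
 * giveup_fpu() before the task can migrate, which is why the
 * #ifndef CONFIG_SMP block below is compiled out there.)
 */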
#ifndef CONFIG_SMP
	LOAD_REG_ADDRBASE(r3, last_task_used_math)
	toreal(r3)
	PPC_LL	r4,ADDROFF(last_task_used_math)(r3)
	PPC_LCMPI	0,r4,0
	beq	1f			/* no previous owner, nothing to save */
	toreal(r4)
	addi	r4,r4,THREAD		/* want last_task_used_math->thread */
	SAVE_32FPRS(0, r4)		/* save all 32 FPRs... */
	mffs	fr0
	stfd	fr0,THREAD_FPSCR(r4)	/* ...and the FPSCR */
	PPC_LL	r5,PT_REGS(r4)
	toreal(r5)
	PPC_LL	r4,_MSR-STACK_FRAME_OVERHEAD(r5)
	li	r10,MSR_FP|MSR_FE0|MSR_FE1
	andc	r4,r4,r10		/* disable FP for previous task */
	PPC_STL	r4,_MSR-STACK_FRAME_OVERHEAD(r5)
1:
#endif /* CONFIG_SMP */
	/* enable use of FP after return */
#ifdef CONFIG_PPC32
	mfspr	r5,SPRN_SPRG3		/* current task's THREAD (phys) */
	lwz	r4,THREAD_FPEXC_MODE(r5)
	ori	r9,r9,MSR_FP		/* enable FP for current */
	or	r9,r9,r4
#else
	ld	r4,PACACURRENT(r13)
	addi	r5,r4,THREAD		/* Get THREAD */
	lwz	r4,THREAD_FPEXC_MODE(r5)
	ori	r12,r12,MSR_FP
	or	r12,r12,r4
	std	r12,_MSR(r1)		/* update return MSR in the frame */
#endif
	lfd	fr0,THREAD_FPSCR(r5)	/* restore this task's FPSCR... */
	MTFSF_L(fr0)
	REST_32FPRS(0, r5)		/* ...and its 32 FPRs */
#ifndef CONFIG_SMP
	subi	r4,r5,THREAD		/* back from THREAD to the task */
	fromreal(r4)
	PPC_STL	r4,ADDROFF(last_task_used_math)(r3)	/* current owns FPU now */
#endif /* CONFIG_SMP */
	/* restore registers and return */
	/* we haven't used ctr or xer or lr */
	b	fast_exception_return

/*
 * giveup_fpu(tsk)
 * Disable FP for the task given as the argument,
 * and save the floating-point registers in its thread_struct.
 * Enables the FPU for use in the kernel on return.
 */
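/*
 * A minimal sketch of the C-side declaration callers would use
 * (an assumption about the matching prototype, not part of this file):
 *
 *	extern void giveup_fpu(struct task_struct *tsk);
 *
 * Passing NULL is safe: the beqlr- below returns immediately when
 * there is no previous owner.
 */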
_GLOBAL(giveup_fpu)
	mfmsr	r5
	ori	r5,r5,MSR_FP
	SYNC_601
	ISYNC_601
	MTMSRD(r5)			/* enable use of fpu now */
	SYNC_601
	isync
	PPC_LCMPI	0,r3,0
	beqlr-				/* if no previous owner, done */
	addi	r3,r3,THREAD		/* want THREAD of task */
	PPC_LL	r5,PT_REGS(r3)
	PPC_LCMPI	0,r5,0
	SAVE_32FPRS(0, r3)		/* save all 32 FPRs... */
	mffs	fr0
	stfd	fr0,THREAD_FPSCR(r3)	/* ...and the FPSCR */
	beq	1f			/* no pt_regs: skip the MSR update */
	PPC_LL	r4,_MSR-STACK_FRAME_OVERHEAD(r5)
	li	r3,MSR_FP|MSR_FE0|MSR_FE1
	andc	r4,r4,r3		/* disable FP for previous task */
	PPC_STL	r4,_MSR-STACK_FRAME_OVERHEAD(r5)
1:
#ifndef CONFIG_SMP
	li	r5,0
	LOAD_REG_ADDRBASE(r4,last_task_used_math)
	PPC_STL	r5,ADDROFF(last_task_used_math)(r4)	/* no lazy owner now */
#endif /* CONFIG_SMP */
	blr

/*
 * These are used in the alignment trap handler when emulating
 * single-precision loads and stores.
 * We restore and save the fpscr so the task gets the same result
 * and exceptions as if the cpu had performed the load or store.
 */
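/*
 * Likely C-side declarations, inferred from the register usage below
 * (r3 = source, r4 = destination, r5 = the task's thread_struct, used
 * only for its FPSCR slot); treat these prototypes as assumptions:
 *
 *	extern void cvt_fd(float *from, double *to,
 *			   struct thread_struct *thread);
 *	extern void cvt_df(double *from, float *to,
 *			   struct thread_struct *thread);
 */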
_GLOBAL(cvt_fd)
	lfd	0,THREAD_FPSCR(r5)	/* load up fpscr value */
	MTFSF_L(0)
	lfs	0,0(r3)			/* load single-precision source */
	stfd	0,0(r4)			/* store as double precision */
	mffs	0
	stfd	0,THREAD_FPSCR(r5)	/* save new fpscr value */
	blr

_GLOBAL(cvt_df)
	lfd	0,THREAD_FPSCR(r5)	/* load up fpscr value */
	MTFSF_L(0)
	lfd	0,0(r3)			/* load double-precision source */
	stfs	0,0(r4)			/* store as single precision */
	mffs	0
	stfd	0,THREAD_FPSCR(r5)	/* save new fpscr value */
	blr