stackframe.h

/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1994, 95, 96, 99, 2001 Ralf Baechle
 * Copyright (C) 1994, 1995, 1996 Paul M. Antoine.
 * Copyright (C) 1999 Silicon Graphics, Inc.
 */
#ifndef _ASM_STACKFRAME_H
#define _ASM_STACKFRAME_H

#include <linux/config.h>
#include <linux/threads.h>

#include <asm/asm.h>
#include <asm/mipsregs.h>
#include <asm/asm-offsets.h>
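
/* Save the assembler temporary register ($1/AT) into the trap frame. */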
	.macro	SAVE_AT
	.set	push
	.set	noat
	LONG_S	$1, PT_R1(sp)
	.set	pop
	.endm
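
/*
 * Save the multiply/divide result registers (HI/LO) and the caller-saved
 * temporaries $10-$15 and $24.  $8 and $9 are saved here only on 32-bit
 * kernels; on 64-bit they can hold syscall arguments and are saved by
 * SAVE_SOME instead.
 */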
	.macro	SAVE_TEMP
	mfhi	v1
#ifdef CONFIG_32BIT
	LONG_S	$8, PT_R8(sp)
	LONG_S	$9, PT_R9(sp)
#endif
	LONG_S	v1, PT_HI(sp)
	mflo	v1
	LONG_S	$10, PT_R10(sp)
	LONG_S	$11, PT_R11(sp)
	LONG_S	v1, PT_LO(sp)
	LONG_S	$12, PT_R12(sp)
	LONG_S	$13, PT_R13(sp)
	LONG_S	$14, PT_R14(sp)
	LONG_S	$15, PT_R15(sp)
	LONG_S	$24, PT_R24(sp)
	.endm
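
/* Save the callee-saved registers $16-$23 (s0-s7) and $30 (s8/fp). */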
	.macro	SAVE_STATIC
	LONG_S	$16, PT_R16(sp)
	LONG_S	$17, PT_R17(sp)
	LONG_S	$18, PT_R18(sp)
	LONG_S	$19, PT_R19(sp)
	LONG_S	$20, PT_R20(sp)
	LONG_S	$21, PT_R21(sp)
	LONG_S	$22, PT_R22(sp)
	LONG_S	$23, PT_R23(sp)
	LONG_S	$30, PT_R30(sp)
	.endm
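
/*
 * get_saved_sp loads the current CPU's kernel stack pointer from kernelsp
 * into k1; set_saved_sp stores \stackp back into it.  On SMP the CPU index
 * is derived from the CP0 Context register (or from TI_CPU in thread_info
 * on the 64-bit store path); on uniprocessor kernels kernelsp is a single
 * variable.
 */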
#ifdef CONFIG_SMP
	.macro	get_saved_sp	/* SMP variation */
#ifdef CONFIG_32BIT
	mfc0	k0, CP0_CONTEXT
	lui	k1, %hi(kernelsp)
	srl	k0, k0, 23
	sll	k0, k0, 2
	addu	k1, k0
	LONG_L	k1, %lo(kernelsp)(k1)
#endif
#if defined(CONFIG_64BIT) && !defined(CONFIG_BUILD_ELF64)
	MFC0	k1, CP0_CONTEXT
	dsra	k1, 23
	lui	k0, %hi(pgd_current)
	addiu	k0, %lo(pgd_current)
	dsubu	k1, k0
	lui	k0, %hi(kernelsp)
	daddu	k1, k0
	LONG_L	k1, %lo(kernelsp)(k1)
#endif
#if defined(CONFIG_64BIT) && defined(CONFIG_BUILD_ELF64)
	MFC0	k1, CP0_CONTEXT
	dsrl	k1, 23
	dsll	k1, k1, 3
	lui	k0, %highest(kernelsp)
	daddiu	k0, %higher(kernelsp)
	dsll	k0, k0, 16
	daddiu	k0, %hi(kernelsp)
	daddu	k1, k1, k0
	LONG_L	k1, %lo(kernelsp)(k1)
#endif
	.endm

	.macro	set_saved_sp	stackp temp temp2
#ifdef CONFIG_32BIT
	mfc0	\temp, CP0_CONTEXT
	srl	\temp, 23
	sll	\temp, 2
	LONG_S	\stackp, kernelsp(\temp)
#endif
#if defined(CONFIG_64BIT) && !defined(CONFIG_BUILD_ELF64)
	lw	\temp, TI_CPU(gp)
	dsll	\temp, 3
	lui	\temp2, %hi(kernelsp)
	daddu	\temp, \temp2
	LONG_S	\stackp, %lo(kernelsp)(\temp)
#endif
#if defined(CONFIG_64BIT) && defined(CONFIG_BUILD_ELF64)
	lw	\temp, TI_CPU(gp)
	dsll	\temp, 3
	LONG_S	\stackp, kernelsp(\temp)
#endif
	.endm
#else
	.macro	get_saved_sp	/* Uniprocessor variation */
	lui	k1, %hi(kernelsp)
	LONG_L	k1, %lo(kernelsp)(k1)
	.endm

	.macro	set_saved_sp	stackp temp temp2
	LONG_S	\stackp, kernelsp
	.endm
#endif
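
/*
 * Save the volatile registers and the CP0 state (STATUS, CAUSE, EPC) needed
 * to return from the exception.  If the exception came from user mode (CU0
 * clear in STATUS), switch to the kernel stack fetched by get_saved_sp;
 * otherwise keep building the frame on the current stack.  gp ($28) is left
 * pointing at the current thread_info.
 */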
	.macro	SAVE_SOME
	.set	push
	.set	noat
	.set	reorder
	mfc0	k0, CP0_STATUS
	sll	k0, 3		/* extract cu0 bit */
	.set	noreorder
	bltz	k0, 8f
	 move	k1, sp
	.set	reorder
	/* Called from user mode, new stack. */
	get_saved_sp
8:	move	k0, sp
	PTR_SUBU sp, k1, PT_SIZE
	LONG_S	k0, PT_R29(sp)
	LONG_S	$3, PT_R3(sp)
	LONG_S	$0, PT_R0(sp)
	mfc0	v1, CP0_STATUS
	LONG_S	$2, PT_R2(sp)
	LONG_S	v1, PT_STATUS(sp)
	LONG_S	$4, PT_R4(sp)
	mfc0	v1, CP0_CAUSE
	LONG_S	$5, PT_R5(sp)
	LONG_S	v1, PT_CAUSE(sp)
	LONG_S	$6, PT_R6(sp)
	MFC0	v1, CP0_EPC
	LONG_S	$7, PT_R7(sp)
#ifdef CONFIG_64BIT
	LONG_S	$8, PT_R8(sp)
	LONG_S	$9, PT_R9(sp)
#endif
	LONG_S	v1, PT_EPC(sp)
	LONG_S	$25, PT_R25(sp)
	LONG_S	$28, PT_R28(sp)
	LONG_S	$31, PT_R31(sp)
	ori	$28, sp, _THREAD_MASK
	xori	$28, _THREAD_MASK
	.set	pop
	.endm
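
/* Save the complete register state: SAVE_SOME plus AT, HI/LO/temps and statics. */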
	.macro	SAVE_ALL
	SAVE_SOME
	SAVE_AT
	SAVE_TEMP
	SAVE_STATIC
	.endm
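
/* Restore the assembler temporary register ($1/AT) from the trap frame. */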
	.macro	RESTORE_AT
	.set	push
	.set	noat
	LONG_L	$1, PT_R1(sp)
	.set	pop
	.endm
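
/* Restore HI/LO and the temporary registers saved by SAVE_TEMP. */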
	.macro	RESTORE_TEMP
	LONG_L	$24, PT_LO(sp)
#ifdef CONFIG_32BIT
	LONG_L	$8, PT_R8(sp)
	LONG_L	$9, PT_R9(sp)
#endif
	mtlo	$24
	LONG_L	$24, PT_HI(sp)
	LONG_L	$10, PT_R10(sp)
	LONG_L	$11, PT_R11(sp)
	mthi	$24
	LONG_L	$12, PT_R12(sp)
	LONG_L	$13, PT_R13(sp)
	LONG_L	$14, PT_R14(sp)
	LONG_L	$15, PT_R15(sp)
	LONG_L	$24, PT_R24(sp)
	.endm
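
/* Restore the callee-saved registers saved by SAVE_STATIC. */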
	.macro	RESTORE_STATIC
	LONG_L	$16, PT_R16(sp)
	LONG_L	$17, PT_R17(sp)
	LONG_L	$18, PT_R18(sp)
	LONG_L	$19, PT_R19(sp)
	LONG_L	$20, PT_R20(sp)
	LONG_L	$21, PT_R21(sp)
	LONG_L	$22, PT_R22(sp)
	LONG_L	$23, PT_R23(sp)
	LONG_L	$30, PT_R30(sp)
	.endm
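
/*
 * RESTORE_SOME undoes SAVE_SOME: it rebuilds CP0 STATUS by keeping the
 * current interrupt mask bits (0xff00) and taking everything else from the
 * saved STATUS, then reloads the volatile registers.  The R3000/TX39XX
 * variant of RESTORE_SP_AND_RET returns with jr/rfe; other CPUs write EPC
 * back and return with eret.
 */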
#if defined(CONFIG_CPU_R3000) || defined(CONFIG_CPU_TX39XX)

	.macro	RESTORE_SOME
	.set	push
	.set	reorder
	.set	noat
	mfc0	a0, CP0_STATUS
	ori	a0, 0x1f
	xori	a0, 0x1f
	mtc0	a0, CP0_STATUS
	li	v1, 0xff00
	and	a0, v1
	LONG_L	v0, PT_STATUS(sp)
	nor	v1, $0, v1
	and	v0, v1
	or	v0, a0
	mtc0	v0, CP0_STATUS
	LONG_L	$31, PT_R31(sp)
	LONG_L	$28, PT_R28(sp)
	LONG_L	$25, PT_R25(sp)
#ifdef CONFIG_64BIT
	LONG_L	$8, PT_R8(sp)
	LONG_L	$9, PT_R9(sp)
#endif
	LONG_L	$7, PT_R7(sp)
	LONG_L	$6, PT_R6(sp)
	LONG_L	$5, PT_R5(sp)
	LONG_L	$4, PT_R4(sp)
	LONG_L	$3, PT_R3(sp)
	LONG_L	$2, PT_R2(sp)
	.set	pop
	.endm

	.macro	RESTORE_SP_AND_RET
	.set	push
	.set	noreorder
	LONG_L	k0, PT_EPC(sp)
	LONG_L	sp, PT_R29(sp)
	jr	k0
	 rfe
	.set	pop
	.endm

#else

	.macro	RESTORE_SOME
	.set	push
	.set	reorder
	.set	noat
	mfc0	a0, CP0_STATUS
	ori	a0, 0x1f
	xori	a0, 0x1f
	mtc0	a0, CP0_STATUS
	li	v1, 0xff00
	and	a0, v1
	LONG_L	v0, PT_STATUS(sp)
	nor	v1, $0, v1
	and	v0, v1
	or	v0, a0
	mtc0	v0, CP0_STATUS
	LONG_L	v1, PT_EPC(sp)
	MTC0	v1, CP0_EPC
	LONG_L	$31, PT_R31(sp)
	LONG_L	$28, PT_R28(sp)
	LONG_L	$25, PT_R25(sp)
#ifdef CONFIG_64BIT
	LONG_L	$8, PT_R8(sp)
	LONG_L	$9, PT_R9(sp)
#endif
	LONG_L	$7, PT_R7(sp)
	LONG_L	$6, PT_R6(sp)
	LONG_L	$5, PT_R5(sp)
	LONG_L	$4, PT_R4(sp)
	LONG_L	$3, PT_R3(sp)
	LONG_L	$2, PT_R2(sp)
	.set	pop
	.endm

	.macro	RESTORE_SP_AND_RET
	LONG_L	sp, PT_R29(sp)
	.set	mips3
	eret
	.set	mips0
	.endm

#endif
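
/*
 * RESTORE_SP pops the saved stack pointer.  RESTORE_ALL restores the full
 * register state; RESTORE_ALL_AND_RET additionally returns from the
 * exception via RESTORE_SP_AND_RET.
 */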
	.macro	RESTORE_SP
	LONG_L	sp, PT_R29(sp)
	.endm

	.macro	RESTORE_ALL
	RESTORE_TEMP
	RESTORE_STATIC
	RESTORE_AT
	RESTORE_SOME
	RESTORE_SP
	.endm

	.macro	RESTORE_ALL_AND_RET
	RESTORE_TEMP
	RESTORE_STATIC
	RESTORE_AT
	RESTORE_SOME
	RESTORE_SP_AND_RET
	.endm

/*
 * Move to kernel mode and disable interrupts.
 * Set the cp0 enable bit as a sign that we're running on the kernel stack.
 */
	.macro	CLI
	mfc0	t0, CP0_STATUS
	li	t1, ST0_CU0 | 0x1f
	or	t0, t1
	xori	t0, 0x1f
	mtc0	t0, CP0_STATUS
	irq_disable_hazard
	.endm

/*
 * Move to kernel mode and enable interrupts.
 * Set the cp0 enable bit as a sign that we're running on the kernel stack.
 */
	.macro	STI
	mfc0	t0, CP0_STATUS
	li	t1, ST0_CU0 | 0x1f
	or	t0, t1
	xori	t0, 0x1e
	mtc0	t0, CP0_STATUS
	irq_enable_hazard
	.endm

/*
 * Just move to kernel mode and leave interrupts as they are.
 * Set the cp0 enable bit as a sign that we're running on the kernel stack.
 */
	.macro	KMODE
	mfc0	t0, CP0_STATUS
	li	t1, ST0_CU0 | 0x1e
	or	t0, t1
	xori	t0, 0x1e
	mtc0	t0, CP0_STATUS
	irq_disable_hazard
	.endm

#endif /* _ASM_STACKFRAME_H */