/* stackframe.h — exception-frame save/restore macros (extraction artifacts removed) */
/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1994, 95, 96, 99, 2001 Ralf Baechle
 * Copyright (C) 1994, 1995, 1996 Paul M. Antoine.
 * Copyright (C) 1999 Silicon Graphics, Inc.
 */
  10. #ifndef _ASM_STACKFRAME_H
  11. #define _ASM_STACKFRAME_H
  12. #include <linux/config.h>
  13. #include <linux/threads.h>
  14. #include <asm/asm.h>
  15. #include <asm/asmmacro.h>
  16. #include <asm/mipsregs.h>
  17. #include <asm/asm-offsets.h>
  18. #ifdef CONFIG_MIPS_MT_SMTC
  19. #include <asm/mipsmtregs.h>
  20. #endif /* CONFIG_MIPS_MT_SMTC */
  21. .macro SAVE_AT
  22. .set push
  23. .set noat
  24. LONG_S $1, PT_R1(sp)
  25. .set pop
  26. .endm
  27. .macro SAVE_TEMP
  28. mfhi v1
  29. #ifdef CONFIG_32BIT
  30. LONG_S $8, PT_R8(sp)
  31. LONG_S $9, PT_R9(sp)
  32. #endif
  33. LONG_S v1, PT_HI(sp)
  34. mflo v1
  35. LONG_S $10, PT_R10(sp)
  36. LONG_S $11, PT_R11(sp)
  37. LONG_S v1, PT_LO(sp)
  38. LONG_S $12, PT_R12(sp)
  39. LONG_S $13, PT_R13(sp)
  40. LONG_S $14, PT_R14(sp)
  41. LONG_S $15, PT_R15(sp)
  42. LONG_S $24, PT_R24(sp)
  43. .endm
  44. .macro SAVE_STATIC
  45. LONG_S $16, PT_R16(sp)
  46. LONG_S $17, PT_R17(sp)
  47. LONG_S $18, PT_R18(sp)
  48. LONG_S $19, PT_R19(sp)
  49. LONG_S $20, PT_R20(sp)
  50. LONG_S $21, PT_R21(sp)
  51. LONG_S $22, PT_R22(sp)
  52. LONG_S $23, PT_R23(sp)
  53. LONG_S $30, PT_R30(sp)
  54. .endm
  55. #ifdef CONFIG_SMP
  56. .macro get_saved_sp /* SMP variation */
  57. #ifdef CONFIG_32BIT
  58. #ifdef CONFIG_MIPS_MT_SMTC
  59. .set mips32
  60. mfc0 k0, CP0_TCBIND;
  61. .set mips0
  62. lui k1, %hi(kernelsp)
  63. srl k0, k0, 19
  64. /* No need to shift down and up to clear bits 0-1 */
  65. #else
  66. mfc0 k0, CP0_CONTEXT
  67. lui k1, %hi(kernelsp)
  68. srl k0, k0, 23
  69. #endif
  70. addu k1, k0
  71. LONG_L k1, %lo(kernelsp)(k1)
  72. #endif
  73. #ifdef CONFIG_64BIT
  74. #ifdef CONFIG_MIPS_MT_SMTC
  75. .set mips64
  76. mfc0 k0, CP0_TCBIND;
  77. .set mips0
  78. lui k0, %highest(kernelsp)
  79. dsrl k1, 19
  80. /* No need to shift down and up to clear bits 0-2 */
  81. #else
  82. MFC0 k1, CP0_CONTEXT
  83. lui k0, %highest(kernelsp)
  84. dsrl k1, 23
  85. daddiu k0, %higher(kernelsp)
  86. dsll k0, k0, 16
  87. daddiu k0, %hi(kernelsp)
  88. dsll k0, k0, 16
  89. #endif /* CONFIG_MIPS_MT_SMTC */
  90. daddu k1, k1, k0
  91. LONG_L k1, %lo(kernelsp)(k1)
  92. #endif /* CONFIG_64BIT */
  93. .endm
  94. .macro set_saved_sp stackp temp temp2
  95. #ifdef CONFIG_32BIT
  96. #ifdef CONFIG_MIPS_MT_SMTC
  97. mfc0 \temp, CP0_TCBIND
  98. srl \temp, 19
  99. #else
  100. mfc0 \temp, CP0_CONTEXT
  101. srl \temp, 23
  102. #endif
  103. #endif
  104. #ifdef CONFIG_64BIT
  105. #ifdef CONFIG_MIPS_MT_SMTC
  106. mfc0 \temp, CP0_TCBIND
  107. dsrl \temp, 19
  108. #else
  109. MFC0 \temp, CP0_CONTEXT
  110. dsrl \temp, 23
  111. #endif
  112. #endif
  113. LONG_S \stackp, kernelsp(\temp)
  114. .endm
  115. #else
  116. .macro get_saved_sp /* Uniprocessor variation */
  117. #ifdef CONFIG_64BIT
  118. lui k1, %highest(kernelsp)
  119. daddiu k1, %higher(kernelsp)
  120. dsll k1, k1, 16
  121. daddiu k1, %hi(kernelsp)
  122. dsll k1, k1, 16
  123. #else
  124. lui k1, %hi(kernelsp)
  125. #endif
  126. LONG_L k1, %lo(kernelsp)(k1)
  127. .endm
  128. .macro set_saved_sp stackp temp temp2
  129. LONG_S \stackp, kernelsp
  130. .endm
  131. #endif
  132. .macro SAVE_SOME
  133. .set push
  134. .set noat
  135. .set reorder
  136. mfc0 k0, CP0_STATUS
  137. sll k0, 3 /* extract cu0 bit */
  138. .set noreorder
  139. bltz k0, 8f
  140. move k1, sp
  141. .set reorder
  142. /* Called from user mode, new stack. */
  143. get_saved_sp
  144. 8: move k0, sp
  145. PTR_SUBU sp, k1, PT_SIZE
  146. LONG_S k0, PT_R29(sp)
  147. LONG_S $3, PT_R3(sp)
  148. /*
  149. * You might think that you don't need to save $0,
  150. * but the FPU emulator and gdb remote debug stub
  151. * need it to operate correctly
  152. */
  153. LONG_S $0, PT_R0(sp)
  154. mfc0 v1, CP0_STATUS
  155. LONG_S $2, PT_R2(sp)
  156. LONG_S v1, PT_STATUS(sp)
  157. #ifdef CONFIG_MIPS_MT_SMTC
  158. /*
  159. * Ideally, these instructions would be shuffled in
  160. * to cover the pipeline delay.
  161. */
  162. .set mips32
  163. mfc0 v1, CP0_TCSTATUS
  164. .set mips0
  165. LONG_S v1, PT_TCSTATUS(sp)
  166. #endif /* CONFIG_MIPS_MT_SMTC */
  167. LONG_S $4, PT_R4(sp)
  168. mfc0 v1, CP0_CAUSE
  169. LONG_S $5, PT_R5(sp)
  170. LONG_S v1, PT_CAUSE(sp)
  171. LONG_S $6, PT_R6(sp)
  172. MFC0 v1, CP0_EPC
  173. LONG_S $7, PT_R7(sp)
  174. #ifdef CONFIG_64BIT
  175. LONG_S $8, PT_R8(sp)
  176. LONG_S $9, PT_R9(sp)
  177. #endif
  178. LONG_S v1, PT_EPC(sp)
  179. LONG_S $25, PT_R25(sp)
  180. LONG_S $28, PT_R28(sp)
  181. LONG_S $31, PT_R31(sp)
  182. ori $28, sp, _THREAD_MASK
  183. xori $28, _THREAD_MASK
  184. .set pop
  185. .endm
  186. .macro SAVE_ALL
  187. SAVE_SOME
  188. SAVE_AT
  189. SAVE_TEMP
  190. SAVE_STATIC
  191. .endm
  192. .macro RESTORE_AT
  193. .set push
  194. .set noat
  195. LONG_L $1, PT_R1(sp)
  196. .set pop
  197. .endm
  198. .macro RESTORE_TEMP
  199. LONG_L $24, PT_LO(sp)
  200. #ifdef CONFIG_32BIT
  201. LONG_L $8, PT_R8(sp)
  202. LONG_L $9, PT_R9(sp)
  203. #endif
  204. mtlo $24
  205. LONG_L $24, PT_HI(sp)
  206. LONG_L $10, PT_R10(sp)
  207. LONG_L $11, PT_R11(sp)
  208. mthi $24
  209. LONG_L $12, PT_R12(sp)
  210. LONG_L $13, PT_R13(sp)
  211. LONG_L $14, PT_R14(sp)
  212. LONG_L $15, PT_R15(sp)
  213. LONG_L $24, PT_R24(sp)
  214. .endm
  215. .macro RESTORE_STATIC
  216. LONG_L $16, PT_R16(sp)
  217. LONG_L $17, PT_R17(sp)
  218. LONG_L $18, PT_R18(sp)
  219. LONG_L $19, PT_R19(sp)
  220. LONG_L $20, PT_R20(sp)
  221. LONG_L $21, PT_R21(sp)
  222. LONG_L $22, PT_R22(sp)
  223. LONG_L $23, PT_R23(sp)
  224. LONG_L $30, PT_R30(sp)
  225. .endm
  226. #if defined(CONFIG_CPU_R3000) || defined(CONFIG_CPU_TX39XX)
  227. .macro RESTORE_SOME
  228. .set push
  229. .set reorder
  230. .set noat
  231. mfc0 a0, CP0_STATUS
  232. ori a0, 0x1f
  233. xori a0, 0x1f
  234. mtc0 a0, CP0_STATUS
  235. li v1, 0xff00
  236. and a0, v1
  237. LONG_L v0, PT_STATUS(sp)
  238. nor v1, $0, v1
  239. and v0, v1
  240. or v0, a0
  241. mtc0 v0, CP0_STATUS
  242. LONG_L $31, PT_R31(sp)
  243. LONG_L $28, PT_R28(sp)
  244. LONG_L $25, PT_R25(sp)
  245. #ifdef CONFIG_64BIT
  246. LONG_L $8, PT_R8(sp)
  247. LONG_L $9, PT_R9(sp)
  248. #endif
  249. LONG_L $7, PT_R7(sp)
  250. LONG_L $6, PT_R6(sp)
  251. LONG_L $5, PT_R5(sp)
  252. LONG_L $4, PT_R4(sp)
  253. LONG_L $3, PT_R3(sp)
  254. LONG_L $2, PT_R2(sp)
  255. .set pop
  256. .endm
  257. .macro RESTORE_SP_AND_RET
  258. .set push
  259. .set noreorder
  260. LONG_L k0, PT_EPC(sp)
  261. LONG_L sp, PT_R29(sp)
  262. jr k0
  263. rfe
  264. .set pop
  265. .endm
  266. #else
  267. /*
  268. * For SMTC kernel, global IE should be left set, and interrupts
  269. * controlled exclusively via IXMT.
  270. */
  271. #ifdef CONFIG_MIPS_MT_SMTC
  272. #define STATMASK 0x1e
  273. #else
  274. #define STATMASK 0x1f
  275. #endif
  276. .macro RESTORE_SOME
  277. .set push
  278. .set reorder
  279. .set noat
  280. #ifdef CONFIG_MIPS_MT_SMTC
  281. .set mips32r2
  282. /*
  283. * This may not really be necessary if ints are already
  284. * inhibited here.
  285. */
  286. mfc0 v0, CP0_TCSTATUS
  287. ori v0, TCSTATUS_IXMT
  288. mtc0 v0, CP0_TCSTATUS
  289. ehb
  290. DMT 5 # dmt a1
  291. jal mips_ihb
  292. #endif /* CONFIG_MIPS_MT_SMTC */
  293. mfc0 a0, CP0_STATUS
  294. ori a0, STATMASK
  295. xori a0, STATMASK
  296. mtc0 a0, CP0_STATUS
  297. li v1, 0xff00
  298. and a0, v1
  299. LONG_L v0, PT_STATUS(sp)
  300. nor v1, $0, v1
  301. and v0, v1
  302. or v0, a0
  303. mtc0 v0, CP0_STATUS
  304. #ifdef CONFIG_MIPS_MT_SMTC
  305. /*
  306. * Only after EXL/ERL have been restored to status can we
  307. * restore TCStatus.IXMT.
  308. */
  309. LONG_L v1, PT_TCSTATUS(sp)
  310. ehb
  311. mfc0 v0, CP0_TCSTATUS
  312. andi v1, TCSTATUS_IXMT
  313. /* We know that TCStatua.IXMT should be set from above */
  314. xori v0, v0, TCSTATUS_IXMT
  315. or v0, v0, v1
  316. mtc0 v0, CP0_TCSTATUS
  317. ehb
  318. andi a1, a1, VPECONTROL_TE
  319. beqz a1, 1f
  320. emt
  321. 1:
  322. .set mips0
  323. #endif /* CONFIG_MIPS_MT_SMTC */
  324. LONG_L v1, PT_EPC(sp)
  325. MTC0 v1, CP0_EPC
  326. LONG_L $31, PT_R31(sp)
  327. LONG_L $28, PT_R28(sp)
  328. LONG_L $25, PT_R25(sp)
  329. #ifdef CONFIG_64BIT
  330. LONG_L $8, PT_R8(sp)
  331. LONG_L $9, PT_R9(sp)
  332. #endif
  333. LONG_L $7, PT_R7(sp)
  334. LONG_L $6, PT_R6(sp)
  335. LONG_L $5, PT_R5(sp)
  336. LONG_L $4, PT_R4(sp)
  337. LONG_L $3, PT_R3(sp)
  338. LONG_L $2, PT_R2(sp)
  339. .set pop
  340. .endm
  341. .macro RESTORE_SP_AND_RET
  342. LONG_L sp, PT_R29(sp)
  343. .set mips3
  344. eret
  345. .set mips0
  346. .endm
  347. #endif
  348. .macro RESTORE_SP
  349. LONG_L sp, PT_R29(sp)
  350. .endm
  351. .macro RESTORE_ALL
  352. RESTORE_TEMP
  353. RESTORE_STATIC
  354. RESTORE_AT
  355. RESTORE_SOME
  356. RESTORE_SP
  357. .endm
  358. .macro RESTORE_ALL_AND_RET
  359. RESTORE_TEMP
  360. RESTORE_STATIC
  361. RESTORE_AT
  362. RESTORE_SOME
  363. RESTORE_SP_AND_RET
  364. .endm
  365. /*
  366. * Move to kernel mode and disable interrupts.
  367. * Set cp0 enable bit as sign that we're running on the kernel stack
  368. */
  369. .macro CLI
  370. #if !defined(CONFIG_MIPS_MT_SMTC)
  371. mfc0 t0, CP0_STATUS
  372. li t1, ST0_CU0 | 0x1f
  373. or t0, t1
  374. xori t0, 0x1f
  375. mtc0 t0, CP0_STATUS
  376. #else /* CONFIG_MIPS_MT_SMTC */
  377. /*
  378. * For SMTC, we need to set privilege
  379. * and disable interrupts only for the
  380. * current TC, using the TCStatus register.
  381. */
  382. mfc0 t0,CP0_TCSTATUS
  383. /* Fortunately CU 0 is in the same place in both registers */
  384. /* Set TCU0, TMX, TKSU (for later inversion) and IXMT */
  385. li t1, ST0_CU0 | 0x08001c00
  386. or t0,t1
  387. /* Clear TKSU, leave IXMT */
  388. xori t0, 0x00001800
  389. mtc0 t0, CP0_TCSTATUS
  390. ehb
  391. /* We need to leave the global IE bit set, but clear EXL...*/
  392. mfc0 t0, CP0_STATUS
  393. ori t0, ST0_EXL | ST0_ERL
  394. xori t0, ST0_EXL | ST0_ERL
  395. mtc0 t0, CP0_STATUS
  396. #endif /* CONFIG_MIPS_MT_SMTC */
  397. irq_disable_hazard
  398. .endm
  399. /*
  400. * Move to kernel mode and enable interrupts.
  401. * Set cp0 enable bit as sign that we're running on the kernel stack
  402. */
  403. .macro STI
  404. #if !defined(CONFIG_MIPS_MT_SMTC)
  405. mfc0 t0, CP0_STATUS
  406. li t1, ST0_CU0 | 0x1f
  407. or t0, t1
  408. xori t0, 0x1e
  409. mtc0 t0, CP0_STATUS
  410. #else /* CONFIG_MIPS_MT_SMTC */
  411. /*
  412. * For SMTC, we need to set privilege
  413. * and enable interrupts only for the
  414. * current TC, using the TCStatus register.
  415. */
  416. ehb
  417. mfc0 t0,CP0_TCSTATUS
  418. /* Fortunately CU 0 is in the same place in both registers */
  419. /* Set TCU0, TKSU (for later inversion) and IXMT */
  420. li t1, ST0_CU0 | 0x08001c00
  421. or t0,t1
  422. /* Clear TKSU *and* IXMT */
  423. xori t0, 0x00001c00
  424. mtc0 t0, CP0_TCSTATUS
  425. ehb
  426. /* We need to leave the global IE bit set, but clear EXL...*/
  427. mfc0 t0, CP0_STATUS
  428. ori t0, ST0_EXL
  429. xori t0, ST0_EXL
  430. mtc0 t0, CP0_STATUS
  431. /* irq_enable_hazard below should expand to EHB for 24K/34K cpus */
  432. #endif /* CONFIG_MIPS_MT_SMTC */
  433. irq_enable_hazard
  434. .endm
  435. /*
  436. * Just move to kernel mode and leave interrupts as they are.
  437. * Set cp0 enable bit as sign that we're running on the kernel stack
  438. */
  439. .macro KMODE
  440. #ifdef CONFIG_MIPS_MT_SMTC
  441. /*
  442. * This gets baroque in SMTC. We want to
  443. * protect the non-atomic clearing of EXL
  444. * with DMT/EMT, but we don't want to take
  445. * an interrupt while DMT is still in effect.
  446. */
  447. /* KMODE gets invoked from both reorder and noreorder code */
  448. .set push
  449. .set mips32r2
  450. .set noreorder
  451. mfc0 v0, CP0_TCSTATUS
  452. andi v1, v0, TCSTATUS_IXMT
  453. ori v0, TCSTATUS_IXMT
  454. mtc0 v0, CP0_TCSTATUS
  455. ehb
  456. DMT 2 # dmt v0
  457. /*
  458. * We don't know a priori if ra is "live"
  459. */
  460. move t0, ra
  461. jal mips_ihb
  462. nop /* delay slot */
  463. move ra, t0
  464. #endif /* CONFIG_MIPS_MT_SMTC */
  465. mfc0 t0, CP0_STATUS
  466. li t1, ST0_CU0 | 0x1e
  467. or t0, t1
  468. xori t0, 0x1e
  469. mtc0 t0, CP0_STATUS
  470. #ifdef CONFIG_MIPS_MT_SMTC
  471. ehb
  472. andi v0, v0, VPECONTROL_TE
  473. beqz v0, 2f
  474. nop /* delay slot */
  475. emt
  476. 2:
  477. mfc0 v0, CP0_TCSTATUS
  478. /* Clear IXMT, then OR in previous value */
  479. ori v0, TCSTATUS_IXMT
  480. xori v0, TCSTATUS_IXMT
  481. or v0, v1, v0
  482. mtc0 v0, CP0_TCSTATUS
  483. /*
  484. * irq_disable_hazard below should expand to EHB
  485. * on 24K/34K CPUS
  486. */
  487. .set pop
  488. #endif /* CONFIG_MIPS_MT_SMTC */
  489. irq_disable_hazard
  490. .endm
  491. #endif /* _ASM_STACKFRAME_H */