/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1994, 95, 96, 99, 2001 Ralf Baechle
 * Copyright (C) 1994, 1995, 1996 Paul M. Antoine.
 * Copyright (C) 1999 Silicon Graphics, Inc.
 */
#ifndef _ASM_STACKFRAME_H
#define _ASM_STACKFRAME_H

#include <linux/threads.h>

#include <asm/asm.h>
#include <asm/asmmacro.h>
#include <asm/mipsregs.h>
#include <asm/asm-offsets.h>

/*
 * For SMTC kernel, global IE should be left set, and interrupts
 * controlled exclusively via IXMT.
 */
#ifdef CONFIG_MIPS_MT_SMTC
#define STATMASK 0x1e
#elif defined(CONFIG_CPU_R3000) || defined(CONFIG_CPU_TX39XX)
#define STATMASK 0x3f
#else
#define STATMASK 0x1f
#endif

#ifdef CONFIG_MIPS_MT_SMTC
#include <asm/mipsmtregs.h>
#endif /* CONFIG_MIPS_MT_SMTC */
  31. .macro SAVE_AT
  32. .set push
  33. .set noat
  34. LONG_S $1, PT_R1(sp)
  35. .set pop
  36. .endm
  37. .macro SAVE_TEMP
  38. #ifdef CONFIG_CPU_HAS_SMARTMIPS
  39. mflhxu v1
  40. LONG_S v1, PT_LO(sp)
  41. mflhxu v1
  42. LONG_S v1, PT_HI(sp)
  43. mflhxu v1
  44. LONG_S v1, PT_ACX(sp)
  45. #else
  46. mfhi v1
  47. LONG_S v1, PT_HI(sp)
  48. mflo v1
  49. LONG_S v1, PT_LO(sp)
  50. #endif
  51. #ifdef CONFIG_32BIT
  52. LONG_S $8, PT_R8(sp)
  53. LONG_S $9, PT_R9(sp)
  54. #endif
  55. LONG_S $10, PT_R10(sp)
  56. LONG_S $11, PT_R11(sp)
  57. LONG_S $12, PT_R12(sp)
  58. LONG_S $13, PT_R13(sp)
  59. LONG_S $14, PT_R14(sp)
  60. LONG_S $15, PT_R15(sp)
  61. LONG_S $24, PT_R24(sp)
  62. .endm
  63. .macro SAVE_STATIC
  64. LONG_S $16, PT_R16(sp)
  65. LONG_S $17, PT_R17(sp)
  66. LONG_S $18, PT_R18(sp)
  67. LONG_S $19, PT_R19(sp)
  68. LONG_S $20, PT_R20(sp)
  69. LONG_S $21, PT_R21(sp)
  70. LONG_S $22, PT_R22(sp)
  71. LONG_S $23, PT_R23(sp)
  72. LONG_S $30, PT_R30(sp)
  73. .endm
  74. #ifdef CONFIG_SMP
  75. #ifdef CONFIG_MIPS_MT_SMTC
  76. #define PTEBASE_SHIFT 19 /* TCBIND */
  77. #else
  78. #define PTEBASE_SHIFT 23 /* CONTEXT */
  79. #endif
  80. .macro get_saved_sp /* SMP variation */
  81. #ifdef CONFIG_MIPS_MT_SMTC
  82. mfc0 k0, CP0_TCBIND
  83. #else
  84. MFC0 k0, CP0_CONTEXT
  85. #endif
  86. #if defined(CONFIG_32BIT) || defined(KBUILD_64BIT_SYM32)
  87. lui k1, %hi(kernelsp)
  88. #else
  89. lui k1, %highest(kernelsp)
  90. daddiu k1, %higher(kernelsp)
  91. dsll k1, 16
  92. daddiu k1, %hi(kernelsp)
  93. dsll k1, 16
  94. #endif
  95. LONG_SRL k0, PTEBASE_SHIFT
  96. LONG_ADDU k1, k0
  97. LONG_L k1, %lo(kernelsp)(k1)
  98. .endm
  99. .macro set_saved_sp stackp temp temp2
  100. #ifdef CONFIG_MIPS_MT_SMTC
  101. mfc0 \temp, CP0_TCBIND
  102. #else
  103. MFC0 \temp, CP0_CONTEXT
  104. #endif
  105. LONG_SRL \temp, PTEBASE_SHIFT
  106. LONG_S \stackp, kernelsp(\temp)
  107. .endm
  108. #else
  109. .macro get_saved_sp /* Uniprocessor variation */
  110. #if defined(CONFIG_32BIT) || defined(KBUILD_64BIT_SYM32)
  111. lui k1, %hi(kernelsp)
  112. #else
  113. lui k1, %highest(kernelsp)
  114. daddiu k1, %higher(kernelsp)
  115. dsll k1, k1, 16
  116. daddiu k1, %hi(kernelsp)
  117. dsll k1, k1, 16
  118. #endif
  119. LONG_L k1, %lo(kernelsp)(k1)
  120. .endm
  121. .macro set_saved_sp stackp temp temp2
  122. LONG_S \stackp, kernelsp
  123. .endm
  124. #endif
  125. .macro SAVE_SOME
  126. .set push
  127. .set noat
  128. .set reorder
  129. mfc0 k0, CP0_STATUS
  130. sll k0, 3 /* extract cu0 bit */
  131. .set noreorder
  132. bltz k0, 8f
  133. move k1, sp
  134. .set reorder
  135. /* Called from user mode, new stack. */
  136. get_saved_sp
  137. 8: move k0, sp
  138. PTR_SUBU sp, k1, PT_SIZE
  139. LONG_S k0, PT_R29(sp)
  140. LONG_S $3, PT_R3(sp)
  141. /*
  142. * You might think that you don't need to save $0,
  143. * but the FPU emulator and gdb remote debug stub
  144. * need it to operate correctly
  145. */
  146. LONG_S $0, PT_R0(sp)
  147. mfc0 v1, CP0_STATUS
  148. LONG_S $2, PT_R2(sp)
  149. LONG_S v1, PT_STATUS(sp)
  150. #ifdef CONFIG_MIPS_MT_SMTC
  151. /*
  152. * Ideally, these instructions would be shuffled in
  153. * to cover the pipeline delay.
  154. */
  155. .set mips32
  156. mfc0 v1, CP0_TCSTATUS
  157. .set mips0
  158. LONG_S v1, PT_TCSTATUS(sp)
  159. #endif /* CONFIG_MIPS_MT_SMTC */
  160. LONG_S $4, PT_R4(sp)
  161. mfc0 v1, CP0_CAUSE
  162. LONG_S $5, PT_R5(sp)
  163. LONG_S v1, PT_CAUSE(sp)
  164. LONG_S $6, PT_R6(sp)
  165. MFC0 v1, CP0_EPC
  166. LONG_S $7, PT_R7(sp)
  167. #ifdef CONFIG_64BIT
  168. LONG_S $8, PT_R8(sp)
  169. LONG_S $9, PT_R9(sp)
  170. #endif
  171. LONG_S v1, PT_EPC(sp)
  172. LONG_S $25, PT_R25(sp)
  173. LONG_S $28, PT_R28(sp)
  174. LONG_S $31, PT_R31(sp)
  175. ori $28, sp, _THREAD_MASK
  176. xori $28, _THREAD_MASK
  177. .set pop
  178. .endm
  179. .macro SAVE_ALL
  180. SAVE_SOME
  181. SAVE_AT
  182. SAVE_TEMP
  183. SAVE_STATIC
  184. .endm
  185. .macro RESTORE_AT
  186. .set push
  187. .set noat
  188. LONG_L $1, PT_R1(sp)
  189. .set pop
  190. .endm
  191. .macro RESTORE_TEMP
  192. #ifdef CONFIG_CPU_HAS_SMARTMIPS
  193. LONG_L $24, PT_ACX(sp)
  194. mtlhx $24
  195. LONG_L $24, PT_HI(sp)
  196. mtlhx $24
  197. LONG_L $24, PT_LO(sp)
  198. mtlhx $24
  199. #else
  200. LONG_L $24, PT_LO(sp)
  201. mtlo $24
  202. LONG_L $24, PT_HI(sp)
  203. mthi $24
  204. #endif
  205. #ifdef CONFIG_32BIT
  206. LONG_L $8, PT_R8(sp)
  207. LONG_L $9, PT_R9(sp)
  208. #endif
  209. LONG_L $10, PT_R10(sp)
  210. LONG_L $11, PT_R11(sp)
  211. LONG_L $12, PT_R12(sp)
  212. LONG_L $13, PT_R13(sp)
  213. LONG_L $14, PT_R14(sp)
  214. LONG_L $15, PT_R15(sp)
  215. LONG_L $24, PT_R24(sp)
  216. .endm
  217. .macro RESTORE_STATIC
  218. LONG_L $16, PT_R16(sp)
  219. LONG_L $17, PT_R17(sp)
  220. LONG_L $18, PT_R18(sp)
  221. LONG_L $19, PT_R19(sp)
  222. LONG_L $20, PT_R20(sp)
  223. LONG_L $21, PT_R21(sp)
  224. LONG_L $22, PT_R22(sp)
  225. LONG_L $23, PT_R23(sp)
  226. LONG_L $30, PT_R30(sp)
  227. .endm
  228. #if defined(CONFIG_CPU_R3000) || defined(CONFIG_CPU_TX39XX)
  229. .macro RESTORE_SOME
  230. .set push
  231. .set reorder
  232. .set noat
  233. mfc0 a0, CP0_STATUS
  234. li v1, 0xff00
  235. ori a0, STATMASK
  236. xori a0, STATMASK
  237. mtc0 a0, CP0_STATUS
  238. and a0, v1
  239. LONG_L v0, PT_STATUS(sp)
  240. nor v1, $0, v1
  241. and v0, v1
  242. or v0, a0
  243. mtc0 v0, CP0_STATUS
  244. LONG_L $31, PT_R31(sp)
  245. LONG_L $28, PT_R28(sp)
  246. LONG_L $25, PT_R25(sp)
  247. LONG_L $7, PT_R7(sp)
  248. LONG_L $6, PT_R6(sp)
  249. LONG_L $5, PT_R5(sp)
  250. LONG_L $4, PT_R4(sp)
  251. LONG_L $3, PT_R3(sp)
  252. LONG_L $2, PT_R2(sp)
  253. .set pop
  254. .endm
  255. .macro RESTORE_SP_AND_RET
  256. .set push
  257. .set noreorder
  258. LONG_L k0, PT_EPC(sp)
  259. LONG_L sp, PT_R29(sp)
  260. jr k0
  261. rfe
  262. .set pop
  263. .endm
  264. #else
  265. .macro RESTORE_SOME
  266. .set push
  267. .set reorder
  268. .set noat
  269. #ifdef CONFIG_MIPS_MT_SMTC
  270. .set mips32r2
  271. /*
  272. * This may not really be necessary if ints are already
  273. * inhibited here.
  274. */
  275. mfc0 v0, CP0_TCSTATUS
  276. ori v0, TCSTATUS_IXMT
  277. mtc0 v0, CP0_TCSTATUS
  278. _ehb
  279. DMT 5 # dmt a1
  280. jal mips_ihb
  281. #endif /* CONFIG_MIPS_MT_SMTC */
  282. mfc0 a0, CP0_STATUS
  283. ori a0, STATMASK
  284. xori a0, STATMASK
  285. mtc0 a0, CP0_STATUS
  286. li v1, 0xff00
  287. and a0, v1
  288. LONG_L v0, PT_STATUS(sp)
  289. nor v1, $0, v1
  290. and v0, v1
  291. or v0, a0
  292. mtc0 v0, CP0_STATUS
  293. #ifdef CONFIG_MIPS_MT_SMTC
  294. /*
  295. * Only after EXL/ERL have been restored to status can we
  296. * restore TCStatus.IXMT.
  297. */
  298. LONG_L v1, PT_TCSTATUS(sp)
  299. _ehb
  300. mfc0 v0, CP0_TCSTATUS
  301. andi v1, TCSTATUS_IXMT
  302. /* We know that TCStatua.IXMT should be set from above */
  303. xori v0, v0, TCSTATUS_IXMT
  304. or v0, v0, v1
  305. mtc0 v0, CP0_TCSTATUS
  306. _ehb
  307. andi a1, a1, VPECONTROL_TE
  308. beqz a1, 1f
  309. emt
  310. 1:
  311. .set mips0
  312. #endif /* CONFIG_MIPS_MT_SMTC */
  313. LONG_L v1, PT_EPC(sp)
  314. MTC0 v1, CP0_EPC
  315. LONG_L $31, PT_R31(sp)
  316. LONG_L $28, PT_R28(sp)
  317. LONG_L $25, PT_R25(sp)
  318. #ifdef CONFIG_64BIT
  319. LONG_L $8, PT_R8(sp)
  320. LONG_L $9, PT_R9(sp)
  321. #endif
  322. LONG_L $7, PT_R7(sp)
  323. LONG_L $6, PT_R6(sp)
  324. LONG_L $5, PT_R5(sp)
  325. LONG_L $4, PT_R4(sp)
  326. LONG_L $3, PT_R3(sp)
  327. LONG_L $2, PT_R2(sp)
  328. .set pop
  329. .endm
  330. .macro RESTORE_SP_AND_RET
  331. LONG_L sp, PT_R29(sp)
  332. .set mips3
  333. eret
  334. .set mips0
  335. .endm
  336. #endif
  337. .macro RESTORE_SP
  338. LONG_L sp, PT_R29(sp)
  339. .endm
  340. .macro RESTORE_ALL
  341. RESTORE_TEMP
  342. RESTORE_STATIC
  343. RESTORE_AT
  344. RESTORE_SOME
  345. RESTORE_SP
  346. .endm
  347. .macro RESTORE_ALL_AND_RET
  348. RESTORE_TEMP
  349. RESTORE_STATIC
  350. RESTORE_AT
  351. RESTORE_SOME
  352. RESTORE_SP_AND_RET
  353. .endm
  354. /*
  355. * Move to kernel mode and disable interrupts.
  356. * Set cp0 enable bit as sign that we're running on the kernel stack
  357. */
  358. .macro CLI
  359. #if !defined(CONFIG_MIPS_MT_SMTC)
  360. mfc0 t0, CP0_STATUS
  361. li t1, ST0_CU0 | STATMASK
  362. or t0, t1
  363. xori t0, STATMASK
  364. mtc0 t0, CP0_STATUS
  365. #else /* CONFIG_MIPS_MT_SMTC */
  366. /*
  367. * For SMTC, we need to set privilege
  368. * and disable interrupts only for the
  369. * current TC, using the TCStatus register.
  370. */
  371. mfc0 t0, CP0_TCSTATUS
  372. /* Fortunately CU 0 is in the same place in both registers */
  373. /* Set TCU0, TMX, TKSU (for later inversion) and IXMT */
  374. li t1, ST0_CU0 | 0x08001c00
  375. or t0, t1
  376. /* Clear TKSU, leave IXMT */
  377. xori t0, 0x00001800
  378. mtc0 t0, CP0_TCSTATUS
  379. _ehb
  380. /* We need to leave the global IE bit set, but clear EXL...*/
  381. mfc0 t0, CP0_STATUS
  382. ori t0, ST0_EXL | ST0_ERL
  383. xori t0, ST0_EXL | ST0_ERL
  384. mtc0 t0, CP0_STATUS
  385. #endif /* CONFIG_MIPS_MT_SMTC */
  386. irq_disable_hazard
  387. .endm
  388. /*
  389. * Move to kernel mode and enable interrupts.
  390. * Set cp0 enable bit as sign that we're running on the kernel stack
  391. */
  392. .macro STI
  393. #if !defined(CONFIG_MIPS_MT_SMTC)
  394. mfc0 t0, CP0_STATUS
  395. li t1, ST0_CU0 | STATMASK
  396. or t0, t1
  397. xori t0, STATMASK & ~1
  398. mtc0 t0, CP0_STATUS
  399. #else /* CONFIG_MIPS_MT_SMTC */
  400. /*
  401. * For SMTC, we need to set privilege
  402. * and enable interrupts only for the
  403. * current TC, using the TCStatus register.
  404. */
  405. _ehb
  406. mfc0 t0, CP0_TCSTATUS
  407. /* Fortunately CU 0 is in the same place in both registers */
  408. /* Set TCU0, TKSU (for later inversion) and IXMT */
  409. li t1, ST0_CU0 | 0x08001c00
  410. or t0, t1
  411. /* Clear TKSU *and* IXMT */
  412. xori t0, 0x00001c00
  413. mtc0 t0, CP0_TCSTATUS
  414. _ehb
  415. /* We need to leave the global IE bit set, but clear EXL...*/
  416. mfc0 t0, CP0_STATUS
  417. ori t0, ST0_EXL
  418. xori t0, ST0_EXL
  419. mtc0 t0, CP0_STATUS
  420. /* irq_enable_hazard below should expand to EHB for 24K/34K cpus */
  421. #endif /* CONFIG_MIPS_MT_SMTC */
  422. irq_enable_hazard
  423. .endm
  424. /*
  425. * Just move to kernel mode and leave interrupts as they are. Note
  426. * for the R3000 this means copying the previous enable from IEp.
  427. * Set cp0 enable bit as sign that we're running on the kernel stack
  428. */
  429. .macro KMODE
  430. #ifdef CONFIG_MIPS_MT_SMTC
  431. /*
  432. * This gets baroque in SMTC. We want to
  433. * protect the non-atomic clearing of EXL
  434. * with DMT/EMT, but we don't want to take
  435. * an interrupt while DMT is still in effect.
  436. */
  437. /* KMODE gets invoked from both reorder and noreorder code */
  438. .set push
  439. .set mips32r2
  440. .set noreorder
  441. mfc0 v0, CP0_TCSTATUS
  442. andi v1, v0, TCSTATUS_IXMT
  443. ori v0, TCSTATUS_IXMT
  444. mtc0 v0, CP0_TCSTATUS
  445. _ehb
  446. DMT 2 # dmt v0
  447. /*
  448. * We don't know a priori if ra is "live"
  449. */
  450. move t0, ra
  451. jal mips_ihb
  452. nop /* delay slot */
  453. move ra, t0
  454. #endif /* CONFIG_MIPS_MT_SMTC */
  455. mfc0 t0, CP0_STATUS
  456. li t1, ST0_CU0 | (STATMASK & ~1)
  457. #if defined(CONFIG_CPU_R3000) || defined(CONFIG_CPU_TX39XX)
  458. andi t2, t0, ST0_IEP
  459. srl t2, 2
  460. or t0, t2
  461. #endif
  462. or t0, t1
  463. xori t0, STATMASK & ~1
  464. mtc0 t0, CP0_STATUS
  465. #ifdef CONFIG_MIPS_MT_SMTC
  466. _ehb
  467. andi v0, v0, VPECONTROL_TE
  468. beqz v0, 2f
  469. nop /* delay slot */
  470. emt
  471. 2:
  472. mfc0 v0, CP0_TCSTATUS
  473. /* Clear IXMT, then OR in previous value */
  474. ori v0, TCSTATUS_IXMT
  475. xori v0, TCSTATUS_IXMT
  476. or v0, v1, v0
  477. mtc0 v0, CP0_TCSTATUS
  478. /*
  479. * irq_disable_hazard below should expand to EHB
  480. * on 24K/34K CPUS
  481. */
  482. .set pop
  483. #endif /* CONFIG_MIPS_MT_SMTC */
  484. irq_disable_hazard
  485. .endm
  486. #endif /* _ASM_STACKFRAME_H */