/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1994, 95, 96, 99, 2001 Ralf Baechle
 * Copyright (C) 1994, 1995, 1996 Paul M. Antoine.
 * Copyright (C) 1999 Silicon Graphics, Inc.
 */
#ifndef _ASM_STACKFRAME_H
#define _ASM_STACKFRAME_H

#include <linux/threads.h>

#include <asm/asm.h>
#include <asm/asmmacro.h>
#include <asm/mipsregs.h>
#include <asm/asm-offsets.h>

#ifdef CONFIG_MIPS_MT_SMTC
#include <asm/mipsmtregs.h>
#endif /* CONFIG_MIPS_MT_SMTC */
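
/*
 * Save the assembler temporary register $1 (AT) into the pt_regs frame.
 * This is done separately because AT may only be touched under ".set noat".
 */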
        .macro SAVE_AT
        .set push
        .set noat
        LONG_S $1, PT_R1(sp)
        .set pop
        .endm
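
/*
 * Save the multiply/divide accumulator (HI/LO, or the SmartMIPS ACX
 * accumulator when CONFIG_CPU_HAS_SMARTMIPS is set) and the caller-saved
 * temporaries $10..$15 and $24.  On 32-bit kernels $8 and $9 are saved
 * here as well; on 64-bit they are argument registers and are saved by
 * SAVE_SOME instead.
 */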
        .macro SAVE_TEMP
#ifdef CONFIG_CPU_HAS_SMARTMIPS
        mflhxu v1
        LONG_S v1, PT_LO(sp)
        mflhxu v1
        LONG_S v1, PT_HI(sp)
        mflhxu v1
        LONG_S v1, PT_ACX(sp)
#else
        mfhi v1
        LONG_S v1, PT_HI(sp)
        mflo v1
        LONG_S v1, PT_LO(sp)
#endif
#ifdef CONFIG_32BIT
        LONG_S $8, PT_R8(sp)
        LONG_S $9, PT_R9(sp)
#endif
        LONG_S $10, PT_R10(sp)
        LONG_S $11, PT_R11(sp)
        LONG_S $12, PT_R12(sp)
        LONG_S $13, PT_R13(sp)
        LONG_S $14, PT_R14(sp)
        LONG_S $15, PT_R15(sp)
        LONG_S $24, PT_R24(sp)
        .endm
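
/*
 * Save the callee-saved registers $16..$23 (s0..s7) and $30 (s8/fp).
 */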
        .macro SAVE_STATIC
        LONG_S $16, PT_R16(sp)
        LONG_S $17, PT_R17(sp)
        LONG_S $18, PT_R18(sp)
        LONG_S $19, PT_R19(sp)
        LONG_S $20, PT_R20(sp)
        LONG_S $21, PT_R21(sp)
        LONG_S $22, PT_R22(sp)
        LONG_S $23, PT_R23(sp)
        LONG_S $30, PT_R30(sp)
        .endm
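
/*
 * get_saved_sp loads the kernel stack pointer for the current CPU (or TC,
 * under SMTC) from the kernelsp array into k1, indexing by the CPU number
 * held in the upper bits of CP0 Context (or TCBind).  set_saved_sp stores
 * \stackp back into that slot.  Uniprocessor kernels simply use a single
 * kernelsp variable.
 */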
#ifdef CONFIG_SMP
#ifdef CONFIG_MIPS_MT_SMTC
#define PTEBASE_SHIFT 19 /* TCBIND */
#else
#define PTEBASE_SHIFT 23 /* CONTEXT */
#endif
        .macro get_saved_sp /* SMP variation */
#ifdef CONFIG_MIPS_MT_SMTC
        mfc0 k0, CP0_TCBIND
#else
        MFC0 k0, CP0_CONTEXT
#endif
#if defined(CONFIG_BUILD_ELF64) || (defined(CONFIG_64BIT) && __GNUC__ < 4)
        lui k1, %highest(kernelsp)
        daddiu k1, %higher(kernelsp)
        dsll k1, 16
        daddiu k1, %hi(kernelsp)
        dsll k1, 16
#else
        lui k1, %hi(kernelsp)
#endif
        LONG_SRL k0, PTEBASE_SHIFT
        LONG_ADDU k1, k0
        LONG_L k1, %lo(kernelsp)(k1)
        .endm

        .macro set_saved_sp stackp temp temp2
#ifdef CONFIG_MIPS_MT_SMTC
        mfc0 \temp, CP0_TCBIND
#else
        MFC0 \temp, CP0_CONTEXT
#endif
        LONG_SRL \temp, PTEBASE_SHIFT
        LONG_S \stackp, kernelsp(\temp)
        .endm
#else
        .macro get_saved_sp /* Uniprocessor variation */
#if defined(CONFIG_BUILD_ELF64) || (defined(CONFIG_64BIT) && __GNUC__ < 4)
        lui k1, %highest(kernelsp)
        daddiu k1, %higher(kernelsp)
        dsll k1, k1, 16
        daddiu k1, %hi(kernelsp)
        dsll k1, k1, 16
#else
        lui k1, %hi(kernelsp)
#endif
        LONG_L k1, %lo(kernelsp)(k1)
        .endm

        .macro set_saved_sp stackp temp temp2
        LONG_S \stackp, kernelsp
        .endm
#endif
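
/*
 * Allocate a struct pt_regs frame on the kernel stack and save the
 * registers every exception handler needs: the old sp, $0, v0/v1, the
 * argument registers, Status, Cause, EPC, $25, $28 and $31.  If the
 * exception came from user mode (Status.CU0 clear) the kernel stack is
 * fetched with get_saved_sp first.  gp ($28) is left pointing at the
 * current thread_info.
 */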
        .macro SAVE_SOME
        .set push
        .set noat
        .set reorder
        mfc0 k0, CP0_STATUS
        sll k0, 3 /* extract cu0 bit */
        .set noreorder
        bltz k0, 8f
        move k1, sp
        .set reorder
        /* Called from user mode, new stack. */
        get_saved_sp
8:      move k0, sp
        PTR_SUBU sp, k1, PT_SIZE
        LONG_S k0, PT_R29(sp)
        LONG_S $3, PT_R3(sp)
        /*
         * You might think that you don't need to save $0,
         * but the FPU emulator and gdb remote debug stub
         * need it to operate correctly
         */
        LONG_S $0, PT_R0(sp)
        mfc0 v1, CP0_STATUS
        LONG_S $2, PT_R2(sp)
        LONG_S v1, PT_STATUS(sp)
#ifdef CONFIG_MIPS_MT_SMTC
        /*
         * Ideally, these instructions would be shuffled in
         * to cover the pipeline delay.
         */
        .set mips32
        mfc0 v1, CP0_TCSTATUS
        .set mips0
        LONG_S v1, PT_TCSTATUS(sp)
#endif /* CONFIG_MIPS_MT_SMTC */
        LONG_S $4, PT_R4(sp)
        mfc0 v1, CP0_CAUSE
        LONG_S $5, PT_R5(sp)
        LONG_S v1, PT_CAUSE(sp)
        LONG_S $6, PT_R6(sp)
        MFC0 v1, CP0_EPC
        LONG_S $7, PT_R7(sp)
#ifdef CONFIG_64BIT
        LONG_S $8, PT_R8(sp)
        LONG_S $9, PT_R9(sp)
#endif
        LONG_S v1, PT_EPC(sp)
        LONG_S $25, PT_R25(sp)
        LONG_S $28, PT_R28(sp)
        LONG_S $31, PT_R31(sp)
        ori $28, sp, _THREAD_MASK
        xori $28, _THREAD_MASK
        .set pop
        .endm
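
/*
 * Save the complete integer register set: SAVE_SOME plus AT, the
 * temporaries and the callee-saved registers.
 */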
        .macro SAVE_ALL
        SAVE_SOME
        SAVE_AT
        SAVE_TEMP
        SAVE_STATIC
        .endm
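
/*
 * Restore the assembler temporary register $1 (AT).
 */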
        .macro RESTORE_AT
        .set push
        .set noat
        LONG_L $1, PT_R1(sp)
        .set pop
        .endm
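
/*
 * Restore HI/LO (or the SmartMIPS ACX accumulator) and the temporaries
 * saved by SAVE_TEMP.
 */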
        .macro RESTORE_TEMP
#ifdef CONFIG_CPU_HAS_SMARTMIPS
        LONG_L $24, PT_ACX(sp)
        mtlhx $24
        LONG_L $24, PT_HI(sp)
        mtlhx $24
        LONG_L $24, PT_LO(sp)
        mtlhx $24
#else
        LONG_L $24, PT_LO(sp)
        mtlo $24
        LONG_L $24, PT_HI(sp)
        mthi $24
#endif
#ifdef CONFIG_32BIT
        LONG_L $8, PT_R8(sp)
        LONG_L $9, PT_R9(sp)
#endif
        LONG_L $10, PT_R10(sp)
        LONG_L $11, PT_R11(sp)
        LONG_L $12, PT_R12(sp)
        LONG_L $13, PT_R13(sp)
        LONG_L $14, PT_R14(sp)
        LONG_L $15, PT_R15(sp)
        LONG_L $24, PT_R24(sp)
        .endm
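
/*
 * Restore the callee-saved registers saved by SAVE_STATIC.
 */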
        .macro RESTORE_STATIC
        LONG_L $16, PT_R16(sp)
        LONG_L $17, PT_R17(sp)
        LONG_L $18, PT_R18(sp)
        LONG_L $19, PT_R19(sp)
        LONG_L $20, PT_R20(sp)
        LONG_L $21, PT_R21(sp)
        LONG_L $22, PT_R22(sp)
        LONG_L $23, PT_R23(sp)
        LONG_L $30, PT_R30(sp)
        .endm
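
/*
 * RESTORE_SOME and RESTORE_SP_AND_RET come in two flavours: the
 * R3000/TX39 version returns with "jr k0; rfe", all other CPUs restore
 * EPC and return with eret.  Both restore the saved Status value while
 * keeping the live interrupt-mask bits (0xff00) intact.
 */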
#if defined(CONFIG_CPU_R3000) || defined(CONFIG_CPU_TX39XX)

        .macro RESTORE_SOME
        .set push
        .set reorder
        .set noat
        mfc0 a0, CP0_STATUS
        ori a0, 0x1f
        xori a0, 0x1f
        mtc0 a0, CP0_STATUS
        li v1, 0xff00
        and a0, v1
        LONG_L v0, PT_STATUS(sp)
        nor v1, $0, v1
        and v0, v1
        or v0, a0
        mtc0 v0, CP0_STATUS
        LONG_L $31, PT_R31(sp)
        LONG_L $28, PT_R28(sp)
        LONG_L $25, PT_R25(sp)
#ifdef CONFIG_64BIT
        LONG_L $8, PT_R8(sp)
        LONG_L $9, PT_R9(sp)
#endif
        LONG_L $7, PT_R7(sp)
        LONG_L $6, PT_R6(sp)
        LONG_L $5, PT_R5(sp)
        LONG_L $4, PT_R4(sp)
        LONG_L $3, PT_R3(sp)
        LONG_L $2, PT_R2(sp)
        .set pop
        .endm

        .macro RESTORE_SP_AND_RET
        .set push
        .set noreorder
        LONG_L k0, PT_EPC(sp)
        LONG_L sp, PT_R29(sp)
        jr k0
        rfe
        .set pop
        .endm

#else
/*
 * For SMTC kernel, global IE should be left set, and interrupts
 * controlled exclusively via IXMT.
 */
#ifdef CONFIG_MIPS_MT_SMTC
#define STATMASK 0x1e
#else
#define STATMASK 0x1f
#endif
        .macro RESTORE_SOME
        .set push
        .set reorder
        .set noat
#ifdef CONFIG_MIPS_MT_SMTC
        .set mips32r2
        /*
         * This may not really be necessary if ints are already
         * inhibited here.
         */
        mfc0 v0, CP0_TCSTATUS
        ori v0, TCSTATUS_IXMT
        mtc0 v0, CP0_TCSTATUS
        _ehb
        DMT 5 # dmt a1
        jal mips_ihb
#endif /* CONFIG_MIPS_MT_SMTC */
        mfc0 a0, CP0_STATUS
        ori a0, STATMASK
        xori a0, STATMASK
        mtc0 a0, CP0_STATUS
        li v1, 0xff00
        and a0, v1
        LONG_L v0, PT_STATUS(sp)
        nor v1, $0, v1
        and v0, v1
        or v0, a0
        mtc0 v0, CP0_STATUS
#ifdef CONFIG_MIPS_MT_SMTC
        /*
         * Only after EXL/ERL have been restored to status can we
         * restore TCStatus.IXMT.
         */
        LONG_L v1, PT_TCSTATUS(sp)
        _ehb
        mfc0 v0, CP0_TCSTATUS
        andi v1, TCSTATUS_IXMT
        /* We know that TCStatus.IXMT should be set from above */
        xori v0, v0, TCSTATUS_IXMT
        or v0, v0, v1
        mtc0 v0, CP0_TCSTATUS
        _ehb
        andi a1, a1, VPECONTROL_TE
        beqz a1, 1f
        emt
1:
        .set mips0
#endif /* CONFIG_MIPS_MT_SMTC */
        LONG_L v1, PT_EPC(sp)
        MTC0 v1, CP0_EPC
        LONG_L $31, PT_R31(sp)
        LONG_L $28, PT_R28(sp)
        LONG_L $25, PT_R25(sp)
#ifdef CONFIG_64BIT
        LONG_L $8, PT_R8(sp)
        LONG_L $9, PT_R9(sp)
#endif
        LONG_L $7, PT_R7(sp)
        LONG_L $6, PT_R6(sp)
        LONG_L $5, PT_R5(sp)
        LONG_L $4, PT_R4(sp)
        LONG_L $3, PT_R3(sp)
        LONG_L $2, PT_R2(sp)
        .set pop
        .endm

        .macro RESTORE_SP_AND_RET
        LONG_L sp, PT_R29(sp)
        .set mips3
        eret
        .set mips0
        .endm
#endif
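
/*
 * RESTORE_SP reloads the previous stack pointer.  RESTORE_ALL and
 * RESTORE_ALL_AND_RET undo a full SAVE_ALL; the latter also returns
 * from the exception.
 */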
        .macro RESTORE_SP
        LONG_L sp, PT_R29(sp)
        .endm

        .macro RESTORE_ALL
        RESTORE_TEMP
        RESTORE_STATIC
        RESTORE_AT
        RESTORE_SOME
        RESTORE_SP
        .endm

        .macro RESTORE_ALL_AND_RET
        RESTORE_TEMP
        RESTORE_STATIC
        RESTORE_AT
        RESTORE_SOME
        RESTORE_SP_AND_RET
        .endm
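
/*
 * These macros are typically paired by the low-level exception handlers:
 * SAVE_ALL or SAVE_SOME together with CLI/STI/KMODE on entry, and
 * RESTORE_ALL_AND_RET on the way back out.
 */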

/*
 * Move to kernel mode and disable interrupts.
 * Set cp0 enable bit as sign that we're running on the kernel stack
 */
        .macro CLI
#if !defined(CONFIG_MIPS_MT_SMTC)
        mfc0 t0, CP0_STATUS
        li t1, ST0_CU0 | 0x1f
        or t0, t1
        xori t0, 0x1f
        mtc0 t0, CP0_STATUS
#else /* CONFIG_MIPS_MT_SMTC */
        /*
         * For SMTC, we need to set privilege
         * and disable interrupts only for the
         * current TC, using the TCStatus register.
         */
        mfc0 t0, CP0_TCSTATUS
        /* Fortunately CU 0 is in the same place in both registers */
        /* Set TCU0, TMX, TKSU (for later inversion) and IXMT */
        li t1, ST0_CU0 | 0x08001c00
        or t0, t1
        /* Clear TKSU, leave IXMT */
        xori t0, 0x00001800
        mtc0 t0, CP0_TCSTATUS
        _ehb
        /* We need to leave the global IE bit set, but clear EXL...*/
        mfc0 t0, CP0_STATUS
        ori t0, ST0_EXL | ST0_ERL
        xori t0, ST0_EXL | ST0_ERL
        mtc0 t0, CP0_STATUS
#endif /* CONFIG_MIPS_MT_SMTC */
        irq_disable_hazard
        .endm

/*
 * Move to kernel mode and enable interrupts.
 * Set cp0 enable bit as sign that we're running on the kernel stack
 */
        .macro STI
#if !defined(CONFIG_MIPS_MT_SMTC)
        mfc0 t0, CP0_STATUS
        li t1, ST0_CU0 | 0x1f
        or t0, t1
        xori t0, 0x1e
        mtc0 t0, CP0_STATUS
#else /* CONFIG_MIPS_MT_SMTC */
        /*
         * For SMTC, we need to set privilege
         * and enable interrupts only for the
         * current TC, using the TCStatus register.
         */
        _ehb
        mfc0 t0, CP0_TCSTATUS
        /* Fortunately CU 0 is in the same place in both registers */
        /* Set TCU0, TKSU (for later inversion) and IXMT */
        li t1, ST0_CU0 | 0x08001c00
        or t0, t1
        /* Clear TKSU *and* IXMT */
        xori t0, 0x00001c00
        mtc0 t0, CP0_TCSTATUS
        _ehb
        /* We need to leave the global IE bit set, but clear EXL...*/
        mfc0 t0, CP0_STATUS
        ori t0, ST0_EXL
        xori t0, ST0_EXL
        mtc0 t0, CP0_STATUS
        /* irq_enable_hazard below should expand to EHB for 24K/34K cpus */
#endif /* CONFIG_MIPS_MT_SMTC */
        irq_enable_hazard
        .endm

/*
 * Just move to kernel mode and leave interrupts as they are.
 * Set cp0 enable bit as sign that we're running on the kernel stack
 */
        .macro KMODE
#ifdef CONFIG_MIPS_MT_SMTC
        /*
         * This gets baroque in SMTC. We want to
         * protect the non-atomic clearing of EXL
         * with DMT/EMT, but we don't want to take
         * an interrupt while DMT is still in effect.
         */
        /* KMODE gets invoked from both reorder and noreorder code */
        .set push
        .set mips32r2
        .set noreorder
        mfc0 v0, CP0_TCSTATUS
        andi v1, v0, TCSTATUS_IXMT
        ori v0, TCSTATUS_IXMT
        mtc0 v0, CP0_TCSTATUS
        _ehb
        DMT 2 # dmt v0
        /*
         * We don't know a priori if ra is "live"
         */
        move t0, ra
        jal mips_ihb
        nop /* delay slot */
        move ra, t0
#endif /* CONFIG_MIPS_MT_SMTC */
        mfc0 t0, CP0_STATUS
        li t1, ST0_CU0 | 0x1e
        or t0, t1
        xori t0, 0x1e
        mtc0 t0, CP0_STATUS
#ifdef CONFIG_MIPS_MT_SMTC
        _ehb
        andi v0, v0, VPECONTROL_TE
        beqz v0, 2f
        nop /* delay slot */
        emt
2:
        mfc0 v0, CP0_TCSTATUS
        /* Clear IXMT, then OR in previous value */
        ori v0, TCSTATUS_IXMT
        xori v0, TCSTATUS_IXMT
        or v0, v1, v0
        mtc0 v0, CP0_TCSTATUS
        /*
         * irq_disable_hazard below should expand to EHB
         * on 24K/34K CPUs
         */
        .set pop
#endif /* CONFIG_MIPS_MT_SMTC */
        irq_disable_hazard
        .endm

#endif /* _ASM_STACKFRAME_H */