# break.S — FR-V break interrupt handling (listing header from page extraction)

# (line-number gutter residue from page extraction removed)
  1. /* break.S: Break interrupt handling (kept separate from entry.S)
  2. *
  3. * Copyright (C) 2003 Red Hat, Inc. All Rights Reserved.
  4. * Written by David Howells (dhowells@redhat.com)
  5. *
  6. * This program is free software; you can redistribute it and/or
  7. * modify it under the terms of the GNU General Public License
  8. * as published by the Free Software Foundation; either version
  9. * 2 of the License, or (at your option) any later version.
  10. */
  11. #include <linux/sys.h>
  12. #include <linux/linkage.h>
  13. #include <asm/setup.h>
  14. #include <asm/segment.h>
  15. #include <asm/ptrace.h>
  16. #include <asm/spr-regs.h>
  17. #include <asm/errno.h>
  18. #
  19. # the break handler has its own stack
  20. #
  21. .section .bss.stack
  22. .globl __break_user_context
  23. .balign 8192
  24. __break_stack:
  25. .space (8192 - (USER_CONTEXT_SIZE + REG__DEBUG_XTRA)) & ~7
  26. __break_stack_tos:
  27. .space REG__DEBUG_XTRA
  28. __break_user_context:
  29. .space USER_CONTEXT_SIZE
  30. #
  31. # miscellaneous variables
  32. #
  33. .section .bss
  34. #ifdef CONFIG_MMU
  35. .globl __break_tlb_miss_real_return_info
  36. __break_tlb_miss_real_return_info:
  37. .balign 8
  38. .space 2*4 /* saved PCSR, PSR for TLB-miss handler fixup */
  39. #endif
  40. __break_trace_through_exceptions:
  41. .space 4
  42. #define CS2_ECS1 0xe1200000
  43. #define CS2_USERLED 0x4
  44. .macro LEDS val,reg
  45. # sethi.p %hi(CS2_ECS1+CS2_USERLED),gr30
  46. # setlo %lo(CS2_ECS1+CS2_USERLED),gr30
  47. # setlos #~\val,\reg
  48. # st \reg,@(gr30,gr0)
  49. # setlos #0x5555,\reg
  50. # sethi.p %hi(0xffc00100),gr30
  51. # setlo %lo(0xffc00100),gr30
  52. # sth \reg,@(gr30,gr0)
  53. # membar
  54. .endm
  55. ###############################################################################
  56. #
  57. # entry point for Break Exceptions/Interrupts
  58. #
  59. ###############################################################################
  60. .text
  61. .balign 4
  62. .globl __entry_break
  63. __entry_break:
  64. #ifdef CONFIG_MMU
  65. movgs gr31,scr3
  66. #endif
  67. LEDS 0x1001,gr31
  68. sethi.p %hi(__break_user_context),gr31
  69. setlo %lo(__break_user_context),gr31
  70. stdi gr2,@(gr31,#REG_GR(2))
  71. movsg ccr,gr3
  72. sti gr3,@(gr31,#REG_CCR)
  73. # catch the return from a TLB-miss handler that had single-step disabled
  74. # traps will be enabled, so we have to do this now
  75. #ifdef CONFIG_MMU
  76. movsg bpcsr,gr3
  77. sethi.p %hi(__break_tlb_miss_return_breaks_here),gr2
  78. setlo %lo(__break_tlb_miss_return_breaks_here),gr2
  79. subcc gr2,gr3,gr0,icc0
  80. beq icc0,#2,__break_return_singlestep_tlbmiss
  81. #endif
  82. # determine whether we have stepped through into an exception
  83. # - we need to take special action to suspend h/w single stepping if we've done
  84. # that, so that the gdbstub doesn't get bogged down endlessly stepping through
  85. # external interrupt handling
  86. movsg bpsr,gr3
  87. andicc gr3,#BPSR_BET,gr0,icc0
  88. bne icc0,#2,__break_maybe_userspace /* jump if PSR.ET was 1 */
  89. LEDS 0x1003,gr2
  90. movsg brr,gr3
  91. andicc gr3,#BRR_ST,gr0,icc0
  92. andicc.p gr3,#BRR_SB,gr0,icc1
  93. bne icc0,#2,__break_step /* jump if single-step caused break */
  94. beq icc1,#2,__break_continue /* jump if BREAK didn't cause break */
  95. LEDS 0x1007,gr2
  96. # handle special breaks
  97. movsg bpcsr,gr3
  98. sethi.p %hi(__entry_return_singlestep_breaks_here),gr2
  99. setlo %lo(__entry_return_singlestep_breaks_here),gr2
  100. subcc gr2,gr3,gr0,icc0
  101. beq icc0,#2,__break_return_singlestep
  102. bra __break_continue
  103. ###############################################################################
  104. #
  105. # handle BREAK instruction in kernel-mode exception epilogue
  106. #
  107. ###############################################################################
  108. __break_return_singlestep:
  109. LEDS 0x100f,gr2
  110. # special break insn requests single-stepping to be turned back on
  111. # HERE RETT
  112. # PSR.ET 0 0
  113. # PSR.PS old PSR.S ?
  114. # PSR.S 1 1
  115. # BPSR.ET 0 1 (can't have caused orig excep otherwise)
  116. # BPSR.BS 1 old PSR.S
  117. movsg dcr,gr2
  118. sethi.p %hi(DCR_SE),gr3
  119. setlo %lo(DCR_SE),gr3
  120. or gr2,gr3,gr2
  121. movgs gr2,dcr
  122. movsg psr,gr2
  123. andi gr2,#PSR_PS,gr2
  124. slli gr2,#11,gr2 /* PSR.PS -> BPSR.BS */
  125. ori gr2,#BPSR_BET,gr2 /* 1 -> BPSR.BET */
  126. movgs gr2,bpsr
  127. # return to the invoker of the original kernel exception
  128. movsg pcsr,gr2
  129. movgs gr2,bpcsr
  130. LEDS 0x101f,gr2
  131. ldi @(gr31,#REG_CCR),gr3
  132. movgs gr3,ccr
  133. lddi.p @(gr31,#REG_GR(2)),gr2
  134. xor gr31,gr31,gr31
  135. movgs gr0,brr
  136. #ifdef CONFIG_MMU
  137. movsg scr3,gr31
  138. #endif
  139. rett #1
  140. ###############################################################################
  141. #
  142. # handle BREAK instruction in TLB-miss handler return path
  143. #
  144. ###############################################################################
  145. #ifdef CONFIG_MMU
  146. __break_return_singlestep_tlbmiss:
  147. LEDS 0x1100,gr2
  148. sethi.p %hi(__break_tlb_miss_real_return_info),gr3
  149. setlo %lo(__break_tlb_miss_real_return_info),gr3
  150. lddi @(gr3,#0),gr2
  151. movgs gr2,pcsr
  152. movgs gr3,psr
  153. bra __break_return_singlestep
  154. #endif
  155. ###############################################################################
  156. #
  157. # handle single stepping into an exception prologue from kernel mode
  158. # - we try and catch it whilst it is still in the main vector table
  159. # - if we catch it there, we have to jump to the fixup handler
  160. # - there is a fixup table that has a pointer for every 16b slot in the trap
  161. # table
  162. #
  163. ###############################################################################
  164. __break_step:
  165. LEDS 0x2003,gr2
  166. # external interrupts seem to escape from the trap table before single
  167. # step catches up with them
  168. movsg bpcsr,gr2
  169. sethi.p %hi(__entry_kernel_external_interrupt),gr3
  170. setlo %lo(__entry_kernel_external_interrupt),gr3
  171. subcc.p gr2,gr3,gr0,icc0
  172. sethi %hi(__entry_uspace_external_interrupt),gr3
  173. setlo.p %lo(__entry_uspace_external_interrupt),gr3
  174. beq icc0,#2,__break_step_kernel_external_interrupt
  175. subcc.p gr2,gr3,gr0,icc0
  176. sethi %hi(__entry_kernel_external_interrupt_virtually_disabled),gr3
  177. setlo.p %lo(__entry_kernel_external_interrupt_virtually_disabled),gr3
  178. beq icc0,#2,__break_step_uspace_external_interrupt
  179. subcc.p gr2,gr3,gr0,icc0
  180. sethi %hi(__entry_kernel_external_interrupt_virtual_reenable),gr3
  181. setlo.p %lo(__entry_kernel_external_interrupt_virtual_reenable),gr3
  182. beq icc0,#2,__break_step_kernel_external_interrupt_virtually_disabled
  183. subcc gr2,gr3,gr0,icc0
  184. beq icc0,#2,__break_step_kernel_external_interrupt_virtual_reenable
  185. LEDS 0x2007,gr2
  186. # the two main vector tables are adjacent on one 8Kb slab
  187. movsg bpcsr,gr2
  188. setlos #0xffffe000,gr3
  189. and gr2,gr3,gr2
  190. sethi.p %hi(__trap_tables),gr3
  191. setlo %lo(__trap_tables),gr3
  192. subcc gr2,gr3,gr0,icc0
  193. bne icc0,#2,__break_continue
  194. LEDS 0x200f,gr2
  195. # skip workaround if so requested by GDB
  196. sethi.p %hi(__break_trace_through_exceptions),gr3
  197. setlo %lo(__break_trace_through_exceptions),gr3
  198. ld @(gr3,gr0),gr3
  199. subcc gr3,gr0,gr0,icc0
  200. bne icc0,#0,__break_continue
  201. LEDS 0x201f,gr2
  202. # access the fixup table - there's a 1:1 mapping between the slots in the trap tables and
  203. # the slots in the trap fixup tables allowing us to simply divide the offset into the
  204. # former by 4 to access the latter
  205. sethi.p %hi(__trap_tables),gr3
  206. setlo %lo(__trap_tables),gr3
  207. movsg bpcsr,gr2
  208. sub gr2,gr3,gr2
  209. srli.p gr2,#2,gr2
  210. sethi %hi(__trap_fixup_tables),gr3
  211. setlo.p %lo(__trap_fixup_tables),gr3
  212. andi gr2,#~3,gr2
  213. ld @(gr2,gr3),gr2
  214. jmpil @(gr2,#0)
  215. # step through an internal exception from kernel mode
  216. .globl __break_step_kernel_softprog_interrupt
  217. __break_step_kernel_softprog_interrupt:
  218. sethi.p %hi(__entry_kernel_softprog_interrupt_reentry),gr3
  219. setlo %lo(__entry_kernel_softprog_interrupt_reentry),gr3
  220. bra __break_return_as_kernel_prologue
  221. # step through an external interrupt from kernel mode
  222. .globl __break_step_kernel_external_interrupt
  223. __break_step_kernel_external_interrupt:
  224. # deal with virtual interrupt disablement
  225. beq icc2,#0,__break_step_kernel_external_interrupt_virtually_disabled
  226. sethi.p %hi(__entry_kernel_external_interrupt_reentry),gr3
  227. setlo %lo(__entry_kernel_external_interrupt_reentry),gr3
  228. __break_return_as_kernel_prologue:
  229. LEDS 0x203f,gr2
  230. movgs gr3,bpcsr
  231. # do the bit we had to skip
  232. #ifdef CONFIG_MMU
  233. movsg ear0,gr2 /* EAR0 can get clobbered by gdb-stub (ICI/ICEI) */
  234. movgs gr2,scr2
  235. #endif
  236. or.p sp,gr0,gr2 /* set up the stack pointer */
  237. subi sp,#REG__END,sp
  238. sti.p gr2,@(sp,#REG_SP)
  239. setlos #REG__STATUS_STEP,gr2
  240. sti gr2,@(sp,#REG__STATUS) /* record single step status */
  241. # cancel single-stepping mode
  242. movsg dcr,gr2
  243. sethi.p %hi(~DCR_SE),gr3
  244. setlo %lo(~DCR_SE),gr3
  245. and gr2,gr3,gr2
  246. movgs gr2,dcr
  247. LEDS 0x207f,gr2
  248. ldi @(gr31,#REG_CCR),gr3
  249. movgs gr3,ccr
  250. lddi.p @(gr31,#REG_GR(2)),gr2
  251. xor gr31,gr31,gr31
  252. movgs gr0,brr
  253. #ifdef CONFIG_MMU
  254. movsg scr3,gr31
  255. #endif
  256. rett #1
  257. # we single-stepped into an interrupt handler whilst interrupts were merely virtually disabled
  258. # need to really disable interrupts, set flag, fix up and return
  259. __break_step_kernel_external_interrupt_virtually_disabled:
  260. movsg psr,gr2
  261. andi gr2,#~PSR_PIL,gr2
  262. ori gr2,#PSR_PIL_14,gr2 /* debugging interrupts only */
  263. movgs gr2,psr
  264. ldi @(gr31,#REG_CCR),gr3
  265. movgs gr3,ccr
  266. subcc.p gr0,gr0,gr0,icc2 /* leave Z set, clear C */
  267. # exceptions must've been enabled and we must've been in supervisor mode
  268. setlos BPSR_BET|BPSR_BS,gr3
  269. movgs gr3,bpsr
  270. # return to where the interrupt happened
  271. movsg pcsr,gr2
  272. movgs gr2,bpcsr
  273. lddi.p @(gr31,#REG_GR(2)),gr2
  274. xor gr31,gr31,gr31
  275. movgs gr0,brr
  276. #ifdef CONFIG_MMU
  277. movsg scr3,gr31
  278. #endif
  279. rett #1
  280. # we stepped through into the virtual interrupt reenablement trap
  281. #
  282. # we also want to single step anyway, but after fixing up so that we get an event on the
  283. # instruction after the broken-into exception returns
  284. .globl __break_step_kernel_external_interrupt_virtual_reenable
  285. __break_step_kernel_external_interrupt_virtual_reenable:
  286. movsg psr,gr2
  287. andi gr2,#~PSR_PIL,gr2
  288. movgs gr2,psr
  289. ldi @(gr31,#REG_CCR),gr3
  290. movgs gr3,ccr
  291. subicc gr0,#1,gr0,icc2 /* clear Z, set C */
  292. # save the adjusted ICC2
  293. movsg ccr,gr3
  294. sti gr3,@(gr31,#REG_CCR)
  295. # exceptions must've been enabled and we must've been in supervisor mode
  296. setlos BPSR_BET|BPSR_BS,gr3
  297. movgs gr3,bpsr
  298. # return to where the trap happened
  299. movsg pcsr,gr2
  300. movgs gr2,bpcsr
  301. # and then process the single step
  302. bra __break_continue
  303. # step through an internal exception from uspace mode
  304. .globl __break_step_uspace_softprog_interrupt
  305. __break_step_uspace_softprog_interrupt:
  306. sethi.p %hi(__entry_uspace_softprog_interrupt_reentry),gr3
  307. setlo %lo(__entry_uspace_softprog_interrupt_reentry),gr3
  308. bra __break_return_as_uspace_prologue
  309. # step through an external interrupt from kernel mode
  310. .globl __break_step_uspace_external_interrupt
  311. __break_step_uspace_external_interrupt:
  312. sethi.p %hi(__entry_uspace_external_interrupt_reentry),gr3
  313. setlo %lo(__entry_uspace_external_interrupt_reentry),gr3
  314. __break_return_as_uspace_prologue:
  315. LEDS 0x20ff,gr2
  316. movgs gr3,bpcsr
  317. # do the bit we had to skip
  318. sethi.p %hi(__kernel_frame0_ptr),gr28
  319. setlo %lo(__kernel_frame0_ptr),gr28
  320. ldi.p @(gr28,#0),gr28
  321. setlos #REG__STATUS_STEP,gr2
  322. sti gr2,@(gr28,#REG__STATUS) /* record single step status */
  323. # cancel single-stepping mode
  324. movsg dcr,gr2
  325. sethi.p %hi(~DCR_SE),gr3
  326. setlo %lo(~DCR_SE),gr3
  327. and gr2,gr3,gr2
  328. movgs gr2,dcr
  329. LEDS 0x20fe,gr2
  330. ldi @(gr31,#REG_CCR),gr3
  331. movgs gr3,ccr
  332. lddi.p @(gr31,#REG_GR(2)),gr2
  333. xor gr31,gr31,gr31
  334. movgs gr0,brr
  335. #ifdef CONFIG_MMU
  336. movsg scr3,gr31
  337. #endif
  338. rett #1
  339. #ifdef CONFIG_MMU
  340. # step through an ITLB-miss handler from user mode
  341. .globl __break_user_insn_tlb_miss
  342. __break_user_insn_tlb_miss:
  343. # we'll want to try the trap stub again
  344. sethi.p %hi(__trap_user_insn_tlb_miss),gr2
  345. setlo %lo(__trap_user_insn_tlb_miss),gr2
  346. movgs gr2,bpcsr
  347. __break_tlb_miss_common:
  348. LEDS 0x2101,gr2
  349. # cancel single-stepping mode
  350. movsg dcr,gr2
  351. sethi.p %hi(~DCR_SE),gr3
  352. setlo %lo(~DCR_SE),gr3
  353. and gr2,gr3,gr2
  354. movgs gr2,dcr
  355. # we'll swap the real return address for one with a BREAK insn so that we can re-enable
  356. # single stepping on return
  357. movsg pcsr,gr2
  358. sethi.p %hi(__break_tlb_miss_real_return_info),gr3
  359. setlo %lo(__break_tlb_miss_real_return_info),gr3
  360. sti gr2,@(gr3,#0)
  361. sethi.p %hi(__break_tlb_miss_return_break),gr2
  362. setlo %lo(__break_tlb_miss_return_break),gr2
  363. movgs gr2,pcsr
  364. # we also have to fudge PSR because the return BREAK is in kernel space and we want
  365. # to get a BREAK fault not an access violation should the return be to userspace
  366. movsg psr,gr2
  367. sti.p gr2,@(gr3,#4)
  368. ori gr2,#PSR_PS,gr2
  369. movgs gr2,psr
  370. LEDS 0x2102,gr2
  371. ldi @(gr31,#REG_CCR),gr3
  372. movgs gr3,ccr
  373. lddi @(gr31,#REG_GR(2)),gr2
  374. movsg scr3,gr31
  375. movgs gr0,brr
  376. rett #1
  377. # step through a DTLB-miss handler from user mode
  378. .globl __break_user_data_tlb_miss
  379. __break_user_data_tlb_miss:
  380. # we'll want to try the trap stub again
  381. sethi.p %hi(__trap_user_data_tlb_miss),gr2
  382. setlo %lo(__trap_user_data_tlb_miss),gr2
  383. movgs gr2,bpcsr
  384. bra __break_tlb_miss_common
  385. # step through an ITLB-miss handler from kernel mode
  386. .globl __break_kernel_insn_tlb_miss
  387. __break_kernel_insn_tlb_miss:
  388. # we'll want to try the trap stub again
  389. sethi.p %hi(__trap_kernel_insn_tlb_miss),gr2
  390. setlo %lo(__trap_kernel_insn_tlb_miss),gr2
  391. movgs gr2,bpcsr
  392. bra __break_tlb_miss_common
  393. # step through a DTLB-miss handler from kernel mode
  394. .globl __break_kernel_data_tlb_miss
  395. __break_kernel_data_tlb_miss:
  396. # we'll want to try the trap stub again
  397. sethi.p %hi(__trap_kernel_data_tlb_miss),gr2
  398. setlo %lo(__trap_kernel_data_tlb_miss),gr2
  399. movgs gr2,bpcsr
  400. bra __break_tlb_miss_common
  401. #endif
  402. ###############################################################################
  403. #
  404. # handle debug events originating with userspace
  405. #
  406. ###############################################################################
  407. __break_maybe_userspace:
  408. LEDS 0x3003,gr2
  409. setlos #BPSR_BS,gr2
  410. andcc gr3,gr2,gr0,icc0
  411. bne icc0,#0,__break_continue /* skip if PSR.S was 1 */
  412. movsg brr,gr2
  413. andicc gr2,#BRR_ST|BRR_SB,gr0,icc0
  414. beq icc0,#0,__break_continue /* jump if not BREAK or single-step */
  415. LEDS 0x3007,gr2
  416. # do the first part of the exception prologue here
  417. sethi.p %hi(__kernel_frame0_ptr),gr28
  418. setlo %lo(__kernel_frame0_ptr),gr28
  419. ldi @(gr28,#0),gr28
  420. andi gr28,#~7,gr28
  421. # set up the kernel stack pointer
  422. sti sp ,@(gr28,#REG_SP)
  423. ori gr28,0,sp
  424. sti gr0 ,@(gr28,#REG_GR(28))
  425. stdi gr20,@(gr28,#REG_GR(20))
  426. stdi gr22,@(gr28,#REG_GR(22))
  427. movsg tbr,gr20
  428. movsg bpcsr,gr21
  429. movsg psr,gr22
  430. # determine the exception type and cancel single-stepping mode
  431. or gr0,gr0,gr23
  432. movsg dcr,gr2
  433. sethi.p %hi(DCR_SE),gr3
  434. setlo %lo(DCR_SE),gr3
  435. andcc gr2,gr3,gr0,icc0
  436. beq icc0,#0,__break_no_user_sstep /* must have been a BREAK insn */
  437. not gr3,gr3
  438. and gr2,gr3,gr2
  439. movgs gr2,dcr
  440. ori gr23,#REG__STATUS_STEP,gr23
  441. __break_no_user_sstep:
  442. LEDS 0x300f,gr2
  443. movsg brr,gr2
  444. andi gr2,#BRR_ST|BRR_SB,gr2
  445. slli gr2,#1,gr2
  446. or gr23,gr2,gr23
  447. sti.p gr23,@(gr28,#REG__STATUS) /* record single step status */
  448. # adjust the value acquired from TBR - this indicates the exception
  449. setlos #~TBR_TT,gr2
  450. and.p gr20,gr2,gr20
  451. setlos #TBR_TT_BREAK,gr2
  452. or.p gr20,gr2,gr20
  453. # fudge PSR.PS and BPSR.BS to return to kernel mode through the trap
  454. # table as trap 126
  455. andi gr22,#~PSR_PS,gr22 /* PSR.PS should be 0 */
  456. movgs gr22,psr
  457. setlos #BPSR_BS,gr2 /* BPSR.BS should be 1 and BPSR.BET 0 */
  458. movgs gr2,bpsr
  459. # return through remainder of the exception prologue
  460. # - need to load gr23 with return handler address
  461. sethi.p %hi(__entry_return_from_user_exception),gr23
  462. setlo %lo(__entry_return_from_user_exception),gr23
  463. sethi.p %hi(__entry_common),gr3
  464. setlo %lo(__entry_common),gr3
  465. movgs gr3,bpcsr
  466. LEDS 0x301f,gr2
  467. ldi @(gr31,#REG_CCR),gr3
  468. movgs gr3,ccr
  469. lddi.p @(gr31,#REG_GR(2)),gr2
  470. xor gr31,gr31,gr31
  471. movgs gr0,brr
  472. #ifdef CONFIG_MMU
  473. movsg scr3,gr31
  474. #endif
  475. rett #1
  476. ###############################################################################
  477. #
  478. # resume normal debug-mode entry
  479. #
  480. ###############################################################################
  481. __break_continue:
  482. LEDS 0x4003,gr2
  483. # set up the kernel stack pointer
  484. sti sp,@(gr31,#REG_SP)
  485. sethi.p %hi(__break_stack_tos),sp
  486. setlo %lo(__break_stack_tos),sp
  487. # finish building the exception frame
  488. stdi gr4 ,@(gr31,#REG_GR(4))
  489. stdi gr6 ,@(gr31,#REG_GR(6))
  490. stdi gr8 ,@(gr31,#REG_GR(8))
  491. stdi gr10,@(gr31,#REG_GR(10))
  492. stdi gr12,@(gr31,#REG_GR(12))
  493. stdi gr14,@(gr31,#REG_GR(14))
  494. stdi gr16,@(gr31,#REG_GR(16))
  495. stdi gr18,@(gr31,#REG_GR(18))
  496. stdi gr20,@(gr31,#REG_GR(20))
  497. stdi gr22,@(gr31,#REG_GR(22))
  498. stdi gr24,@(gr31,#REG_GR(24))
  499. stdi gr26,@(gr31,#REG_GR(26))
  500. sti gr0 ,@(gr31,#REG_GR(28)) /* NULL frame pointer */
  501. sti gr29,@(gr31,#REG_GR(29))
  502. sti gr30,@(gr31,#REG_GR(30))
  503. sti gr8 ,@(gr31,#REG_ORIG_GR8)
  504. #ifdef CONFIG_MMU
  505. movsg scr3,gr19
  506. sti gr19,@(gr31,#REG_GR(31))
  507. #endif
  508. movsg bpsr ,gr19
  509. movsg tbr ,gr20
  510. movsg bpcsr,gr21
  511. movsg psr ,gr22
  512. movsg isr ,gr23
  513. movsg cccr ,gr25
  514. movsg lr ,gr26
  515. movsg lcr ,gr27
  516. andi.p gr22,#~(PSR_S|PSR_ET),gr5 /* rebuild PSR */
  517. andi gr19,#PSR_ET,gr4
  518. or.p gr4,gr5,gr5
  519. srli gr19,#10,gr4
  520. andi gr4,#PSR_S,gr4
  521. or.p gr4,gr5,gr5
  522. setlos #-1,gr6
  523. sti gr20,@(gr31,#REG_TBR)
  524. sti gr21,@(gr31,#REG_PC)
  525. sti gr5 ,@(gr31,#REG_PSR)
  526. sti gr23,@(gr31,#REG_ISR)
  527. sti gr25,@(gr31,#REG_CCCR)
  528. stdi gr26,@(gr31,#REG_LR)
  529. sti gr6 ,@(gr31,#REG_SYSCALLNO)
  530. # store CPU-specific regs
  531. movsg iacc0h,gr4
  532. movsg iacc0l,gr5
  533. stdi gr4,@(gr31,#REG_IACC0)
  534. movsg gner0,gr4
  535. movsg gner1,gr5
  536. stdi gr4,@(gr31,#REG_GNER0)
  537. # build the debug register frame
  538. movsg brr,gr4
  539. movgs gr0,brr
  540. movsg nmar,gr5
  541. movsg dcr,gr6
  542. stdi gr4 ,@(gr31,#REG_BRR)
  543. sti gr19,@(gr31,#REG_BPSR)
  544. sti.p gr6 ,@(gr31,#REG_DCR)
  545. # trap exceptions during break handling and disable h/w breakpoints/watchpoints
  546. sethi %hi(DCR_EBE),gr5
  547. setlo.p %lo(DCR_EBE),gr5
  548. sethi %hi(__entry_breaktrap_table),gr4
  549. setlo %lo(__entry_breaktrap_table),gr4
  550. movgs gr5,dcr
  551. movgs gr4,tbr
  552. # set up kernel global registers
  553. sethi.p %hi(__kernel_current_task),gr5
  554. setlo %lo(__kernel_current_task),gr5
  555. ld @(gr5,gr0),gr29
  556. ldi.p @(gr29,#4),gr15 ; __current_thread_info = current->thread_info
  557. sethi %hi(_gp),gr16
  558. setlo.p %lo(_gp),gr16
  559. # make sure we (the kernel) get div-zero and misalignment exceptions
  560. setlos #ISR_EDE|ISR_DTT_DIVBYZERO|ISR_EMAM_EXCEPTION,gr5
  561. movgs gr5,isr
  562. # enter the GDB stub
  563. LEDS 0x4007,gr2
  564. or.p gr0,gr0,fp
  565. call debug_stub
  566. LEDS 0x403f,gr2
  567. # return from break
  568. lddi @(gr31,#REG_IACC0),gr4
  569. movgs gr4,iacc0h
  570. movgs gr5,iacc0l
  571. lddi @(gr31,#REG_GNER0),gr4
  572. movgs gr4,gner0
  573. movgs gr5,gner1
  574. lddi @(gr31,#REG_LR) ,gr26
  575. lddi @(gr31,#REG_CCR) ,gr24
  576. lddi @(gr31,#REG_PSR) ,gr22
  577. ldi @(gr31,#REG_PC) ,gr21
  578. ldi @(gr31,#REG_TBR) ,gr20
  579. ldi.p @(gr31,#REG_DCR) ,gr6
  580. andi gr22,#PSR_S,gr19 /* rebuild BPSR */
  581. andi.p gr22,#PSR_ET,gr5
  582. slli gr19,#10,gr19
  583. or gr5,gr19,gr19
  584. movgs gr6 ,dcr
  585. movgs gr19,bpsr
  586. movgs gr20,tbr
  587. movgs gr21,bpcsr
  588. movgs gr23,isr
  589. movgs gr24,ccr
  590. movgs gr25,cccr
  591. movgs gr26,lr
  592. movgs gr27,lcr
  593. LEDS 0x407f,gr2
  594. #ifdef CONFIG_MMU
  595. ldi @(gr31,#REG_GR(31)),gr2
  596. movgs gr2,scr3
  597. #endif
  598. ldi @(gr31,#REG_GR(30)),gr30
  599. ldi @(gr31,#REG_GR(29)),gr29
  600. lddi @(gr31,#REG_GR(26)),gr26
  601. lddi @(gr31,#REG_GR(24)),gr24
  602. lddi @(gr31,#REG_GR(22)),gr22
  603. lddi @(gr31,#REG_GR(20)),gr20
  604. lddi @(gr31,#REG_GR(18)),gr18
  605. lddi @(gr31,#REG_GR(16)),gr16
  606. lddi @(gr31,#REG_GR(14)),gr14
  607. lddi @(gr31,#REG_GR(12)),gr12
  608. lddi @(gr31,#REG_GR(10)),gr10
  609. lddi @(gr31,#REG_GR(8)) ,gr8
  610. lddi @(gr31,#REG_GR(6)) ,gr6
  611. lddi @(gr31,#REG_GR(4)) ,gr4
  612. lddi @(gr31,#REG_GR(2)) ,gr2
  613. ldi.p @(gr31,#REG_SP) ,sp
  614. xor gr31,gr31,gr31
  615. movgs gr0,brr
  616. #ifdef CONFIG_MMU
  617. movsg scr3,gr31
  618. #endif
  619. rett #1
  620. ###################################################################################################
  621. #
  622. # GDB stub "system calls"
  623. #
  624. ###################################################################################################
  625. #ifdef CONFIG_GDBSTUB
  626. # void gdbstub_console_write(struct console *con, const char *p, unsigned n)
  627. .globl gdbstub_console_write
  628. gdbstub_console_write:
  629. break
  630. bralr
  631. #endif
  632. # GDB stub BUG() trap
  633. # GR8 is the proposed signal number
  634. .globl __debug_bug_trap
  635. __debug_bug_trap:
  636. break
  637. bralr
  638. # transfer kernel exeception to GDB for handling
  639. .globl __break_hijack_kernel_event
  640. __break_hijack_kernel_event:
  641. break
  642. .globl __break_hijack_kernel_event_breaks_here
  643. __break_hijack_kernel_event_breaks_here:
  644. nop
  645. #ifdef CONFIG_MMU
  646. # handle a return from TLB-miss that requires single-step reactivation
  647. .globl __break_tlb_miss_return_break
  648. __break_tlb_miss_return_break:
  649. break
  650. __break_tlb_miss_return_breaks_here:
  651. nop
  652. #endif
  653. # guard the first .text label in the next file from confusion
  654. nop