interrupts_head.S

#define VCPU_USR_REG(_reg_nr)	(VCPU_USR_REGS + (_reg_nr * 4))
#define VCPU_USR_SP		(VCPU_USR_REG(13))
#define VCPU_USR_LR		(VCPU_USR_REG(14))
#define CP15_OFFSET(_cp15_reg_idx) (VCPU_CP15 + (_cp15_reg_idx * 4))
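/*
 * Each saved register occupies one 32-bit word, so these macros turn a
 * register index into a byte offset from the start of the vcpu struct.
 * VCPU_USR_REGS, VCPU_CP15 and the other VCPU_* constants are assumed to
 * be generated elsewhere (e.g. by asm-offsets) to match struct kvm_vcpu.
 */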
/*
 * Many of these macros need to access the VCPU structure, which is always
 * held in r0. These macros should never clobber r1, as it is used to hold the
 * exception code on the return path (except of course the macro that switches
 * all the registers before the final jump to the VM).
 */
vcpu	.req	r0		@ vcpu pointer always in r0
/* Clobbers {r2-r6} */
.macro store_vfp_state vfp_base
	@ The VFPFMRX and VFPFMXR macros are the VMRS and VMSR instructions
	VFPFMRX	r2, FPEXC
	@ Make sure VFP is enabled so we can touch the registers.
	orr	r6, r2, #FPEXC_EN
	VFPFMXR	FPEXC, r6

	VFPFMRX	r3, FPSCR
	tst	r2, #FPEXC_EX		@ Check for VFP Subarchitecture
	beq	1f
	@ If FPEXC_EX is 0, then FPINST/FPINST2 reads are unpredictable, so
	@ we only need to save them if FPEXC_EX is set.
	VFPFMRX	r4, FPINST
	tst	r2, #FPEXC_FP2V
	VFPFMRX	r5, FPINST2, ne		@ vmrsne
	bic	r6, r2, #FPEXC_EX	@ FPEXC_EX disable
	VFPFMXR	FPEXC, r6
1:
	VFPFSTMIA \vfp_base, r6		@ Save VFP registers
	stm	\vfp_base, {r2-r5}	@ Save FPEXC, FPSCR, FPINST, FPINST2
.endm
/* Assume FPEXC_EN is on and FPEXC_EX is off, clobbers {r2-r6} */
.macro restore_vfp_state vfp_base
	VFPFLDMIA \vfp_base, r6		@ Load VFP registers
	ldm	\vfp_base, {r2-r5}	@ Load FPEXC, FPSCR, FPINST, FPINST2

	VFPFMXR	FPSCR, r3
	tst	r2, #FPEXC_EX		@ Check for VFP Subarchitecture
	beq	1f
	VFPFMXR	FPINST, r4
	tst	r2, #FPEXC_FP2V
	VFPFMXR	FPINST2, r5, ne
1:
	VFPFMXR	FPEXC, r2		@ FPEXC (last, in case !EN)
.endm
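/*
 * Typical usage (a sketch only): vfp_base is a pointer to a block large
 * enough for the VFP bank plus the four control words, usually computed
 * from the vcpu pointer before invoking the macro. The VCPU_VFP_GUEST
 * offset below is an assumption for illustration, not defined in this file:
 *
 *	add	r7, vcpu, #VCPU_VFP_GUEST	@ assumed offset of guest VFP state
 *	store_vfp_state r7
 *	...
 *	restore_vfp_state r7
 */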
/* These are simply for the macros to work - the values don't have meaning */
.equ usr, 0
.equ svc, 1
.equ abt, 2
.equ und, 3
.equ irq, 4
.equ fiq, 5
.macro push_host_regs_mode mode
	mrs	r2, SP_\mode
	mrs	r3, LR_\mode
	mrs	r4, SPSR_\mode
	push	{r2, r3, r4}
.endm
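/*
 * Note: SP_<mode>, LR_<mode> and SPSR_<mode> are read with the banked-register
 * MRS/MSR forms introduced by the Virtualization Extensions, which is why each
 * mode's state can be saved here without actually switching into that mode.
 */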
/*
 * Store all host persistent registers on the stack.
 * Clobbers all registers, in all modes, except r0 and r1.
 */
.macro save_host_regs
	/* Hyp regs. Only ELR_hyp (SPSR_hyp already saved) */
	mrs	r2, ELR_hyp
	push	{r2}

	/* usr regs */
	push	{r4-r12}	@ r0-r3 are always clobbered
	mrs	r2, SP_usr
	mov	r3, lr
	push	{r2, r3}

	push_host_regs_mode svc
	push_host_regs_mode abt
	push_host_regs_mode und
	push_host_regs_mode irq

	/* fiq regs */
	mrs	r2, r8_fiq
	mrs	r3, r9_fiq
	mrs	r4, r10_fiq
	mrs	r5, r11_fiq
	mrs	r6, r12_fiq
	mrs	r7, SP_fiq
	mrs	r8, LR_fiq
	mrs	r9, SPSR_fiq
	push	{r2-r9}
.endm
.macro pop_host_regs_mode mode
	pop	{r2, r3, r4}
	msr	SP_\mode, r2
	msr	LR_\mode, r3
	msr	SPSR_\mode, r4
.endm
/*
 * Restore all host registers from the stack.
 * Clobbers all registers, in all modes, except r0 and r1.
 */
.macro restore_host_regs
	pop	{r2-r9}
	msr	r8_fiq, r2
	msr	r9_fiq, r3
	msr	r10_fiq, r4
	msr	r11_fiq, r5
	msr	r12_fiq, r6
	msr	SP_fiq, r7
	msr	LR_fiq, r8
	msr	SPSR_fiq, r9

	pop_host_regs_mode irq
	pop_host_regs_mode und
	pop_host_regs_mode abt
	pop_host_regs_mode svc

	pop	{r2, r3}
	msr	SP_usr, r2
	mov	lr, r3
	pop	{r4-r12}

	pop	{r2}
	msr	ELR_hyp, r2
.endm
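/*
 * save_host_regs and restore_host_regs are mirror images: the pops above
 * undo the pushes in exactly the reverse order, so the two macros must
 * bracket the guest run on the same Hyp stack. A minimal sketch of the
 * intended pairing in the world-switch path (comments only, illustrative):
 *
 *	save_host_regs
 *	@ ... switch state, run the guest, take the trap back to Hyp ...
 *	restore_host_regs
 */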
/*
 * Restore SP, LR and SPSR for a given mode. offset is the offset of
 * this mode's registers from the VCPU base.
 *
 * Assumes vcpu pointer in vcpu reg
 *
 * Clobbers r1, r2, r3, r4.
 */
.macro restore_guest_regs_mode mode, offset
	add	r1, vcpu, \offset
	ldm	r1, {r2, r3, r4}
	msr	SP_\mode, r2
	msr	LR_\mode, r3
	msr	SPSR_\mode, r4
.endm
/*
 * Restore all guest registers from the vcpu struct.
 *
 * Assumes vcpu pointer in vcpu reg
 *
 * Clobbers *all* registers.
 */
.macro restore_guest_regs
	restore_guest_regs_mode svc, #VCPU_SVC_REGS
	restore_guest_regs_mode abt, #VCPU_ABT_REGS
	restore_guest_regs_mode und, #VCPU_UND_REGS
	restore_guest_regs_mode irq, #VCPU_IRQ_REGS

	add	r1, vcpu, #VCPU_FIQ_REGS
	ldm	r1, {r2-r9}
	msr	r8_fiq, r2
	msr	r9_fiq, r3
	msr	r10_fiq, r4
	msr	r11_fiq, r5
	msr	r12_fiq, r6
	msr	SP_fiq, r7
	msr	LR_fiq, r8
	msr	SPSR_fiq, r9

	@ Load return state
	ldr	r2, [vcpu, #VCPU_PC]
	ldr	r3, [vcpu, #VCPU_CPSR]
	msr	ELR_hyp, r2
	msr	SPSR_cxsf, r3

	@ Load user registers
	ldr	r2, [vcpu, #VCPU_USR_SP]
	ldr	r3, [vcpu, #VCPU_USR_LR]
	msr	SP_usr, r2
	mov	lr, r3
	add	vcpu, vcpu, #(VCPU_USR_REGS)
	ldm	vcpu, {r0-r12}		@ overwrites r0: the vcpu pointer is gone after this
.endm
/*
 * Save SP, LR and SPSR for a given mode. offset is the offset of
 * this mode's registers from the VCPU base.
 *
 * Assumes vcpu pointer in vcpu reg
 *
 * Clobbers r2, r3, r4, r5.
 */
.macro save_guest_regs_mode mode, offset
	add	r2, vcpu, \offset
	mrs	r3, SP_\mode
	mrs	r4, LR_\mode
	mrs	r5, SPSR_\mode
	stm	r2, {r3, r4, r5}
.endm
/*
 * Save all guest registers to the vcpu struct.
 * Expects guest's r0, r1, r2 on the stack.
 *
 * Assumes vcpu pointer in vcpu reg
 *
 * Clobbers r2, r3, r4, r5.
 */
.macro save_guest_regs
	@ Store usr registers
	add	r2, vcpu, #VCPU_USR_REG(3)
	stm	r2, {r3-r12}

	@ r0-r2 were pushed on the stack by the exception entry code
	add	r2, vcpu, #VCPU_USR_REG(0)
	pop	{r3, r4, r5}		@ r0, r1, r2
	stm	r2, {r3, r4, r5}

	mrs	r2, SP_usr
	mov	r3, lr
	str	r2, [vcpu, #VCPU_USR_SP]
	str	r3, [vcpu, #VCPU_USR_LR]

	@ Store return state
	mrs	r2, ELR_hyp
	mrs	r3, spsr
	str	r2, [vcpu, #VCPU_PC]
	str	r3, [vcpu, #VCPU_CPSR]

	@ Store other guest registers
	save_guest_regs_mode svc, #VCPU_SVC_REGS
	save_guest_regs_mode abt, #VCPU_ABT_REGS
	save_guest_regs_mode und, #VCPU_UND_REGS
	save_guest_regs_mode irq, #VCPU_IRQ_REGS
.endm
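/*
 * Sketch of how a Hyp trap vector is expected to feed save_guest_regs: the
 * vector stub first pushes the guest's r0-r2 on the Hyp stack (so the macros
 * have scratch registers), recovers the vcpu pointer, and only then saves
 * the rest. The label below is illustrative and not part of this file:
 *
 * guest_trap_example:
 *	push	{r0, r1, r2}	@ free up scratch regs, as save_guest_regs expects
 *	load_vcpu		@ vcpu pointer back in r0
 *	save_guest_regs
 */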
/*
 * Reads cp15 registers from hardware and stores them in memory
 * @store_to_vcpu: If 0, registers are written in-order to the stack,
 *		   otherwise to the VCPU struct pointed to by the vcpu reg
 *
 * Assumes vcpu pointer in vcpu reg
 *
 * Clobbers r2 - r12
 */
.macro read_cp15_state store_to_vcpu
	mrc	p15, 0, r2, c1, c0, 0	@ SCTLR
	mrc	p15, 0, r3, c1, c0, 2	@ CPACR
	mrc	p15, 0, r4, c2, c0, 2	@ TTBCR
	mrc	p15, 0, r5, c3, c0, 0	@ DACR
	mrrc	p15, 0, r6, r7, c2	@ TTBR 0
	mrrc	p15, 1, r8, r9, c2	@ TTBR 1
	mrc	p15, 0, r10, c10, c2, 0	@ PRRR
	mrc	p15, 0, r11, c10, c2, 1	@ NMRR
	mrc	p15, 2, r12, c0, c0, 0	@ CSSELR

	.if \store_to_vcpu == 0
	push	{r2-r12}		@ Push CP15 registers
	.else
	str	r2, [vcpu, #CP15_OFFSET(c1_SCTLR)]
	str	r3, [vcpu, #CP15_OFFSET(c1_CPACR)]
	str	r4, [vcpu, #CP15_OFFSET(c2_TTBCR)]
	str	r5, [vcpu, #CP15_OFFSET(c3_DACR)]
	add	r2, vcpu, #CP15_OFFSET(c2_TTBR0)
	strd	r6, r7, [r2]
	add	r2, vcpu, #CP15_OFFSET(c2_TTBR1)
	strd	r8, r9, [r2]
	str	r10, [vcpu, #CP15_OFFSET(c10_PRRR)]
	str	r11, [vcpu, #CP15_OFFSET(c10_NMRR)]
	str	r12, [vcpu, #CP15_OFFSET(c0_CSSELR)]
	.endif

	mrc	p15, 0, r2, c13, c0, 1	@ CID
	mrc	p15, 0, r3, c13, c0, 2	@ TID_URW
	mrc	p15, 0, r4, c13, c0, 3	@ TID_URO
	mrc	p15, 0, r5, c13, c0, 4	@ TID_PRIV
	mrc	p15, 0, r6, c5, c0, 0	@ DFSR
	mrc	p15, 0, r7, c5, c0, 1	@ IFSR
	mrc	p15, 0, r8, c5, c1, 0	@ ADFSR
	mrc	p15, 0, r9, c5, c1, 1	@ AIFSR
	mrc	p15, 0, r10, c6, c0, 0	@ DFAR
	mrc	p15, 0, r11, c6, c0, 2	@ IFAR
	mrc	p15, 0, r12, c12, c0, 0	@ VBAR

	.if \store_to_vcpu == 0
	push	{r2-r12}		@ Push CP15 registers
	.else
	str	r2, [vcpu, #CP15_OFFSET(c13_CID)]
	str	r3, [vcpu, #CP15_OFFSET(c13_TID_URW)]
	str	r4, [vcpu, #CP15_OFFSET(c13_TID_URO)]
	str	r5, [vcpu, #CP15_OFFSET(c13_TID_PRIV)]
	str	r6, [vcpu, #CP15_OFFSET(c5_DFSR)]
	str	r7, [vcpu, #CP15_OFFSET(c5_IFSR)]
	str	r8, [vcpu, #CP15_OFFSET(c5_ADFSR)]
	str	r9, [vcpu, #CP15_OFFSET(c5_AIFSR)]
	str	r10, [vcpu, #CP15_OFFSET(c6_DFAR)]
	str	r11, [vcpu, #CP15_OFFSET(c6_IFAR)]
	str	r12, [vcpu, #CP15_OFFSET(c12_VBAR)]
	.endif
.endm
/*
 * Reads cp15 registers from memory and writes them to hardware
 * @read_from_vcpu: If 0, registers are read in-order from the stack,
 *		    otherwise from the VCPU struct pointed to by the vcpu reg
 *
 * Assumes vcpu pointer in vcpu reg
 */
.macro write_cp15_state read_from_vcpu
	.if \read_from_vcpu == 0
	pop	{r2-r12}
	.else
	ldr	r2, [vcpu, #CP15_OFFSET(c13_CID)]
	ldr	r3, [vcpu, #CP15_OFFSET(c13_TID_URW)]
	ldr	r4, [vcpu, #CP15_OFFSET(c13_TID_URO)]
	ldr	r5, [vcpu, #CP15_OFFSET(c13_TID_PRIV)]
	ldr	r6, [vcpu, #CP15_OFFSET(c5_DFSR)]
	ldr	r7, [vcpu, #CP15_OFFSET(c5_IFSR)]
	ldr	r8, [vcpu, #CP15_OFFSET(c5_ADFSR)]
	ldr	r9, [vcpu, #CP15_OFFSET(c5_AIFSR)]
	ldr	r10, [vcpu, #CP15_OFFSET(c6_DFAR)]
	ldr	r11, [vcpu, #CP15_OFFSET(c6_IFAR)]
	ldr	r12, [vcpu, #CP15_OFFSET(c12_VBAR)]
	.endif

	mcr	p15, 0, r2, c13, c0, 1	@ CID
	mcr	p15, 0, r3, c13, c0, 2	@ TID_URW
	mcr	p15, 0, r4, c13, c0, 3	@ TID_URO
	mcr	p15, 0, r5, c13, c0, 4	@ TID_PRIV
	mcr	p15, 0, r6, c5, c0, 0	@ DFSR
	mcr	p15, 0, r7, c5, c0, 1	@ IFSR
	mcr	p15, 0, r8, c5, c1, 0	@ ADFSR
	mcr	p15, 0, r9, c5, c1, 1	@ AIFSR
	mcr	p15, 0, r10, c6, c0, 0	@ DFAR
	mcr	p15, 0, r11, c6, c0, 2	@ IFAR
	mcr	p15, 0, r12, c12, c0, 0	@ VBAR

	.if \read_from_vcpu == 0
	pop	{r2-r12}
	.else
	ldr	r2, [vcpu, #CP15_OFFSET(c1_SCTLR)]
	ldr	r3, [vcpu, #CP15_OFFSET(c1_CPACR)]
	ldr	r4, [vcpu, #CP15_OFFSET(c2_TTBCR)]
	ldr	r5, [vcpu, #CP15_OFFSET(c3_DACR)]
	add	r12, vcpu, #CP15_OFFSET(c2_TTBR0)
	ldrd	r6, r7, [r12]
	add	r12, vcpu, #CP15_OFFSET(c2_TTBR1)
	ldrd	r8, r9, [r12]
	ldr	r10, [vcpu, #CP15_OFFSET(c10_PRRR)]
	ldr	r11, [vcpu, #CP15_OFFSET(c10_NMRR)]
	ldr	r12, [vcpu, #CP15_OFFSET(c0_CSSELR)]
	.endif

	mcr	p15, 0, r2, c1, c0, 0	@ SCTLR
	mcr	p15, 0, r3, c1, c0, 2	@ CPACR
	mcr	p15, 0, r4, c2, c0, 2	@ TTBCR
	mcr	p15, 0, r5, c3, c0, 0	@ DACR
	mcrr	p15, 0, r6, r7, c2	@ TTBR 0
	mcrr	p15, 1, r8, r9, c2	@ TTBR 1
	mcr	p15, 0, r10, c10, c2, 0	@ PRRR
	mcr	p15, 0, r11, c10, c2, 1	@ NMRR
	mcr	p15, 2, r12, c0, c0, 0	@ CSSELR
.endm
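/*
 * The two macros above are meant to be used in pairs so that the stack-based
 * and vcpu-based forms keep the same register order. A minimal sketch of the
 * expected usage on guest entry (exit is the mirror image):
 *
 *	read_cp15_state store_to_vcpu = 0	@ host cp15 state -> Hyp stack
 *	write_cp15_state read_from_vcpu = 1	@ guest cp15 state <- vcpu struct
 */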
/*
 * Save the VGIC CPU state into memory
 *
 * Assumes vcpu pointer in vcpu reg
 */
.macro save_vgic_state
.endm

/*
 * Restore the VGIC CPU state from memory
 *
 * Assumes vcpu pointer in vcpu reg
 */
.macro restore_vgic_state
.endm
.equ vmentry,	0
.equ vmexit,	1

/* Configures the HSTR (Hyp System Trap Register) on entry/return
 * (hardware reset value is 0) */
.macro set_hstr operation
	mrc	p15, 4, r2, c1, c1, 3
	ldr	r3, =HSTR_T(15)
	.if \operation == vmentry
	orr	r2, r2, r3		@ Trap CR{15}
	.else
	bic	r2, r2, r3		@ Don't trap any CRx accesses
	.endif
	mcr	p15, 4, r2, c1, c1, 3
.endm
/* Configures the HCPTR (Hyp Coprocessor Trap Register) on entry/return
 * (hardware reset value is 0). Keeps the previous value in r2. */
.macro set_hcptr operation, mask
	mrc	p15, 4, r2, c1, c1, 2
	ldr	r3, =\mask
	.if \operation == vmentry
	orr	r3, r2, r3		@ Trap coproc-accesses defined in mask
	.else
	bic	r3, r2, r3		@ Don't trap defined coproc-accesses
	.endif
	mcr	p15, 4, r3, c1, c1, 2
.endm
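/*
 * Example invocation (a sketch only; the exact mask depends on which
 * coprocessors should trap to Hyp - HCPTR_TTA and HCPTR_TCP() are assumed
 * to be defined alongside the other Hyp control bits):
 *
 *	set_hcptr vmentry, (HCPTR_TTA | HCPTR_TCP(10) | HCPTR_TCP(11))
 *	...
 *	set_hcptr vmexit, (HCPTR_TTA | HCPTR_TCP(10) | HCPTR_TCP(11))
 */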
/* Configures the HDCR (Hyp Debug Configuration Register) on entry/return
 * (hardware reset value is 0) */
.macro set_hdcr operation
	mrc	p15, 4, r2, c1, c1, 1
	ldr	r3, =(HDCR_TPM|HDCR_TPMCR)
	.if \operation == vmentry
	orr	r2, r2, r3		@ Trap some perfmon accesses
	.else
	bic	r2, r2, r3		@ Don't trap any perfmon accesses
	.endif
	mcr	p15, 4, r2, c1, c1, 1
.endm
/* Enable/Disable: stage-2 translation, trap interrupts, trap wfi, trap smc */
.macro configure_hyp_role operation
	mrc	p15, 4, r2, c1, c1, 0	@ HCR
	bic	r2, r2, #HCR_VIRT_EXCP_MASK
	ldr	r3, =HCR_GUEST_MASK
	.if \operation == vmentry
	orr	r2, r2, r3
	ldr	r3, [vcpu, #VCPU_IRQ_LINES]
	orr	r2, r2, r3
	.else
	bic	r2, r2, r3
	.endif
	mcr	p15, 4, r2, c1, c1, 0
.endm
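/*
 * load_vcpu below recovers the vcpu pointer from HTPIDR, the Hyp software
 * thread ID register, which the world-switch code is assumed to have loaded
 * with the current vcpu pointer before entering the guest. This keeps r0
 * free as a scratch register in the trap vectors until the pointer is needed.
 */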
.macro load_vcpu
	mrc	p15, 4, vcpu, c13, c0, 2	@ HTPIDR
.endm