interrupts_head.S

#include <linux/irqchip/arm-gic.h>

#define VCPU_USR_REG(_reg_nr) (VCPU_USR_REGS + (_reg_nr * 4))
#define VCPU_USR_SP (VCPU_USR_REG(13))
#define VCPU_USR_LR (VCPU_USR_REG(14))
#define CP15_OFFSET(_cp15_reg_idx) (VCPU_CP15 + (_cp15_reg_idx * 4))

/*
 * Many of these macros need to access the VCPU structure, which is always
 * held in r0. These macros should never clobber r1, as it is used to hold the
 * exception code on the return path (except of course the macro that switches
 * all the registers before the final jump to the VM).
 */
vcpu    .req    r0              @ vcpu pointer always in r0

/* Clobbers {r2-r6} */
.macro store_vfp_state vfp_base
        @ The VFPFMRX and VFPFMXR macros are the VMRS and VMSR instructions
        VFPFMRX r2, FPEXC
        @ Make sure VFP is enabled so we can touch the registers.
        orr r6, r2, #FPEXC_EN
        VFPFMXR FPEXC, r6

        VFPFMRX r3, FPSCR
        tst r2, #FPEXC_EX @ Check for VFP Subarchitecture
        beq 1f
        @ If FPEXC_EX is 0, then FPINST/FPINST2 reads are unpredictable, so
        @ we only need to save them if FPEXC_EX is set.
        VFPFMRX r4, FPINST
        tst r2, #FPEXC_FP2V
        VFPFMRX r5, FPINST2, ne @ vmrsne
        bic r6, r2, #FPEXC_EX @ FPEXC_EX disable
        VFPFMXR FPEXC, r6
1:
        VFPFSTMIA \vfp_base, r6 @ Save VFP registers
        stm \vfp_base, {r2-r5} @ Save FPEXC, FPSCR, FPINST, FPINST2
.endm

/* Assume FPEXC_EN is on and FPEXC_EX is off, clobbers {r2-r6} */
.macro restore_vfp_state vfp_base
        VFPFLDMIA \vfp_base, r6 @ Load VFP registers
        ldm \vfp_base, {r2-r5} @ Load FPEXC, FPSCR, FPINST, FPINST2

        VFPFMXR FPSCR, r3
        tst r2, #FPEXC_EX @ Check for VFP Subarchitecture
        beq 1f
        VFPFMXR FPINST, r4
        tst r2, #FPEXC_FP2V
        VFPFMXR FPINST2, r5, ne
1:
        VFPFMXR FPEXC, r2 @ FPEXC (last, in case !EN)
.endm
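
/*
 * Hedged usage sketch (the caller lives elsewhere, e.g. interrupts.S): a
 * lazy VFP switch would pair the two macros above. VCPU_VFP_GUEST and
 * VCPU_VFP_HOST are illustrative offset names here, not guaranteed:
 *
 *      add r7, vcpu, #VCPU_VFP_GUEST   @ hypothetical guest VFP save area
 *      store_vfp_state r7              @ stash the guest VFP state
 *      ldr r7, [vcpu, #VCPU_VFP_HOST]  @ hypothetical pointer to host state
 *      restore_vfp_state r7            @ bring the host VFP state back
 */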

/* These are simply for the macros to work - the values don't have meaning */
.equ usr, 0
.equ svc, 1
.equ abt, 2
.equ und, 3
.equ irq, 4
.equ fiq, 5

.macro push_host_regs_mode mode
        mrs r2, SP_\mode
        mrs r3, LR_\mode
        mrs r4, SPSR_\mode
        push {r2, r3, r4}
.endm

/*
 * Store all host persistent registers on the stack.
 * Clobbers all registers, in all modes, except r0 and r1.
 */
.macro save_host_regs
        /* Hyp regs. Only ELR_hyp (SPSR_hyp already saved) */
        mrs r2, ELR_hyp
        push {r2}

        /* usr regs */
        push {r4-r12} @ r0-r3 are always clobbered
        mrs r2, SP_usr
        mov r3, lr
        push {r2, r3}

        push_host_regs_mode svc
        push_host_regs_mode abt
        push_host_regs_mode und
        push_host_regs_mode irq

        /* fiq regs */
        mrs r2, r8_fiq
        mrs r3, r9_fiq
        mrs r4, r10_fiq
        mrs r5, r11_fiq
        mrs r6, r12_fiq
        mrs r7, SP_fiq
        mrs r8, LR_fiq
        mrs r9, SPSR_fiq
        push {r2-r9}
.endm

.macro pop_host_regs_mode mode
        pop {r2, r3, r4}
        msr SP_\mode, r2
        msr LR_\mode, r3
        msr SPSR_\mode, r4
.endm

/*
 * Restore all host registers from the stack.
 * Clobbers all registers, in all modes, except r0 and r1.
 */
.macro restore_host_regs
        pop {r2-r9}
        msr r8_fiq, r2
        msr r9_fiq, r3
        msr r10_fiq, r4
        msr r11_fiq, r5
        msr r12_fiq, r6
        msr SP_fiq, r7
        msr LR_fiq, r8
        msr SPSR_fiq, r9

        pop_host_regs_mode irq
        pop_host_regs_mode und
        pop_host_regs_mode abt
        pop_host_regs_mode svc

        pop {r2, r3}
        msr SP_usr, r2
        mov lr, r3
        pop {r4-r12}

        pop {r2}
        msr ELR_hyp, r2
.endm
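
/*
 * Hedged usage sketch: save_host_regs and restore_host_regs bracket the
 * guest run, and the pops deliberately mirror the pushes in reverse
 * (fiq block first, then irq/und/abt/svc, then usr, then ELR_hyp):
 *
 *      save_host_regs          @ on the way into the guest
 *      @ ... switch to guest state, run the guest, trap back to Hyp ...
 *      restore_host_regs       @ on the way back out
 */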

/*
 * Restore SP, LR and SPSR for a given mode. offset is the offset of
 * this mode's registers from the VCPU base.
 *
 * Assumes vcpu pointer in vcpu reg
 *
 * Clobbers r1, r2, r3, r4.
 */
.macro restore_guest_regs_mode mode, offset
        add r1, vcpu, \offset
        ldm r1, {r2, r3, r4}
        msr SP_\mode, r2
        msr LR_\mode, r3
        msr SPSR_\mode, r4
.endm

/*
 * Restore all guest registers from the vcpu struct.
 *
 * Assumes vcpu pointer in vcpu reg
 *
 * Clobbers *all* registers.
 */
.macro restore_guest_regs
        restore_guest_regs_mode svc, #VCPU_SVC_REGS
        restore_guest_regs_mode abt, #VCPU_ABT_REGS
        restore_guest_regs_mode und, #VCPU_UND_REGS
        restore_guest_regs_mode irq, #VCPU_IRQ_REGS

        add r1, vcpu, #VCPU_FIQ_REGS
        ldm r1, {r2-r9}
        msr r8_fiq, r2
        msr r9_fiq, r3
        msr r10_fiq, r4
        msr r11_fiq, r5
        msr r12_fiq, r6
        msr SP_fiq, r7
        msr LR_fiq, r8
        msr SPSR_fiq, r9

        @ Load return state
        ldr r2, [vcpu, #VCPU_PC]
        ldr r3, [vcpu, #VCPU_CPSR]
        msr ELR_hyp, r2
        msr SPSR_cxsf, r3

        @ Load user registers
        ldr r2, [vcpu, #VCPU_USR_SP]
        ldr r3, [vcpu, #VCPU_USR_LR]
        msr SP_usr, r2
        mov lr, r3
        add vcpu, vcpu, #(VCPU_USR_REGS)
        ldm vcpu, {r0-r12}
.endm
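
/*
 * Hedged note: the final ldm above overwrites r0-r12, including the vcpu
 * pointer itself, so restore_guest_regs has to be the very last macro on
 * the entry path; the caller can only follow it with something like:
 *
 *      restore_guest_regs
 *      clrex                   @ don't leak the local exclusive monitor
 *      eret                    @ enter the guest at ELR_hyp/SPSR_hyp
 */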

/*
 * Save SP, LR and SPSR for a given mode. offset is the offset of
 * this mode's registers from the VCPU base.
 *
 * Assumes vcpu pointer in vcpu reg
 *
 * Clobbers r2, r3, r4, r5.
 */
.macro save_guest_regs_mode mode, offset
        add r2, vcpu, \offset
        mrs r3, SP_\mode
        mrs r4, LR_\mode
        mrs r5, SPSR_\mode
        stm r2, {r3, r4, r5}
.endm

/*
 * Save all guest registers to the vcpu struct
 * Expects guest's r0, r1, r2 on the stack.
 *
 * Assumes vcpu pointer in vcpu reg
 *
 * Clobbers r2, r3, r4, r5.
 */
.macro save_guest_regs
        @ Store usr registers
        add r2, vcpu, #VCPU_USR_REG(3)
        stm r2, {r3-r12}
        add r2, vcpu, #VCPU_USR_REG(0)
        pop {r3, r4, r5} @ r0, r1, r2
        stm r2, {r3, r4, r5}
        mrs r2, SP_usr
        mov r3, lr
        str r2, [vcpu, #VCPU_USR_SP]
        str r3, [vcpu, #VCPU_USR_LR]

        @ Store return state
        mrs r2, ELR_hyp
        mrs r3, spsr
        str r2, [vcpu, #VCPU_PC]
        str r3, [vcpu, #VCPU_CPSR]

        @ Store other guest registers
        save_guest_regs_mode svc, #VCPU_SVC_REGS
        save_guest_regs_mode abt, #VCPU_ABT_REGS
        save_guest_regs_mode und, #VCPU_UND_REGS
        save_guest_regs_mode irq, #VCPU_IRQ_REGS
.endm
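
/*
 * Hedged usage sketch: a Hyp exception vector satisfies the "guest's
 * r0, r1, r2 on the stack" contract before invoking save_guest_regs,
 * roughly like this (r1 then receives the exception code, per the
 * convention at the top of this file):
 *
 *      push {r0, r1, r2}       @ free up scratch registers
 *      load_vcpu               @ recover the vcpu pointer from HTPIDR
 *      save_guest_regs         @ consumes the pushed r0-r2
 */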

/*
 * Reads cp15 registers from hardware and stores them in memory
 * @store_to_vcpu: If 0, registers are written in-order to the stack,
 *                 otherwise to the VCPU struct pointed to by the vcpu reg
 *
 * Assumes vcpu pointer in vcpu reg
 *
 * Clobbers r2 - r12
 */
.macro read_cp15_state store_to_vcpu
        mrc p15, 0, r2, c1, c0, 0 @ SCTLR
        mrc p15, 0, r3, c1, c0, 2 @ CPACR
        mrc p15, 0, r4, c2, c0, 2 @ TTBCR
        mrc p15, 0, r5, c3, c0, 0 @ DACR
        mrrc p15, 0, r6, r7, c2 @ TTBR 0
        mrrc p15, 1, r8, r9, c2 @ TTBR 1
        mrc p15, 0, r10, c10, c2, 0 @ PRRR
        mrc p15, 0, r11, c10, c2, 1 @ NMRR
        mrc p15, 2, r12, c0, c0, 0 @ CSSELR

        .if \store_to_vcpu == 0
        push {r2-r12} @ Push CP15 registers
        .else
        str r2, [vcpu, #CP15_OFFSET(c1_SCTLR)]
        str r3, [vcpu, #CP15_OFFSET(c1_CPACR)]
        str r4, [vcpu, #CP15_OFFSET(c2_TTBCR)]
        str r5, [vcpu, #CP15_OFFSET(c3_DACR)]
        add r2, vcpu, #CP15_OFFSET(c2_TTBR0)
        strd r6, r7, [r2]
        add r2, vcpu, #CP15_OFFSET(c2_TTBR1)
        strd r8, r9, [r2]
        str r10, [vcpu, #CP15_OFFSET(c10_PRRR)]
        str r11, [vcpu, #CP15_OFFSET(c10_NMRR)]
        str r12, [vcpu, #CP15_OFFSET(c0_CSSELR)]
        .endif

        mrc p15, 0, r2, c13, c0, 1 @ CID
        mrc p15, 0, r3, c13, c0, 2 @ TID_URW
        mrc p15, 0, r4, c13, c0, 3 @ TID_URO
        mrc p15, 0, r5, c13, c0, 4 @ TID_PRIV
        mrc p15, 0, r6, c5, c0, 0 @ DFSR
        mrc p15, 0, r7, c5, c0, 1 @ IFSR
        mrc p15, 0, r8, c5, c1, 0 @ ADFSR
        mrc p15, 0, r9, c5, c1, 1 @ AIFSR
        mrc p15, 0, r10, c6, c0, 0 @ DFAR
        mrc p15, 0, r11, c6, c0, 2 @ IFAR
        mrc p15, 0, r12, c12, c0, 0 @ VBAR

        .if \store_to_vcpu == 0
        push {r2-r12} @ Push CP15 registers
        .else
        str r2, [vcpu, #CP15_OFFSET(c13_CID)]
        str r3, [vcpu, #CP15_OFFSET(c13_TID_URW)]
        str r4, [vcpu, #CP15_OFFSET(c13_TID_URO)]
        str r5, [vcpu, #CP15_OFFSET(c13_TID_PRIV)]
        str r6, [vcpu, #CP15_OFFSET(c5_DFSR)]
        str r7, [vcpu, #CP15_OFFSET(c5_IFSR)]
        str r8, [vcpu, #CP15_OFFSET(c5_ADFSR)]
        str r9, [vcpu, #CP15_OFFSET(c5_AIFSR)]
        str r10, [vcpu, #CP15_OFFSET(c6_DFAR)]
        str r11, [vcpu, #CP15_OFFSET(c6_IFAR)]
        str r12, [vcpu, #CP15_OFFSET(c12_VBAR)]
        .endif

        mrc p15, 0, r2, c14, c1, 0 @ CNTKCTL
        mrrc p15, 0, r4, r5, c7 @ PAR

        .if \store_to_vcpu == 0
        push {r2, r4-r5}
        .else
        str r2, [vcpu, #CP15_OFFSET(c14_CNTKCTL)]
        add r12, vcpu, #CP15_OFFSET(c7_PAR)
        strd r4, r5, [r12]
        .endif
.endm

/*
 * Reads cp15 registers from memory and writes them to hardware
 * @read_from_vcpu: If 0, registers are read in-order from the stack,
 *                  otherwise from the VCPU struct pointed to by the vcpu reg
 *
 * Assumes vcpu pointer in vcpu reg
 */
.macro write_cp15_state read_from_vcpu
        .if \read_from_vcpu == 0
        pop {r2, r4-r5}
        .else
        ldr r2, [vcpu, #CP15_OFFSET(c14_CNTKCTL)]
        add r12, vcpu, #CP15_OFFSET(c7_PAR)
        ldrd r4, r5, [r12]
        .endif

        mcr p15, 0, r2, c14, c1, 0 @ CNTKCTL
        mcrr p15, 0, r4, r5, c7 @ PAR

        .if \read_from_vcpu == 0
        pop {r2-r12}
        .else
        ldr r2, [vcpu, #CP15_OFFSET(c13_CID)]
        ldr r3, [vcpu, #CP15_OFFSET(c13_TID_URW)]
        ldr r4, [vcpu, #CP15_OFFSET(c13_TID_URO)]
        ldr r5, [vcpu, #CP15_OFFSET(c13_TID_PRIV)]
        ldr r6, [vcpu, #CP15_OFFSET(c5_DFSR)]
        ldr r7, [vcpu, #CP15_OFFSET(c5_IFSR)]
        ldr r8, [vcpu, #CP15_OFFSET(c5_ADFSR)]
        ldr r9, [vcpu, #CP15_OFFSET(c5_AIFSR)]
        ldr r10, [vcpu, #CP15_OFFSET(c6_DFAR)]
        ldr r11, [vcpu, #CP15_OFFSET(c6_IFAR)]
        ldr r12, [vcpu, #CP15_OFFSET(c12_VBAR)]
        .endif

        mcr p15, 0, r2, c13, c0, 1 @ CID
        mcr p15, 0, r3, c13, c0, 2 @ TID_URW
        mcr p15, 0, r4, c13, c0, 3 @ TID_URO
        mcr p15, 0, r5, c13, c0, 4 @ TID_PRIV
        mcr p15, 0, r6, c5, c0, 0 @ DFSR
        mcr p15, 0, r7, c5, c0, 1 @ IFSR
        mcr p15, 0, r8, c5, c1, 0 @ ADFSR
        mcr p15, 0, r9, c5, c1, 1 @ AIFSR
        mcr p15, 0, r10, c6, c0, 0 @ DFAR
        mcr p15, 0, r11, c6, c0, 2 @ IFAR
        mcr p15, 0, r12, c12, c0, 0 @ VBAR

        .if \read_from_vcpu == 0
        pop {r2-r12}
        .else
        ldr r2, [vcpu, #CP15_OFFSET(c1_SCTLR)]
        ldr r3, [vcpu, #CP15_OFFSET(c1_CPACR)]
        ldr r4, [vcpu, #CP15_OFFSET(c2_TTBCR)]
        ldr r5, [vcpu, #CP15_OFFSET(c3_DACR)]
        add r12, vcpu, #CP15_OFFSET(c2_TTBR0)
        ldrd r6, r7, [r12]
        add r12, vcpu, #CP15_OFFSET(c2_TTBR1)
        ldrd r8, r9, [r12]
        ldr r10, [vcpu, #CP15_OFFSET(c10_PRRR)]
        ldr r11, [vcpu, #CP15_OFFSET(c10_NMRR)]
        ldr r12, [vcpu, #CP15_OFFSET(c0_CSSELR)]
        .endif

        mcr p15, 0, r2, c1, c0, 0 @ SCTLR
        mcr p15, 0, r3, c1, c0, 2 @ CPACR
        mcr p15, 0, r4, c2, c0, 2 @ TTBCR
        mcr p15, 0, r5, c3, c0, 0 @ DACR
        mcrr p15, 0, r6, r7, c2 @ TTBR 0
        mcrr p15, 1, r8, r9, c2 @ TTBR 1
        mcr p15, 0, r10, c10, c2, 0 @ PRRR
        mcr p15, 0, r11, c10, c2, 1 @ NMRR
        mcr p15, 2, r12, c0, c0, 0 @ CSSELR
.endm
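
/*
 * Hedged note: the two macros above are a matched pair. The stack variant
 * works because the stack is LIFO: write_cp15_state pops the three groups
 * in the reverse order read_cp15_state pushed them, so each pop picks up
 * the group pushed by the corresponding read. A typical guest entry is:
 *
 *      read_cp15_state store_to_vcpu = 0       @ host cp15 -> stack
 *      write_cp15_state read_from_vcpu = 1     @ vcpu struct -> hardware
 *
 * and the exit path flips the arguments to undo it.
 */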

/*
 * Save the VGIC CPU state into memory
 *
 * Assumes vcpu pointer in vcpu reg
 */
.macro save_vgic_state
#ifdef CONFIG_KVM_ARM_VGIC
        /* Get VGIC VCTRL base into r2 */
        ldr r2, [vcpu, #VCPU_KVM]
        ldr r2, [r2, #KVM_VGIC_VCTRL]
        cmp r2, #0
        beq 2f

        /* Compute the address of struct vgic_cpu */
        add r11, vcpu, #VCPU_VGIC_CPU

        /* Save all interesting registers */
        ldr r3, [r2, #GICH_HCR]
        ldr r4, [r2, #GICH_VMCR]
        ldr r5, [r2, #GICH_MISR]
        ldr r6, [r2, #GICH_EISR0]
        ldr r7, [r2, #GICH_EISR1]
        ldr r8, [r2, #GICH_ELRSR0]
        ldr r9, [r2, #GICH_ELRSR1]
        ldr r10, [r2, #GICH_APR]

        str r3, [r11, #VGIC_CPU_HCR]
        str r4, [r11, #VGIC_CPU_VMCR]
        str r5, [r11, #VGIC_CPU_MISR]
        str r6, [r11, #VGIC_CPU_EISR]
        str r7, [r11, #(VGIC_CPU_EISR + 4)]
        str r8, [r11, #VGIC_CPU_ELRSR]
        str r9, [r11, #(VGIC_CPU_ELRSR + 4)]
        str r10, [r11, #VGIC_CPU_APR]

        /* Clear GICH_HCR */
        mov r5, #0
        str r5, [r2, #GICH_HCR]

        /* Save list registers */
        add r2, r2, #GICH_LR0
        add r3, r11, #VGIC_CPU_LR
        ldr r4, [r11, #VGIC_CPU_NR_LR]
1:      ldr r6, [r2], #4
        str r6, [r3], #4
        subs r4, r4, #1
        bne 1b
2:
#endif
.endm

/*
 * Restore the VGIC CPU state from memory
 *
 * Assumes vcpu pointer in vcpu reg
 */
.macro restore_vgic_state
#ifdef CONFIG_KVM_ARM_VGIC
        /* Get VGIC VCTRL base into r2 */
        ldr r2, [vcpu, #VCPU_KVM]
        ldr r2, [r2, #KVM_VGIC_VCTRL]
        cmp r2, #0
        beq 2f

        /* Compute the address of struct vgic_cpu */
        add r11, vcpu, #VCPU_VGIC_CPU
        /*
         * We only restore a minimal set of registers; GICH_MISR, GICH_EISR
         * and GICH_ELRSR saved above are read-only status registers that the
         * hardware recomputes, so only HCR, VMCR and APR are written back.
         */
        ldr r3, [r11, #VGIC_CPU_HCR]
        ldr r4, [r11, #VGIC_CPU_VMCR]
        ldr r8, [r11, #VGIC_CPU_APR]

        str r3, [r2, #GICH_HCR]
        str r4, [r2, #GICH_VMCR]
        str r8, [r2, #GICH_APR]

        /* Restore list registers */
        add r2, r2, #GICH_LR0
        add r3, r11, #VGIC_CPU_LR
        ldr r4, [r11, #VGIC_CPU_NR_LR]
1:      ldr r6, [r3], #4
        str r6, [r2], #4
        subs r4, r4, #1
        bne 1b
2:
#endif
.endm

#define CNTHCTL_PL1PCTEN (1 << 0)
#define CNTHCTL_PL1PCEN (1 << 1)

/*
 * Save the timer state onto the VCPU and allow physical timer/counter access
 * for the host.
 *
 * Assumes vcpu pointer in vcpu reg
 * Clobbers r2-r5
 */
.macro save_timer_state
#ifdef CONFIG_KVM_ARM_TIMER
        ldr r4, [vcpu, #VCPU_KVM]
        ldr r2, [r4, #KVM_TIMER_ENABLED]
        cmp r2, #0
        beq 1f

        mrc p15, 0, r2, c14, c3, 1 @ CNTV_CTL
        str r2, [vcpu, #VCPU_TIMER_CNTV_CTL]
        bic r2, #1 @ Clear ENABLE
        mcr p15, 0, r2, c14, c3, 1 @ CNTV_CTL
        isb

        mrrc p15, 3, r2, r3, c14 @ CNTV_CVAL
        ldr r4, =VCPU_TIMER_CNTV_CVAL
        add r5, vcpu, r4
        strd r2, r3, [r5]

        @ Ensure host CNTVCT == CNTPCT
        mov r2, #0
        mcrr p15, 4, r2, r2, c14 @ CNTVOFF
1:
#endif
        @ Allow physical timer/counter access for the host
        mrc p15, 4, r2, c14, c1, 0 @ CNTHCTL
        orr r2, r2, #(CNTHCTL_PL1PCEN | CNTHCTL_PL1PCTEN)
        mcr p15, 4, r2, c14, c1, 0 @ CNTHCTL
.endm

/*
 * Load the timer state from the VCPU and deny physical timer/counter access
 * for the host.
 *
 * Assumes vcpu pointer in vcpu reg
 * Clobbers r2-r5
 */
.macro restore_timer_state
        @ Disallow physical timer access for the guest
        @ Physical counter access is allowed
        mrc p15, 4, r2, c14, c1, 0 @ CNTHCTL
        orr r2, r2, #CNTHCTL_PL1PCTEN
        bic r2, r2, #CNTHCTL_PL1PCEN
        mcr p15, 4, r2, c14, c1, 0 @ CNTHCTL

#ifdef CONFIG_KVM_ARM_TIMER
        ldr r4, [vcpu, #VCPU_KVM]
        ldr r2, [r4, #KVM_TIMER_ENABLED]
        cmp r2, #0
        beq 1f

        ldr r2, [r4, #KVM_TIMER_CNTVOFF]
        ldr r3, [r4, #(KVM_TIMER_CNTVOFF + 4)]
        mcrr p15, 4, r2, r3, c14 @ CNTVOFF
        ldr r4, =VCPU_TIMER_CNTV_CVAL
        add r5, vcpu, r4
        ldrd r2, r3, [r5]
        mcrr p15, 3, r2, r3, c14 @ CNTV_CVAL
        isb

        ldr r2, [vcpu, #VCPU_TIMER_CNTV_CTL]
        and r2, r2, #3
        mcr p15, 0, r2, c14, c3, 1 @ CNTV_CTL
1:
#endif
.endm
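
/*
 * Hedged note on the CNTVOFF handling above: architecturally the virtual
 * counter is CNTVCT = CNTPCT - CNTVOFF. save_timer_state zeroes CNTVOFF so
 * the host sees virtual time equal to physical time, and restore_timer_state
 * writes the guest's CNTVOFF before reloading CNTV_CVAL and CNTV_CTL, so the
 * re-enabled timer compares against guest virtual time from the start.
 */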

.equ vmentry, 0
.equ vmexit, 1

/* Configures the HSTR (Hyp System Trap Register) on entry/return
 * (hardware reset value is 0) */
.macro set_hstr operation
        mrc p15, 4, r2, c1, c1, 3
        ldr r3, =HSTR_T(15)
        .if \operation == vmentry
        orr r2, r2, r3 @ Trap CR{15}
        .else
        bic r2, r2, r3 @ Don't trap any CRx accesses
        .endif
        mcr p15, 4, r2, c1, c1, 3
.endm

/* Configures the HCPTR (Hyp Coprocessor Trap Register) on entry/return
 * (hardware reset value is 0). Keep previous value in r2. */
.macro set_hcptr operation, mask
        mrc p15, 4, r2, c1, c1, 2
        ldr r3, =\mask
        .if \operation == vmentry
        orr r3, r2, r3 @ Trap coproc-accesses defined in mask
        .else
        bic r3, r2, r3 @ Don't trap defined coproc-accesses
        .endif
        mcr p15, 4, r3, c1, c1, 2
.endm

/* Configures the HDCR (Hyp Debug Configuration Register) on entry/return
 * (hardware reset value is 0) */
.macro set_hdcr operation
        mrc p15, 4, r2, c1, c1, 1
        ldr r3, =(HDCR_TPM|HDCR_TPMCR)
        .if \operation == vmentry
        orr r2, r2, r3 @ Trap some perfmon accesses
        .else
        bic r2, r2, r3 @ Don't trap any perfmon accesses
        .endif
        mcr p15, 4, r2, c1, c1, 1
.endm

/* Enable/Disable: stage-2 translation, trap interrupts, trap wfi, trap smc */
.macro configure_hyp_role operation
        mrc p15, 4, r2, c1, c1, 0 @ HCR
        bic r2, r2, #HCR_VIRT_EXCP_MASK
        ldr r3, =HCR_GUEST_MASK
        .if \operation == vmentry
        orr r2, r2, r3
        ldr r3, [vcpu, #VCPU_IRQ_LINES]
        orr r2, r2, r3
        .else
        bic r2, r2, r3
        .endif
        mcr p15, 4, r2, c1, c1, 0
.endm
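
/*
 * Hedged usage sketch: the four trap-configuration macros are typically
 * applied together around the world switch; the HCPTR mask shown (trap
 * trace plus cp10/cp11, i.e. VFP) is illustrative, not a definitive list:
 *
 *      set_hstr vmentry
 *      set_hcptr vmentry, (HCPTR_TTA | HCPTR_TCP(10) | HCPTR_TCP(11))
 *      set_hdcr vmentry
 *      configure_hyp_role vmentry
 *
 * with the same sequence using "vmexit" on the return path.
 */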

.macro load_vcpu
        mrc p15, 4, vcpu, c13, c0, 2 @ HTPIDR
.endm
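
/*
 * Hedged end-to-end sketch: the world-switch code (interrupts.S) composes
 * the macros above roughly as follows. This is a simplified illustration
 * of the ordering constraints, not the literal __kvm_vcpu_run body.
 *
 * Entry to the guest:
 *      save_host_regs
 *      restore_vgic_state
 *      restore_timer_state
 *      read_cp15_state store_to_vcpu = 0       @ host cp15 -> stack
 *      write_cp15_state read_from_vcpu = 1     @ guest cp15 from the vcpu
 *      set_hstr vmentry
 *      set_hdcr vmentry
 *      configure_hyp_role vmentry
 *      restore_guest_regs                      @ last: clobbers r0-r12
 *      eret
 *
 * Return to the host (the vector has already run save_guest_regs):
 *      read_cp15_state store_to_vcpu = 1       @ guest cp15 -> vcpu
 *      write_cp15_state read_from_vcpu = 0     @ stack -> host cp15
 *      save_timer_state
 *      save_vgic_state
 *      set_hstr vmexit
 *      set_hdcr vmexit
 *      configure_hyp_role vmexit
 *      restore_host_regs
 */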