/* (extraction artifacts removed: web-viewer size banner and line-number gutter) */
/* sun4v_tlb_miss.S: Sun4v TLB miss handlers.
 *
 * Copyright (C) 2006 <davem@davemloft.net>
 */

	.text
	.align	32

	/* Load ITLB fault information into VADDR and CTX, using BASE.
	 * BASE points at the per-cpu hypervisor fault status area
	 * (the HV_FAULT_I_* offsets index into it).
	 */
#define LOAD_ITLB_INFO(BASE, VADDR, CTX) \
	ldx	[BASE + HV_FAULT_I_ADDR_OFFSET], VADDR; \
	ldx	[BASE + HV_FAULT_I_CTX_OFFSET], CTX;

	/* Load DTLB fault information into VADDR and CTX, using BASE.
	 * Same layout as above but using the data-fault offsets.
	 */
#define LOAD_DTLB_INFO(BASE, VADDR, CTX) \
	ldx	[BASE + HV_FAULT_D_ADDR_OFFSET], VADDR; \
	ldx	[BASE + HV_FAULT_D_CTX_OFFSET], CTX;
	/* DEST = (CTX << 48) | (VADDR >> 22)
	 *
	 * Branch to ZERO_CTX_LABEL if context is zero.  The delay-slot
	 * "or" still executes, so DEST holds the full tag target either way.
	 */
#define COMPUTE_TAG_TARGET(DEST, VADDR, CTX, TMP, ZERO_CTX_LABEL) \
	srlx	VADDR, 22, TMP; \
	sllx	CTX, 48, DEST; \
	brz,pn	CTX, ZERO_CTX_LABEL; \
	 or	DEST, TMP, DEST;
	/* Create TSB pointer.  This is something like:
	 *
	 *	index_mask = (512 << (tsb_reg & 0x7UL)) - 1UL;
	 *	tsb_base = tsb_reg & ~0x7UL;
	 *	tsb_index = ((vaddr >> PAGE_SHIFT) & tsb_mask);
	 *	tsb_ptr = tsb_base + (tsb_index * 16);
	 *
	 * TSB_PTR holds the raw TSB register value on entry (low 3 bits
	 * encode the size, upper bits the base) and is overwritten with
	 * the final entry pointer.  Each TSB entry is 16 bytes (tag+pte),
	 * hence the "sllx TMP1, 4".
	 */
#define COMPUTE_TSB_PTR(TSB_PTR, VADDR, TMP1, TMP2) \
	and	TSB_PTR, 0x7, TMP1; \
	mov	512, TMP2; \
	andn	TSB_PTR, 0x7, TSB_PTR; \
	sllx	TMP2, TMP1, TMP2; \
	srlx	VADDR, PAGE_SHIFT, TMP1; \
	sub	TMP2, 1, TMP2; \
	and	TMP1, TMP2, TMP1; \
	sllx	TMP1, 4, TMP1; \
	add	TSB_PTR, TMP1, TSB_PTR;
sun4v_itlb_miss:
	/* Load MMU Miss base into %g2.  */
	ldxa	[%g0] ASI_SCRATCHPAD, %g2

	/* Load UTSB reg into %g1.  */
	mov	SCRATCHPAD_UTSBREG1, %g1
	ldxa	[%g1] ASI_SCRATCHPAD, %g1

	/* %g4 = fault vaddr, %g5 = context; zero context (kernel
	 * address, presumably) goes to kvmap_itlb_4v.
	 */
	LOAD_ITLB_INFO(%g2, %g4, %g5)
	COMPUTE_TAG_TARGET(%g6, %g4, %g5, %g3, kvmap_itlb_4v)
	COMPUTE_TSB_PTR(%g1, %g4, %g3, %g7)

	/* Load TSB tag/pte into %g2/%g3 and compare the tag.  */
	ldda	[%g1] ASI_QUAD_LDD_PHYS, %g2
	cmp	%g2, %g6
	sethi	%hi(_PAGE_EXEC), %g7
	bne,a,pn %xcc, tsb_miss_page_table_walk
	 mov	FAULT_CODE_ITLB, %g3
	/* Tag matched: the PTE (%g3) must grant execute permission,
	 * otherwise this is a real fault.
	 */
	andcc	%g3, %g7, %g0
	be,a,pn	%xcc, tsb_do_fault
	 mov	FAULT_CODE_ITLB, %g3
	/* We have a valid entry, make hypervisor call to load
	 * I-TLB and return from trap.
	 *
	 * %g3:	PTE
	 * %g4:	vaddr
	 * %g6:	TAG TARGET (only "CTX << 48" part matters)
	 */
sun4v_itlb_load:
	/* The hypervisor call takes its arguments in %o0-%o3, so
	 * stash the caller's %o values in scratch %g registers and
	 * restore them after the trap.
	 */
	mov	%o0, %g1		! save %o0
	mov	%o1, %g2		! save %o1
	mov	%o2, %g5		! save %o2
	mov	%o3, %g7		! save %o3
	mov	%g4, %o0		! vaddr
	srlx	%g6, 48, %o1		! ctx (recovered from tag target)
	mov	%g3, %o2		! PTE
	mov	HV_MMU_IMMU, %o3	! flags: instruction MMU
	ta	HV_MMU_MAP_ADDR_TRAP
	mov	%g1, %o0		! restore %o0
	mov	%g2, %o1		! restore %o1
	mov	%g5, %o2		! restore %o2
	mov	%g7, %o3		! restore %o3
	retry
  81. sun4v_dtlb_miss:
  82. /* Load MMU Miss base into %g2. */
  83. ldxa [%g0] ASI_SCRATCHPAD, %g2
  84. /* Load UTSB reg into %g1. */
  85. mov SCRATCHPAD_UTSBREG1, %g1
  86. ldxa [%g1] ASI_SCRATCHPAD, %g1
  87. LOAD_DTLB_INFO(%g2, %g4, %g5)
  88. COMPUTE_TAG_TARGET(%g6, %g4, %g5, %g3, kvmap_dtlb_4v)
  89. COMPUTE_TSB_PTR(%g1, %g4, %g3, %g7)
  90. /* Load TSB tag/pte into %g2/%g3 and compare the tag. */
  91. ldda [%g1] ASI_QUAD_LDD_PHYS, %g2
  92. cmp %g2, %g6
  93. bne,a,pn %xcc, tsb_miss_page_table_walk
  94. mov FAULT_CODE_ITLB, %g3
  95. /* We have a valid entry, make hypervisor call to load
  96. * D-TLB and return from trap.
  97. *
  98. * %g3: PTE
  99. * %g4: vaddr
  100. * %g6: TAG TARGET (only "CTX << 48" part matters)
  101. */
sun4v_dtlb_load:
	/* Map the PTE via the hypervisor.  Arguments go in %o0-%o3,
	 * so preserve the caller's %o values in scratch %g registers
	 * around the trap.
	 */
	mov	%o0, %g1		! save %o0
	mov	%o1, %g2		! save %o1
	mov	%o2, %g5		! save %o2
	mov	%o3, %g7		! save %o3
	mov	%g4, %o0		! vaddr
	srlx	%g6, 48, %o1		! ctx (recovered from tag target)
	mov	%g3, %o2		! PTE
	mov	HV_MMU_DMMU, %o3	! flags: data MMU
	ta	HV_MMU_MAP_ADDR_TRAP
	mov	%g1, %o0		! restore %o0
	mov	%g2, %o1		! restore %o1
	mov	%g5, %o2		! restore %o2
	mov	%g7, %o3		! restore %o3
	retry
sun4v_dtlb_prot:
	/* Write-protection fault.  Load MMU Miss base into %g2
	 * and fetch the faulting data address into %g5.
	 */
	ldxa	[%g0] ASI_SCRATCHPAD, %g2

	ldx	[%g2 + HV_FAULT_D_ADDR_OFFSET], %g5

	/* At trap level > 1 a register-window fixup is needed first.  */
	rdpr	%tl, %g1
	cmp	%g1, 1
	bgu,pn	%xcc, winfix_trampoline
	 nop
	ba,pt	%xcc, sparc64_realfault_common
	 mov	FAULT_CODE_DTLB | FAULT_CODE_WRITE, %g4
  127. /* Called from trap table with TAG TARGET placed into
  128. * %g6, SCRATCHPAD_UTSBREG1 contents in %g1, and
  129. * SCRATCHPAD_MMU_MISS contents in %g2.
  130. */
  131. sun4v_itsb_miss:
  132. mov SCRATCHPAD_UTSBREG1, %g1
  133. ldxa [%g1] ASI_SCRATCHPAD, %g1
  134. brz,pn %g5, kvmap_itlb_4v
  135. mov FAULT_CODE_ITLB, %g3
	/* Called from trap table with TAG TARGET placed into
	 * %g6 and SCRATCHPAD_UTSBREG1 contents in %g1.
	 * Vaddr is in %g4 and context in %g5.
	 */
sun4v_dtsb_miss:
	mov	SCRATCHPAD_UTSBREG1, %g1
	ldxa	[%g1] ASI_SCRATCHPAD, %g1

	/* Zero context: presumably a kernel address, handled by
	 * kvmap_dtlb_4v.  Otherwise fall through (with %g3 set in
	 * the delay slot) into sun4v_tsb_miss_common below.
	 */
	brz,pn	%g5, kvmap_dtlb_4v
	 mov	FAULT_CODE_DTLB, %g3
	/* Create TSB pointer into %g1.  This is something like:
	 *
	 *	index_mask = (512 << (tsb_reg & 0x7UL)) - 1UL;
	 *	tsb_base = tsb_reg & ~0x7UL;
	 *	tsb_index = ((vaddr >> PAGE_SHIFT) & tsb_mask);
	 *	tsb_ptr = tsb_base + (tsb_index * 16);
	 *
	 * Entered with vaddr in %g4, fault code in %g3, and the
	 * SCRATCHPAD_MMU_MISS pointer in %g2.
	 */
sun4v_tsb_miss_common:
	COMPUTE_TSB_PTR(%g1, %g4, %g5, %g7)

	/* Branch directly to page table lookup.  We have SCRATCHPAD_MMU_MISS
	 * still in %g2, so it's quite trivial to get at the PGD PHYS value
	 * so we can preload it into %g7.
	 */
	sub	%g2, TRAP_PER_CPU_FAULT_INFO, %g2
	ba,pt	%xcc, tsb_miss_page_table_walk_sun4v_fastpath
	 ldx	[%g2 + TRAP_PER_CPU_PGD_PADDR], %g7
	/* Instruction Access Exception, tl0.  */
sun4v_iacc:
	/* Gather fault type/addr/ctx from the hypervisor fault area.  */
	ldxa	[%g0] ASI_SCRATCHPAD, %g2
	ldx	[%g2 + HV_FAULT_I_TYPE_OFFSET], %g3
	ldx	[%g2 + HV_FAULT_I_ADDR_OFFSET], %g4
	ldx	[%g2 + HV_FAULT_I_CTX_OFFSET], %g5
	/* %g5 = (type << 16) | ctx, packed for the C handler.  */
	sllx	%g3, 16, %g3
	or	%g5, %g3, %g5
	ba,pt	%xcc, etrap
	 rd	%pc, %g7
	mov	%l4, %o1		! NOTE(review): %l4/%l5 presumably hold %g4/%g5 after etrap — confirm
	mov	%l5, %o2
	call	sun4v_insn_access_exception
	 add	%sp, PTREGS_OFF, %o0
	ba,a,pt	%xcc, rtrap_clr_l6
	/* Instruction Access Exception, tl1.  Same as the tl0 variant
	 * but enters via etraptl1 and calls the _tl1 C handler.
	 */
sun4v_iacc_tl1:
	ldxa	[%g0] ASI_SCRATCHPAD, %g2
	ldx	[%g2 + HV_FAULT_I_TYPE_OFFSET], %g3
	ldx	[%g2 + HV_FAULT_I_ADDR_OFFSET], %g4
	ldx	[%g2 + HV_FAULT_I_CTX_OFFSET], %g5
	/* %g5 = (type << 16) | ctx.  */
	sllx	%g3, 16, %g3
	or	%g5, %g3, %g5
	ba,pt	%xcc, etraptl1
	 rd	%pc, %g7
	mov	%l4, %o1		! NOTE(review): %l4/%l5 presumably hold %g4/%g5 after etraptl1 — confirm
	mov	%l5, %o2
	call	sun4v_insn_access_exception_tl1
	 add	%sp, PTREGS_OFF, %o0
	ba,a,pt	%xcc, rtrap_clr_l6
	/* Data Access Exception, tl0.  */
sun4v_dacc:
	/* Gather fault type/addr/ctx from the hypervisor fault area.  */
	ldxa	[%g0] ASI_SCRATCHPAD, %g2
	ldx	[%g2 + HV_FAULT_D_TYPE_OFFSET], %g3
	ldx	[%g2 + HV_FAULT_D_ADDR_OFFSET], %g4
	ldx	[%g2 + HV_FAULT_D_CTX_OFFSET], %g5
	/* %g5 = (type << 16) | ctx, packed for the C handler.  */
	sllx	%g3, 16, %g3
	or	%g5, %g3, %g5
	ba,pt	%xcc, etrap
	 rd	%pc, %g7
	mov	%l4, %o1		! NOTE(review): %l4/%l5 presumably hold %g4/%g5 after etrap — confirm
	mov	%l5, %o2
	call	sun4v_data_access_exception
	 add	%sp, PTREGS_OFF, %o0
	ba,a,pt	%xcc, rtrap_clr_l6
	/* Data Access Exception, tl1.  Same as the tl0 variant but
	 * enters via etraptl1 and calls the _tl1 C handler.
	 */
sun4v_dacc_tl1:
	ldxa	[%g0] ASI_SCRATCHPAD, %g2
	ldx	[%g2 + HV_FAULT_D_TYPE_OFFSET], %g3
	ldx	[%g2 + HV_FAULT_D_ADDR_OFFSET], %g4
	ldx	[%g2 + HV_FAULT_D_CTX_OFFSET], %g5
	/* %g5 = (type << 16) | ctx.  */
	sllx	%g3, 16, %g3
	or	%g5, %g3, %g5
	ba,pt	%xcc, etraptl1
	 rd	%pc, %g7
	mov	%l4, %o1		! NOTE(review): %l4/%l5 presumably hold %g4/%g5 after etraptl1 — confirm
	mov	%l5, %o2
	call	sun4v_data_access_exception_tl1
	 add	%sp, PTREGS_OFF, %o0
	ba,a,pt	%xcc, rtrap_clr_l6
  220. /* Memory Address Unaligned. */
  221. sun4v_mna:
  222. ldxa [%g0] ASI_SCRATCHPAD, %g2
  223. mov HV_FAULT_TYPE_UNALIGNED, %g3
  224. ldx [%g2 + HV_FAULT_D_ADDR_OFFSET], %g4
  225. ldx [%g2 + HV_FAULT_D_CTX_OFFSET], %g5
  226. sllx %g3, 16, %g3
  227. or %g5, %g3, %g5
  228. /* Window fixup? */
  229. rdpr %tl, %g2
  230. cmp %g2, 1
  231. bgu,pn %icc, winfix_mna
  232. rdpr %tpc, %g3
  233. ba,pt %xcc, etrap
  234. rd %pc, %g7
  235. mov %l4, %o1
  236. mov %l5, %o2
  237. call sun4v_mna
  238. add %sp, PTREGS_OFF, %o0
  239. ba,a,pt %xcc, rtrap_clr_l6
	/* Privileged Action.  No fault-area state to gather; go
	 * straight to the generic C handler.
	 */
sun4v_privact:
	ba,pt	%xcc, etrap
	 rd	%pc, %g7
	call	do_privact
	 add	%sp, PTREGS_OFF, %o0
	ba,a,pt	%xcc, rtrap_clr_l6
	/* Unaligned ldd float, tl0.  */
sun4v_lddfmna:
	/* Gather fault type/addr/ctx from the hypervisor fault area.  */
	ldxa	[%g0] ASI_SCRATCHPAD, %g2
	ldx	[%g2 + HV_FAULT_D_TYPE_OFFSET], %g3
	ldx	[%g2 + HV_FAULT_D_ADDR_OFFSET], %g4
	ldx	[%g2 + HV_FAULT_D_CTX_OFFSET], %g5
	/* %g5 = (type << 16) | ctx.  */
	sllx	%g3, 16, %g3
	or	%g5, %g3, %g5
	ba,pt	%xcc, etrap
	 rd	%pc, %g7
	mov	%l4, %o1		! NOTE(review): %l4/%l5 presumably hold %g4/%g5 after etrap — confirm
	mov	%l5, %o2
	call	handle_lddfmna
	 add	%sp, PTREGS_OFF, %o0
	ba,a,pt	%xcc, rtrap_clr_l6
	/* Unaligned std float, tl0.  */
sun4v_stdfmna:
	/* Gather fault type/addr/ctx from the hypervisor fault area.  */
	ldxa	[%g0] ASI_SCRATCHPAD, %g2
	ldx	[%g2 + HV_FAULT_D_TYPE_OFFSET], %g3
	ldx	[%g2 + HV_FAULT_D_ADDR_OFFSET], %g4
	ldx	[%g2 + HV_FAULT_D_CTX_OFFSET], %g5
	/* %g5 = (type << 16) | ctx.  */
	sllx	%g3, 16, %g3
	or	%g5, %g3, %g5
	ba,pt	%xcc, etrap
	 rd	%pc, %g7
	mov	%l4, %o1		! NOTE(review): %l4/%l5 presumably hold %g4/%g5 after etrap — confirm
	mov	%l5, %o2
	call	handle_stdfmna
	 add	%sp, PTREGS_OFF, %o0
	ba,a,pt	%xcc, rtrap_clr_l6
	/* Instruction templates for run-time patching.
	 * BRANCH_ALWAYS: "branch always" opcode with a zero displacement
	 * field (NOTE(review): verify encoding against SPARC V9 BPcc).
	 */
#define BRANCH_ALWAYS	0x10680000
#define NOP		0x01000000

	/* Overwrite the two instructions at OLD with
	 *
	 *	OLD+0:	branch-always to NEW
	 *	OLD+4:	nop
	 *
	 * The sll-by-11 / srl-by-13 pair truncates the byte offset
	 * (NEW - OLD) to the branch's word-displacement field, then the
	 * result is OR'd into the template and stored.  "flush" keeps the
	 * I-cache coherent with the patched words.  Clobbers %g1-%g3.
	 */
#define SUN4V_DO_PATCH(OLD, NEW) \
	sethi	%hi(NEW), %g1; \
	or	%g1, %lo(NEW), %g1; \
	sethi	%hi(OLD), %g2; \
	or	%g2, %lo(OLD), %g2; \
	sub	%g1, %g2, %g1; \
	sethi	%hi(BRANCH_ALWAYS), %g3; \
	sll	%g1, 11, %g1; \
	srl	%g1, 11 + 2, %g1; \
	or	%g3, %lo(BRANCH_ALWAYS), %g3; \
	or	%g3, %g1, %g3; \
	stw	%g3, [%g2]; \
	sethi	%hi(NOP), %g3; \
	or	%g3, %lo(NOP), %g3; \
	stw	%g3, [%g2 + 0x4]; \
	flush	%g2;
	.globl	sun4v_patch_tlb_handlers
	.type	sun4v_patch_tlb_handlers,#function
	/* Redirect each listed trap-table entry (tl0_*/tl1_*) to its
	 * sun4v handler above by patching in a branch + nop.
	 */
sun4v_patch_tlb_handlers:
	SUN4V_DO_PATCH(tl0_iamiss, sun4v_itlb_miss)
	SUN4V_DO_PATCH(tl1_iamiss, sun4v_itlb_miss)
	SUN4V_DO_PATCH(tl0_damiss, sun4v_dtlb_miss)
	SUN4V_DO_PATCH(tl1_damiss, sun4v_dtlb_miss)
	SUN4V_DO_PATCH(tl0_daprot, sun4v_dtlb_prot)
	SUN4V_DO_PATCH(tl1_daprot, sun4v_dtlb_prot)
	SUN4V_DO_PATCH(tl0_iax, sun4v_iacc)
	SUN4V_DO_PATCH(tl1_iax, sun4v_iacc_tl1)
	SUN4V_DO_PATCH(tl0_dax, sun4v_dacc)
	SUN4V_DO_PATCH(tl1_dax, sun4v_dacc_tl1)
	SUN4V_DO_PATCH(tl0_mna, sun4v_mna)
	SUN4V_DO_PATCH(tl1_mna, sun4v_mna)
	SUN4V_DO_PATCH(tl0_lddfmna, sun4v_lddfmna)
	SUN4V_DO_PATCH(tl0_stdfmna, sun4v_stdfmna)
	SUN4V_DO_PATCH(tl0_privact, sun4v_privact)
	retl
	 nop
	.size	sun4v_patch_tlb_handlers,.-sun4v_patch_tlb_handlers