/* sun4v_tlb_miss.S: Sun4v TLB miss handlers.
 *
 * Copyright (C) 2006 <davem@davemloft.net>
 */

	.text
	.align	32

/* Load ITLB fault information into VADDR and CTX, using BASE.
 * BASE points at the hypervisor fault status area.
 */
#define LOAD_ITLB_INFO(BASE, VADDR, CTX) \
	ldx	[BASE + HV_FAULT_I_ADDR_OFFSET], VADDR; \
	ldx	[BASE + HV_FAULT_I_CTX_OFFSET], CTX;

/* Load DTLB fault information into VADDR and CTX, using BASE.  */
#define LOAD_DTLB_INFO(BASE, VADDR, CTX) \
	ldx	[BASE + HV_FAULT_D_ADDR_OFFSET], VADDR; \
	ldx	[BASE + HV_FAULT_D_CTX_OFFSET], CTX;

/* DEST = (CTX << 48) | (VADDR >> 22)
 *
 * Branch to ZERO_CTX_LABEL if context is zero.  The "or" sits in
 * the annulled-on-branch position so DEST is only fully formed on
 * the non-zero-context path.
 */
#define COMPUTE_TAG_TARGET(DEST, VADDR, CTX, TMP, ZERO_CTX_LABEL) \
	srlx	VADDR, 22, TMP; \
	sllx	CTX, 48, DEST; \
	brz,pn	CTX, ZERO_CTX_LABEL; \
	 or	DEST, TMP, DEST;

/* Create TSB pointer.  This is something like:
 *
 *	index_mask = (512 << (tsb_reg & 0x7UL)) - 1UL;
 *	tsb_base = tsb_reg & ~0x7UL;
 *	tsb_index = ((vaddr >> PAGE_SHIFT) & tsb_mask);
 *	tsb_ptr = tsb_base + (tsb_index * 16);
 *
 * The low 3 bits of the TSB register encode the TSB size
 * (number of 512-entry units, as a shift count); the rest is
 * the base address.  Each TSB entry is 16 bytes (tag + pte).
 */
#define COMPUTE_TSB_PTR(TSB_PTR, VADDR, TMP1, TMP2) \
	and	TSB_PTR, 0x7, TMP1; \
	mov	512, TMP2; \
	andn	TSB_PTR, 0x7, TSB_PTR; \
	sllx	TMP2, TMP1, TMP2; \
	srlx	VADDR, PAGE_SHIFT, TMP1; \
	sub	TMP2, 1, TMP2; \
	and	TMP1, TMP2, TMP1; \
	sllx	TMP1, 4, TMP1; \
	add	TSB_PTR, TMP1, TSB_PTR;
sun4v_itlb_miss:
	/* Load MMU Miss base into %g2.  */
	ldxa	[%g0] ASI_SCRATCHPAD, %g2

	/* Load UTSB reg into %g1.  */
	mov	SCRATCHPAD_UTSBREG1, %g1
	ldxa	[%g1] ASI_SCRATCHPAD, %g1

	/* %g4 = fault vaddr, %g5 = fault context; zero context
	 * (kernel address) misses divert to kvmap_itlb_4v.
	 */
	LOAD_ITLB_INFO(%g2, %g4, %g5)
	COMPUTE_TAG_TARGET(%g6, %g4, %g5, %g3, kvmap_itlb_4v)
	COMPUTE_TSB_PTR(%g1, %g4, %g3, %g7)

	/* Load TSB tag/pte into %g2/%g3 and compare the tag.  */
	ldda	[%g1] ASI_QUAD_LDD_PHYS, %g2
	cmp	%g2, %g6
	sethi	%hi(PAGE_EXEC), %g7
	ldx	[%g7 + %lo(PAGE_EXEC)], %g7
	bne,a,pn %xcc, tsb_miss_page_table_walk
	 mov	FAULT_CODE_ITLB, %g3
	/* Tag matched; refuse to load a PTE lacking PAGE_EXEC into
	 * the I-TLB.
	 */
	andcc	%g3, %g7, %g0
	be,a,pn	%xcc, tsb_do_fault
	 mov	FAULT_CODE_ITLB, %g3

	/* We have a valid entry, make hypervisor call to load
	 * I-TLB and return from trap.
	 *
	 * %g3: PTE
	 * %g4: vaddr
	 * %g6: TAG TARGET (only "CTX << 48" part matters)
	 */
	/* Hypervisor I-TLB map call.  %o0-%o3 are live caller state at
	 * trap time, so stash them in free globals across the hypercall.
	 */
sun4v_itlb_load:
	mov	%o0, %g1		! save %o0
	mov	%o1, %g2		! save %o1
	mov	%o2, %g5		! save %o2
	mov	%o3, %g7		! save %o3
	mov	%g4, %o0		! vaddr
	srlx	%g6, 48, %o1		! ctx (upper 16 bits of tag target)
	mov	%g3, %o2		! PTE
	mov	HV_MMU_IMMU, %o3	! flags: I-MMU
	ta	HV_MMU_MAP_ADDR_TRAP
	mov	%g1, %o0		! restore %o0
	mov	%g2, %o1		! restore %o1
	mov	%g5, %o2		! restore %o2
	mov	%g7, %o3		! restore %o3
	retry
  82. sun4v_dtlb_miss:
  83. /* Load MMU Miss base into %g2. */
  84. ldxa [%g0] ASI_SCRATCHPAD, %g2
  85. /* Load UTSB reg into %g1. */
  86. mov SCRATCHPAD_UTSBREG1, %g1
  87. ldxa [%g1] ASI_SCRATCHPAD, %g1
  88. LOAD_DTLB_INFO(%g2, %g4, %g5)
  89. COMPUTE_TAG_TARGET(%g6, %g4, %g5, %g3, kvmap_dtlb_4v)
  90. COMPUTE_TSB_PTR(%g1, %g4, %g3, %g7)
  91. /* Load TSB tag/pte into %g2/%g3 and compare the tag. */
  92. ldda [%g1] ASI_QUAD_LDD_PHYS, %g2
  93. cmp %g2, %g6
  94. bne,a,pn %xcc, tsb_miss_page_table_walk
  95. mov FAULT_CODE_ITLB, %g3
  96. /* We have a valid entry, make hypervisor call to load
  97. * D-TLB and return from trap.
  98. *
  99. * %g3: PTE
  100. * %g4: vaddr
  101. * %g6: TAG TARGET (only "CTX << 48" part matters)
  102. */
	/* Hypervisor D-TLB map call.  Mirrors sun4v_itlb_load but with
	 * HV_MMU_DMMU flags; %o0-%o3 preserved in globals.
	 */
sun4v_dtlb_load:
	mov	%o0, %g1		! save %o0
	mov	%o1, %g2		! save %o1
	mov	%o2, %g5		! save %o2
	mov	%o3, %g7		! save %o3
	mov	%g4, %o0		! vaddr
	srlx	%g6, 48, %o1		! ctx (upper 16 bits of tag target)
	mov	%g3, %o2		! PTE
	mov	HV_MMU_DMMU, %o3	! flags: D-MMU
	ta	HV_MMU_MAP_ADDR_TRAP
	mov	%g1, %o0		! restore %o0
	mov	%g2, %o1		! restore %o1
	mov	%g5, %o2		! restore %o2
	mov	%g7, %o3		! restore %o3
	retry
	/* D-TLB protection (write to read-only page) trap.  */
sun4v_dtlb_prot:
	/* Load MMU Miss base into %g2.  */
	ldxa	[%g0] ASI_SCRATCHPAD, %g2

	ldx	[%g2 + HV_FAULT_D_ADDR_OFFSET], %g5
	rdpr	%tl, %g1
	/* At trap level > 1 we need a register window fixup first.  */
	cmp	%g1, 1
	bgu,pn	%xcc, winfix_trampoline
	 nop
	ba,pt	%xcc, sparc64_realfault_common
	 mov	FAULT_CODE_DTLB | FAULT_CODE_WRITE, %g4
  128. /* Called from trap table with TAG TARGET placed into
  129. * %g6, SCRATCHPAD_UTSBREG1 contents in %g1, and
  130. * SCRATCHPAD_MMU_MISS contents in %g2.
  131. */
  132. sun4v_itsb_miss:
  133. mov SCRATCHPAD_UTSBREG1, %g1
  134. ldxa [%g1] ASI_SCRATCHPAD, %g1
  135. brz,pn %g5, kvmap_itlb_4v
  136. mov FAULT_CODE_ITLB, %g3
	/* Called from trap table with TAG TARGET placed into
	 * %g6 and SCRATCHPAD_UTSBREG1 contents in %g1.
	 */
sun4v_dtsb_miss:
	mov	SCRATCHPAD_UTSBREG1, %g1
	ldxa	[%g1] ASI_SCRATCHPAD, %g1
	/* Zero context (%g5) means a kernel-address miss.  */
	brz,pn	%g5, kvmap_dtlb_4v
	 mov	FAULT_CODE_DTLB, %g3
	/* Falls through to sun4v_tsb_miss_common.  */
	/* Create TSB pointer into %g1.  This is something like:
	 *
	 *	index_mask = (512 << (tsb_reg & 0x7UL)) - 1UL;
	 *	tsb_base = tsb_reg & ~0x7UL;
	 *	tsb_index = ((vaddr >> PAGE_SHIFT) & tsb_mask);
	 *	tsb_ptr = tsb_base + (tsb_index * 16);
	 */
sun4v_tsb_miss_common:
	COMPUTE_TSB_PTR(%g1, %g4, %g5, %g7)

	/* Branch directly to page table lookup.  We have SCRATCHPAD_MMU_MISS
	 * still in %g2, so it's quite trivial to get at the PGD PHYS value
	 * so we can preload it into %g7.
	 */
	sub	%g2, TRAP_PER_CPU_FAULT_INFO, %g2
	ba,pt	%xcc, tsb_miss_page_table_walk_sun4v_fastpath
	 ldx	[%g2 + TRAP_PER_CPU_PGD_PADDR], %g7
	/* Instruction Access Exception, tl0.  */
sun4v_iacc:
	ldxa	[%g0] ASI_SCRATCHPAD, %g2
	ldx	[%g2 + HV_FAULT_I_TYPE_OFFSET], %g3
	ldx	[%g2 + HV_FAULT_I_ADDR_OFFSET], %g4
	ldx	[%g2 + HV_FAULT_I_CTX_OFFSET], %g5
	/* %g5 = (type << 16) | context, passed on to the C handler.  */
	sllx	%g3, 16, %g3
	or	%g5, %g3, %g5
	ba,pt	%xcc, etrap
	 rd	%pc, %g7		! etrap returns here
	mov	%l4, %o1		! NOTE(review): %l4/%l5 presumably hold
	mov	%l5, %o2		! fault addr/ctx saved by etrap — confirm
	call	sun4v_insn_access_exception
	 add	%sp, PTREGS_OFF, %o0
	ba,a,pt	%xcc, rtrap_clr_l6
	/* Instruction Access Exception, tl1.  Same as the tl0 variant
	 * but enters via etraptl1 and the _tl1 C handler.
	 */
sun4v_iacc_tl1:
	ldxa	[%g0] ASI_SCRATCHPAD, %g2
	ldx	[%g2 + HV_FAULT_I_TYPE_OFFSET], %g3
	ldx	[%g2 + HV_FAULT_I_ADDR_OFFSET], %g4
	ldx	[%g2 + HV_FAULT_I_CTX_OFFSET], %g5
	/* %g5 = (type << 16) | context  */
	sllx	%g3, 16, %g3
	or	%g5, %g3, %g5
	ba,pt	%xcc, etraptl1
	 rd	%pc, %g7		! etraptl1 returns here
	mov	%l4, %o1
	mov	%l5, %o2
	call	sun4v_insn_access_exception_tl1
	 add	%sp, PTREGS_OFF, %o0
	ba,a,pt	%xcc, rtrap_clr_l6
	/* Data Access Exception, tl0.  */
sun4v_dacc:
	ldxa	[%g0] ASI_SCRATCHPAD, %g2
	ldx	[%g2 + HV_FAULT_D_TYPE_OFFSET], %g3
	ldx	[%g2 + HV_FAULT_D_ADDR_OFFSET], %g4
	ldx	[%g2 + HV_FAULT_D_CTX_OFFSET], %g5
	/* %g5 = (type << 16) | context  */
	sllx	%g3, 16, %g3
	or	%g5, %g3, %g5
	ba,pt	%xcc, etrap
	 rd	%pc, %g7		! etrap returns here
	mov	%l4, %o1
	mov	%l5, %o2
	call	sun4v_data_access_exception
	 add	%sp, PTREGS_OFF, %o0
	ba,a,pt	%xcc, rtrap_clr_l6
	/* Data Access Exception, tl1.  */
sun4v_dacc_tl1:
	ldxa	[%g0] ASI_SCRATCHPAD, %g2
	ldx	[%g2 + HV_FAULT_D_TYPE_OFFSET], %g3
	ldx	[%g2 + HV_FAULT_D_ADDR_OFFSET], %g4
	ldx	[%g2 + HV_FAULT_D_CTX_OFFSET], %g5
	/* %g5 = (type << 16) | context  */
	sllx	%g3, 16, %g3
	or	%g5, %g3, %g5
	ba,pt	%xcc, etraptl1
	 rd	%pc, %g7		! etraptl1 returns here
	mov	%l4, %o1
	mov	%l5, %o2
	call	sun4v_data_access_exception_tl1
	 add	%sp, PTREGS_OFF, %o0
	ba,a,pt	%xcc, rtrap_clr_l6
  221. /* Memory Address Unaligned. */
  222. sun4v_mna:
  223. ldxa [%g0] ASI_SCRATCHPAD, %g2
  224. mov HV_FAULT_TYPE_UNALIGNED, %g3
  225. ldx [%g2 + HV_FAULT_D_ADDR_OFFSET], %g4
  226. ldx [%g2 + HV_FAULT_D_CTX_OFFSET], %g5
  227. sllx %g3, 16, %g3
  228. or %g5, %g3, %g5
  229. /* Window fixup? */
  230. rdpr %tl, %g2
  231. cmp %g2, 1
  232. bgu,pn %icc, winfix_mna
  233. rdpr %tpc, %g3
  234. ba,pt %xcc, etrap
  235. rd %pc, %g7
  236. mov %l4, %o1
  237. mov %l5, %o2
  238. call sun4v_mna
  239. add %sp, PTREGS_OFF, %o0
  240. ba,a,pt %xcc, rtrap_clr_l6
	/* Privileged Action.  No fault info to gather; go straight to
	 * the generic C handler via etrap.
	 */
sun4v_privact:
	ba,pt	%xcc, etrap
	 rd	%pc, %g7		! etrap returns here
	call	do_privact
	 add	%sp, PTREGS_OFF, %o0
	ba,a,pt	%xcc, rtrap_clr_l6
  248. /* Unaligned ldd float, tl0. */
  249. sun4v_lddfmna:
  250. ldxa [%g0] ASI_SCRATCHPAD, %g2
  251. ldx [%g2 + HV_FAULT_D_TYPE_OFFSET], %g3
  252. ldx [%g2 + HV_FAULT_D_ADDR_OFFSET], %g4
  253. ldx [%g2 + HV_FAULT_D_CTX_OFFSET], %g5
  254. sllx %g3, 16, %g3
  255. or %g5, %g3, %g5
  256. ba,pt %xcc, etrap
  257. rd %pc, %g7
  258. mov %l4, %o1
  259. mov %l5, %o2
  260. call handle_lddfmna
  261. add %sp, PTREGS_OFF, %o0
  262. ba,a,pt %xcc, rtrap_clr_l6
  263. /* Unaligned std float, tl0. */
  264. sun4v_stdfmna:
  265. ldxa [%g0] ASI_SCRATCHPAD, %g2
  266. ldx [%g2 + HV_FAULT_D_TYPE_OFFSET], %g3
  267. ldx [%g2 + HV_FAULT_D_ADDR_OFFSET], %g4
  268. ldx [%g2 + HV_FAULT_D_CTX_OFFSET], %g5
  269. sllx %g3, 16, %g3
  270. or %g5, %g3, %g5
  271. ba,pt %xcc, etrap
  272. rd %pc, %g7
  273. mov %l4, %o1
  274. mov %l5, %o2
  275. call handle_stdfmna
  276. add %sp, PTREGS_OFF, %o0
  277. ba,a,pt %xcc, rtrap_clr_l6
#define BRANCH_ALWAYS	0x10680000	/* "ba,a,pt %xcc, ." template */
#define NOP		0x01000000

/* Overwrite the two instructions at OLD with
 * "ba,a,pt %xcc, NEW; nop" and flush the I-cache line.
 *
 * The byte offset (NEW - OLD) is folded into the branch's 19-bit
 * disp19 field: "sll 11 / srl 11+2" keeps the low 21 bits of the
 * byte offset and drops the two word-alignment bits.
 * Clobbers %g1-%g3.
 */
#define SUN4V_DO_PATCH(OLD, NEW) \
	sethi	%hi(NEW), %g1; \
	or	%g1, %lo(NEW), %g1; \
	sethi	%hi(OLD), %g2; \
	or	%g2, %lo(OLD), %g2; \
	sub	%g1, %g2, %g1; \
	sethi	%hi(BRANCH_ALWAYS), %g3; \
	sll	%g1, 11, %g1; \
	srl	%g1, 11 + 2, %g1; \
	or	%g3, %lo(BRANCH_ALWAYS), %g3; \
	or	%g3, %g1, %g3; \
	stw	%g3, [%g2]; \
	sethi	%hi(NOP), %g3; \
	or	%g3, %lo(NOP), %g3; \
	stw	%g3, [%g2 + 0x4]; \
	flush	%g2;
	/* Redirect the relevant trap-table entries to the sun4v
	 * handlers above.  NOTE(review): presumably called once during
	 * early boot when running on a sun4v hypervisor — confirm at
	 * the call site.  Clobbers %g1-%g3 (via SUN4V_DO_PATCH).
	 */
	.globl	sun4v_patch_tlb_handlers
	.type	sun4v_patch_tlb_handlers,#function
sun4v_patch_tlb_handlers:
	SUN4V_DO_PATCH(tl0_iamiss, sun4v_itlb_miss)
	SUN4V_DO_PATCH(tl1_iamiss, sun4v_itlb_miss)
	SUN4V_DO_PATCH(tl0_damiss, sun4v_dtlb_miss)
	SUN4V_DO_PATCH(tl1_damiss, sun4v_dtlb_miss)
	SUN4V_DO_PATCH(tl0_daprot, sun4v_dtlb_prot)
	SUN4V_DO_PATCH(tl1_daprot, sun4v_dtlb_prot)
	SUN4V_DO_PATCH(tl0_iax, sun4v_iacc)
	SUN4V_DO_PATCH(tl1_iax, sun4v_iacc_tl1)
	SUN4V_DO_PATCH(tl0_dax, sun4v_dacc)
	SUN4V_DO_PATCH(tl1_dax, sun4v_dacc_tl1)
	SUN4V_DO_PATCH(tl0_mna, sun4v_mna)
	SUN4V_DO_PATCH(tl1_mna, sun4v_mna)
	SUN4V_DO_PATCH(tl0_lddfmna, sun4v_lddfmna)
	SUN4V_DO_PATCH(tl0_stdfmna, sun4v_stdfmna)
	SUN4V_DO_PATCH(tl0_privact, sun4v_privact)
	retl
	 nop
	.size	sun4v_patch_tlb_handlers,.-sun4v_patch_tlb_handlers