
/* sun4v_tlb_miss.S: Sun4v TLB miss handlers.
 *
 * Copyright (C) 2006 <davem@davemloft.net>
 */

        .text
        .align  32

sun4v_itlb_miss:
        /* Load MMU Miss base into %g2.  */
        ldxa    [%g0] ASI_SCRATCHPAD, %g2

        /* Load UTSB reg into %g1.  */
        mov     SCRATCHPAD_UTSBREG1, %g1
        ldxa    [%g1] ASI_SCRATCHPAD, %g1
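
        /* Both values come from per-cpu scratchpad registers: scratchpad
         * register 0 is presumed to hold the base of this cpu's sun4v MMU
         * fault status area, where the hypervisor records the faulting
         * address, context and fault type, and SCRATCHPAD_UTSBREG1 holds
         * the user TSB base with the TSB size field encoded in its low
         * three bits.
         */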

        /* Create a TAG TARGET, "(vaddr>>22) | (ctx << 48)", in %g6.
         * Branch if kernel TLB miss.  The kernel TSB and user TSB miss
         * code wants the missing virtual address in %g4, so that value
         * cannot be modified through the entirety of this handler.
         */
        ldx     [%g2 + HV_FAULT_I_ADDR_OFFSET], %g4
        ldx     [%g2 + HV_FAULT_I_CTX_OFFSET], %g5
        srlx    %g4, 22, %g3
        sllx    %g5, 48, %g6
        or      %g6, %g3, %g6
        brz,pn  %g5, kvmap_itlb_4v
         nop
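
        /* Context 0 is the kernel context on sun4v, so a zero context
         * value means this is a kernel TLB miss and is handed off to the
         * kernel TSB miss code instead of the user TSB lookup below.
         */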

        /* Create TSB pointer.  This is something like:
         *
         *      index_mask = (512 << (tsb_reg & 0x7UL)) - 1UL;
         *      tsb_base = tsb_reg & ~0x7UL;
         */
        and     %g1, 0x7, %g3
        andn    %g1, 0x7, %g1
        mov     512, %g7
        sllx    %g7, %g3, %g7
        sub     %g7, 1, %g7

        /* TSB index mask is in %g7, tsb base is in %g1.  Compute
         * the TSB entry pointer into %g1:
         *
         *      tsb_index = ((vaddr >> PAGE_SHIFT) & tsb_mask);
         *      tsb_ptr = tsb_base + (tsb_index * 16);
         */
        srlx    %g4, PAGE_SHIFT, %g3
        and     %g3, %g7, %g3
        sllx    %g3, 4, %g3
        add     %g1, %g3, %g1
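
        /* The "* 16" reflects the TSB entry size: each entry is 16 bytes,
         * an 8-byte tag followed by an 8-byte TTE.  With the smallest size
         * encoding (tsb_reg & 0x7 == 0) index_mask is 511, i.e. a
         * 512-entry (8KB) TSB; each increment of the size field doubles
         * the number of entries.
         */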

        /* Load TSB tag/pte into %g2/%g3 and compare the tag.  */
        ldda    [%g1] ASI_QUAD_LDD_PHYS, %g2
        cmp     %g2, %g6
        sethi   %hi(_PAGE_EXEC), %g7
        bne,a,pn %xcc, tsb_miss_page_table_walk
         mov    FAULT_CODE_ITLB, %g3
        andcc   %g3, %g7, %g0
        be,a,pn %xcc, tsb_do_fault
         mov    FAULT_CODE_ITLB, %g3
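
        /* The quad load pulls the whole 16-byte TSB entry in atomically:
         * tag into %g2, TTE into %g3.  A tag mismatch takes the page table
         * walk path; a matching entry whose TTE lacks the exec bit takes
         * the fault path, since this is an instruction fetch.  Note the
         * annulled delay slots: each FAULT_CODE_ITLB move only executes
         * when its branch is taken, so %g3 still holds the TTE for the
         * andcc on the fall-through path.
         */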

        /* We have a valid entry, make hypervisor call to load
         * I-TLB and return from trap.
         *
         * %g3: PTE
         * %g4: vaddr
         * %g6: TAG TARGET (only "CTX << 48" part matters)
         */
sun4v_itlb_load:
        mov     %o0, %g1                ! save %o0
        mov     %o1, %g2                ! save %o1
        mov     %o2, %g5                ! save %o2
        mov     %o3, %g7                ! save %o3
        mov     %g4, %o0                ! vaddr
        srlx    %g6, 48, %o1            ! ctx
        mov     %g3, %o2                ! PTE
        mov     HV_MMU_IMMU, %o3        ! flags
        ta      HV_MMU_MAP_ADDR_TRAP
        mov     %g1, %o0                ! restore %o0
        mov     %g2, %o1                ! restore %o1
        mov     %g5, %o2                ! restore %o2
        mov     %g7, %o3                ! restore %o3
        retry
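
        /* The MMU map-address hypervisor fast trap takes its arguments in
         * %o0-%o3 (vaddr, context, TTE, MMU flags).  We are still in the
         * interrupted context's register window with no stack available,
         * so its %o registers are stashed in free globals around the call
         * and restored before "retry" re-executes the instruction that
         * missed.
         */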

sun4v_dtlb_miss:
        /* Load MMU Miss base into %g2.  */
        ldxa    [%g0] ASI_SCRATCHPAD, %g2

        /* Load UTSB reg into %g1.  */
        mov     SCRATCHPAD_UTSBREG1, %g1
        ldxa    [%g1] ASI_SCRATCHPAD, %g1

        /* Create a TAG TARGET, "(vaddr>>22) | (ctx << 48)", in %g6.
         * Branch if kernel TLB miss.  The kernel TSB and user TSB miss
         * code wants the missing virtual address in %g4, so that value
         * cannot be modified through the entirety of this handler.
         */
        ldx     [%g2 + HV_FAULT_D_ADDR_OFFSET], %g4
        ldx     [%g2 + HV_FAULT_D_CTX_OFFSET], %g5
        srlx    %g4, 22, %g3
        sllx    %g5, 48, %g6
        or      %g6, %g3, %g6
        brz,pn  %g5, kvmap_dtlb_4v
         nop

        /* Create TSB pointer.  This is something like:
         *
         *      index_mask = (512 << (tsb_reg & 0x7UL)) - 1UL;
         *      tsb_base = tsb_reg & ~0x7UL;
         */
        and     %g1, 0x7, %g3
        andn    %g1, 0x7, %g1
        mov     512, %g7
        sllx    %g7, %g3, %g7
        sub     %g7, 1, %g7

        /* TSB index mask is in %g7, tsb base is in %g1.  Compute
         * the TSB entry pointer into %g1:
         *
         *      tsb_index = ((vaddr >> PAGE_SHIFT) & tsb_mask);
         *      tsb_ptr = tsb_base + (tsb_index * 16);
         */
        srlx    %g4, PAGE_SHIFT, %g3
        and     %g3, %g7, %g3
        sllx    %g3, 4, %g3
        add     %g1, %g3, %g1

        /* Load TSB tag/pte into %g2/%g3 and compare the tag.  */
        ldda    [%g1] ASI_QUAD_LDD_PHYS, %g2
        cmp     %g2, %g6
        bne,a,pn %xcc, tsb_miss_page_table_walk
         mov    FAULT_CODE_DTLB, %g3

        /* We have a valid entry, make hypervisor call to load
         * D-TLB and return from trap.
         *
         * %g3: PTE
         * %g4: vaddr
         * %g6: TAG TARGET (only "CTX << 48" part matters)
         */
sun4v_dtlb_load:
        mov     %o0, %g1                ! save %o0
        mov     %o1, %g2                ! save %o1
        mov     %o2, %g5                ! save %o2
        mov     %o3, %g7                ! save %o3
        mov     %g4, %o0                ! vaddr
        srlx    %g6, 48, %o1            ! ctx
        mov     %g3, %o2                ! PTE
        mov     HV_MMU_DMMU, %o3        ! flags
        ta      HV_MMU_MAP_ADDR_TRAP
        mov     %g1, %o0                ! restore %o0
        mov     %g2, %o1                ! restore %o1
        mov     %g5, %o2                ! restore %o2
        mov     %g7, %o3                ! restore %o3
        retry

sun4v_dtlb_prot:
        /* Load MMU Miss base into %g2.  */
        ldxa    [%g0] ASI_SCRATCHPAD, %g2

        ldx     [%g2 + HV_FAULT_D_ADDR_OFFSET], %g5
        rdpr    %tl, %g1
        cmp     %g1, 1
        bgu,pn  %xcc, winfix_trampoline
         nop
        ba,pt   %xcc, sparc64_realfault_common
         mov    FAULT_CODE_DTLB | FAULT_CODE_WRITE, %g4
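
        /* A protection trap at TL > 1 means the faulting store came from
         * trap-level code, typically a register window spill/fill handler,
         * so it is routed through the window fixup trampoline.  At TL 1 we
         * go straight to the common fault path, which expects the fault
         * code in %g4 and the fault address in %g5 (hence the address load
         * into %g5 above).
         */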

        /* Called from trap table with TAG TARGET placed into
         * %g6 and SCRATCHPAD_UTSBREG1 contents in %g1.
         */
sun4v_itsb_miss:
        ba,pt   %xcc, sun4v_tsb_miss_common
         mov    FAULT_CODE_ITLB, %g3

        /* Called from trap table with TAG TARGET placed into
         * %g6 and SCRATCHPAD_UTSBREG1 contents in %g1.
         */
sun4v_dtsb_miss:
        mov     FAULT_CODE_DTLB, %g3

        /* Create TSB pointer into %g1.  This is something like:
         *
         *      index_mask = (512 << (tsb_reg & 0x7UL)) - 1UL;
         *      tsb_base = tsb_reg & ~0x7UL;
         *      tsb_index = ((vaddr >> PAGE_SHIFT) & tsb_mask);
         *      tsb_ptr = tsb_base + (tsb_index * 16);
         */
sun4v_tsb_miss_common:
        and     %g1, 0x7, %g2
        andn    %g1, 0x7, %g1
        mov     512, %g7
        sllx    %g7, %g2, %g7
        sub     %g7, 1, %g7
        srlx    %g4, PAGE_SHIFT, %g2
        and     %g2, %g7, %g2
        sllx    %g2, 4, %g2
        ba,pt   %xcc, tsb_miss_page_table_walk
         add    %g1, %g2, %g1
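
        /* tsb_miss_page_table_walk is entered with the fault code in %g3,
         * the missing vaddr in %g4, the tag target in %g6 and the TSB
         * entry pointer in %g1; it walks the page tables and, if a valid
         * mapping is found, refills the TSB and TLB, otherwise it falls
         * through to the fault path.
         */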

        /* Instruction Access Exception, tl0.  */
sun4v_iacc:
        ldxa    [%g0] ASI_SCRATCHPAD, %g2
        ldx     [%g2 + HV_FAULT_I_TYPE_OFFSET], %g3
        ldx     [%g2 + HV_FAULT_I_ADDR_OFFSET], %g4
        ldx     [%g2 + HV_FAULT_I_CTX_OFFSET], %g5
        sllx    %g3, 16, %g3
        or      %g5, %g3, %g5
        ba,pt   %xcc, etrap
         rd     %pc, %g7
        mov     %l4, %o1
        mov     %l5, %o2
        call    sun4v_insn_access_exception
         add    %sp, PTREGS_OFF, %o0
        ba,a,pt %xcc, rtrap_clr_l6
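
        /* This pattern repeats below: the fault type is packed into bits
         * 16 and up of %g5 alongside the context, the fault address goes
         * in %g4, and etrap makes those values reappear as %l4/%l5 once a
         * kernel register window and pt_regs frame are set up.  So the C
         * handler ends up being called roughly as
         *
         *      sun4v_insn_access_exception(regs, addr, type_ctx);
         *
         * (argument names here are illustrative, not the exact kernel
         * prototype).
         */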

        /* Instruction Access Exception, tl1.  */
sun4v_iacc_tl1:
        ldxa    [%g0] ASI_SCRATCHPAD, %g2
        ldx     [%g2 + HV_FAULT_I_TYPE_OFFSET], %g3
        ldx     [%g2 + HV_FAULT_I_ADDR_OFFSET], %g4
        ldx     [%g2 + HV_FAULT_I_CTX_OFFSET], %g5
        sllx    %g3, 16, %g3
        or      %g5, %g3, %g5
        ba,pt   %xcc, etraptl1
         rd     %pc, %g7
        mov     %l4, %o1
        mov     %l5, %o2
        call    sun4v_insn_access_exception_tl1
         add    %sp, PTREGS_OFF, %o0
        ba,a,pt %xcc, rtrap_clr_l6

        /* Data Access Exception, tl0.  */
sun4v_dacc:
        ldxa    [%g0] ASI_SCRATCHPAD, %g2
        ldx     [%g2 + HV_FAULT_D_TYPE_OFFSET], %g3
        ldx     [%g2 + HV_FAULT_D_ADDR_OFFSET], %g4
        ldx     [%g2 + HV_FAULT_D_CTX_OFFSET], %g5
        sllx    %g3, 16, %g3
        or      %g5, %g3, %g5
        ba,pt   %xcc, etrap
         rd     %pc, %g7
        mov     %l4, %o1
        mov     %l5, %o2
        call    sun4v_data_access_exception
         add    %sp, PTREGS_OFF, %o0
        ba,a,pt %xcc, rtrap_clr_l6

        /* Data Access Exception, tl1.  */
sun4v_dacc_tl1:
        ldxa    [%g0] ASI_SCRATCHPAD, %g2
        ldx     [%g2 + HV_FAULT_D_TYPE_OFFSET], %g3
        ldx     [%g2 + HV_FAULT_D_ADDR_OFFSET], %g4
        ldx     [%g2 + HV_FAULT_D_CTX_OFFSET], %g5
        sllx    %g3, 16, %g3
        or      %g5, %g3, %g5
        ba,pt   %xcc, etraptl1
         rd     %pc, %g7
        mov     %l4, %o1
        mov     %l5, %o2
        call    sun4v_data_access_exception_tl1
         add    %sp, PTREGS_OFF, %o0
        ba,a,pt %xcc, rtrap_clr_l6

        /* Memory Address Unaligned.  */
sun4v_mna:
        ldxa    [%g0] ASI_SCRATCHPAD, %g2
        mov     HV_FAULT_TYPE_UNALIGNED, %g3
        ldx     [%g2 + HV_FAULT_D_ADDR_OFFSET], %g4
        ldx     [%g2 + HV_FAULT_D_CTX_OFFSET], %g5
        sllx    %g3, 16, %g3
        or      %g5, %g3, %g5

        /* Window fixup? */
        rdpr    %tl, %g2
        cmp     %g2, 1
        bgu,pn  %icc, winfix_mna
         rdpr   %tpc, %g3

        ba,pt   %xcc, etrap
         rd     %pc, %g7
        mov     %l4, %o1
        mov     %l5, %o2
        call    sun4v_do_mna
         add    %sp, PTREGS_OFF, %o0
        ba,a,pt %xcc, rtrap_clr_l6
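
        /* The rdpr of %tpc sits in a non-annulled delay slot, so it runs
         * whether or not the branch is taken; winfix_mna therefore always
         * sees the trap PC in %g3.  As with the D-TLB protection trap,
         * TL > 1 means the unaligned access came from trap-level code
         * (e.g. a window spill/fill) and needs the window fixup path.
         */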

        /* Privileged Action.  */
sun4v_privact:
        ba,pt   %xcc, etrap
         rd     %pc, %g7
        call    do_privact
         add    %sp, PTREGS_OFF, %o0
        ba,a,pt %xcc, rtrap_clr_l6

        /* Unaligned ldd float, tl0.  */
sun4v_lddfmna:
        ldxa    [%g0] ASI_SCRATCHPAD, %g2
        ldx     [%g2 + HV_FAULT_D_TYPE_OFFSET], %g3
        ldx     [%g2 + HV_FAULT_D_ADDR_OFFSET], %g4
        ldx     [%g2 + HV_FAULT_D_CTX_OFFSET], %g5
        sllx    %g3, 16, %g3
        or      %g5, %g3, %g5
        ba,pt   %xcc, etrap
         rd     %pc, %g7
        mov     %l4, %o1
        mov     %l5, %o2
        call    handle_lddfmna
         add    %sp, PTREGS_OFF, %o0
        ba,a,pt %xcc, rtrap_clr_l6

        /* Unaligned std float, tl0.  */
sun4v_stdfmna:
        ldxa    [%g0] ASI_SCRATCHPAD, %g2
        ldx     [%g2 + HV_FAULT_D_TYPE_OFFSET], %g3
        ldx     [%g2 + HV_FAULT_D_ADDR_OFFSET], %g4
        ldx     [%g2 + HV_FAULT_D_CTX_OFFSET], %g5
        sllx    %g3, 16, %g3
        or      %g5, %g3, %g5
        ba,pt   %xcc, etrap
         rd     %pc, %g7
        mov     %l4, %o1
        mov     %l5, %o2
        call    handle_stdfmna
         add    %sp, PTREGS_OFF, %o0
        ba,a,pt %xcc, rtrap_clr_l6

#define BRANCH_ALWAYS   0x10680000
#define NOP             0x01000000
#define SUN4V_DO_PATCH(OLD, NEW)        \
        sethi   %hi(NEW), %g1;          \
        or      %g1, %lo(NEW), %g1;     \
        sethi   %hi(OLD), %g2;          \
        or      %g2, %lo(OLD), %g2;     \
        sub     %g1, %g2, %g1;          \
        sethi   %hi(BRANCH_ALWAYS), %g3; \
        srl     %g1, 2, %g1;            \
        or      %g3, %lo(BRANCH_ALWAYS), %g3; \
        or      %g3, %g1, %g3;          \
        stw     %g3, [%g2];             \
        sethi   %hi(NOP), %g3;          \
        or      %g3, %lo(NOP), %g3;     \
        stw     %g3, [%g2 + 0x4];       \
        flush   %g2;
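
/* Each SUN4V_DO_PATCH expansion rewrites the first two instructions of the
 * old trap table handler so that it simply branches to the sun4v version.
 * Roughly, in C (illustrative only; "flush" stands for the FLUSH
 * instruction issued above):
 *
 *      u32 *old = (u32 *) OLD;
 *      old[0] = BRANCH_ALWAYS | ((NEW - OLD) >> 2);    // "ba,pt %xcc, NEW"
 *      old[1] = NOP;
 *      flush(old);             // make the I-cache see the new instructions
 *
 * The BPcc displacement field is a signed 19-bit word offset, so this only
 * encodes correctly while NEW lies within about +/- 1MB of OLD; the code
 * above relies on the displacement fitting rather than masking it.
 */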

        .globl  sun4v_patch_tlb_handlers
        .type   sun4v_patch_tlb_handlers,#function
sun4v_patch_tlb_handlers:
        SUN4V_DO_PATCH(tl0_iamiss, sun4v_itlb_miss)
        SUN4V_DO_PATCH(tl1_iamiss, sun4v_itlb_miss)
        SUN4V_DO_PATCH(tl0_damiss, sun4v_dtlb_miss)
        SUN4V_DO_PATCH(tl1_damiss, sun4v_dtlb_miss)
        SUN4V_DO_PATCH(tl0_daprot, sun4v_dtlb_prot)
        SUN4V_DO_PATCH(tl1_daprot, sun4v_dtlb_prot)
        SUN4V_DO_PATCH(tl0_iax, sun4v_iacc)
        SUN4V_DO_PATCH(tl1_iax, sun4v_iacc_tl1)
        SUN4V_DO_PATCH(tl0_dax, sun4v_dacc)
        SUN4V_DO_PATCH(tl1_dax, sun4v_dacc_tl1)
        SUN4V_DO_PATCH(tl0_mna, sun4v_mna)
        SUN4V_DO_PATCH(tl1_mna, sun4v_mna)
        SUN4V_DO_PATCH(tl0_lddfmna, sun4v_lddfmna)
        SUN4V_DO_PATCH(tl0_stdfmna, sun4v_stdfmna)
        SUN4V_DO_PATCH(tl0_privact, sun4v_privact)
        retl
         nop
        .size   sun4v_patch_tlb_handlers,.-sun4v_patch_tlb_handlers
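
/* sun4v_patch_tlb_handlers is expected to be called once, early in boot,
 * after the kernel has determined it is running on a sun4v (hypervisor)
 * machine, so that the generic trap table entries listed above are
 * redirected to the hypervisor-based handlers in this file.
 */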