/* (web-scrape artifacts removed: file listing header "sun4v_tlb_miss.S, 12 KB"
 * and a fused run of display line numbers)
 */
/* sun4v_tlb_miss.S: Sun4v TLB miss handlers.
 *
 * Copyright (C) 2006 <davem@davemloft.net>
 */

	.text
	.align	32

	/* I-TLB miss, tl0 entry point.  Look up the user TSB entry for
	 * the missing virtual address; on a tag match fall through to
	 * sun4v_itlb_load, otherwise branch to the software page table
	 * walk or fault paths.
	 */
sun4v_itlb_miss:
	/* Load CPU ID into %g3.  */
	mov	SCRATCHPAD_CPUID, %g1
	ldxa	[%g1] ASI_SCRATCHPAD, %g3

	/* Load UTSB reg into %g1.  */
	ldxa	[%g1 + %g1] ASI_SCRATCHPAD, %g1

	/* Load &trap_block[smp_processor_id()] into %g2.  */
	sethi	%hi(trap_block), %g2
	or	%g2, %lo(trap_block), %g2
	sllx	%g3, TRAP_BLOCK_SZ_SHIFT, %g3
	add	%g2, %g3, %g2

	/* Create a TAG TARGET, "(vaddr>>22) | (ctx << 48)", in %g6.
	 * Branch if kernel TLB miss.  The kernel TSB and user TSB miss
	 * code wants the missing virtual address in %g4, so that value
	 * cannot be modified through the entirety of this handler.
	 */
	ldx	[%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_I_ADDR_OFFSET], %g4
	ldx	[%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_I_CTX_OFFSET], %g5
	srlx	%g4, 22, %g3
	sllx	%g5, 48, %g6
	or	%g6, %g3, %g6
	brz,pn	%g5, kvmap_itlb_4v	! context zero means a kernel miss
	 nop

	/* Create TSB pointer.  This is something like:
	 *
	 * index_mask = (512 << (tsb_reg & 0x7UL)) - 1UL;
	 * tsb_base = tsb_reg & ~0x7UL;
	 */
	and	%g1, 0x7, %g3		! low 3 bits of UTSB reg encode the size
	andn	%g1, 0x7, %g1		! remaining bits are the TSB base
	mov	512, %g7
	sllx	%g7, %g3, %g7
	sub	%g7, 1, %g7

	/* TSB index mask is in %g7, tsb base is in %g1.  Compute
	 * the TSB entry pointer into %g1:
	 *
	 * tsb_index = ((vaddr >> PAGE_SHIFT) & tsb_mask);
	 * tsb_ptr = tsb_base + (tsb_index * 16);
	 */
	srlx	%g4, PAGE_SHIFT, %g3
	and	%g3, %g7, %g3
	sllx	%g3, 4, %g3		! each TSB entry is 16 bytes
	add	%g1, %g3, %g1

	/* Load TSB tag/pte into %g2/%g3 and compare the tag.  */
	ldda	[%g1] ASI_QUAD_LDD_PHYS, %g2
	cmp	%g2, %g6
	sethi	%hi(_PAGE_EXEC), %g7
	bne,a,pn %xcc, tsb_miss_page_table_walk
	 mov	FAULT_CODE_ITLB, %g3
	andcc	%g3, %g7, %g0		! PTE must have execute permission
	be,a,pn	%xcc, tsb_do_fault
	 mov	FAULT_CODE_ITLB, %g3

	/* We have a valid entry, make hypervisor call to load
	 * I-TLB and return from trap.
	 *
	 * %g3: PTE
	 * %g4: vaddr
	 * %g6: TAG TARGET (only "CTX << 48" part matters)
	 */
	/* Load a translation into the I-TLB via the hypervisor and
	 * retry the faulting instruction.  The live %o registers are
	 * stashed in free globals around the hypervisor trap.
	 */
sun4v_itlb_load:
	mov	%o0, %g1		! save %o0
	mov	%o1, %g2		! save %o1
	mov	%o2, %g5		! save %o2
	mov	%o3, %g7		! save %o3
	mov	%g4, %o0		! vaddr
	srlx	%g6, 48, %o1		! ctx (upper 16 bits of TAG TARGET)
	mov	%g3, %o2		! PTE
	mov	HV_MMU_IMMU, %o3	! flags: instruction MMU
	ta	HV_MMU_MAP_ADDR_TRAP	! hypervisor: map vaddr/ctx -> PTE
	mov	%g1, %o0		! restore %o0
	mov	%g2, %o1		! restore %o1
	mov	%g5, %o2		! restore %o2
	mov	%g7, %o3		! restore %o3
	retry				! re-execute the trapped instruction
  81. sun4v_dtlb_miss:
  82. /* Load CPU ID into %g3. */
  83. mov SCRATCHPAD_CPUID, %g1
  84. ldxa [%g1] ASI_SCRATCHPAD, %g3
  85. /* Load UTSB reg into %g1. */
  86. ldxa [%g1 + %g1] ASI_SCRATCHPAD, %g1
  87. /* Load &trap_block[smp_processor_id()] into %g2. */
  88. sethi %hi(trap_block), %g2
  89. or %g2, %lo(trap_block), %g2
  90. sllx %g3, TRAP_BLOCK_SZ_SHIFT, %g3
  91. add %g2, %g3, %g2
  92. /* Create a TAG TARGET, "(vaddr>>22) | (ctx << 48)", in %g6.
  93. * Branch if kernel TLB miss. The kernel TSB and user TSB miss
  94. * code wants the missing virtual address in %g4, so that value
  95. * cannot be modified through the entirety of this handler.
  96. */
  97. ldx [%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_D_ADDR_OFFSET], %g4
  98. ldx [%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_D_CTX_OFFSET], %g5
  99. srlx %g4, 22, %g3
  100. sllx %g5, 48, %g6
  101. or %g6, %g3, %g6
  102. brz,pn %g5, kvmap_dtlb_4v
  103. nop
  104. /* Create TSB pointer. This is something like:
  105. *
  106. * index_mask = (512 << (tsb_reg & 0x7UL)) - 1UL;
  107. * tsb_base = tsb_reg & ~0x7UL;
  108. */
  109. and %g1, 0x7, %g3
  110. andn %g1, 0x7, %g1
  111. mov 512, %g7
  112. sllx %g7, %g3, %g7
  113. sub %g7, 1, %g7
  114. /* TSB index mask is in %g7, tsb base is in %g1. Compute
  115. * the TSB entry pointer into %g1:
  116. *
  117. * tsb_index = ((vaddr >> PAGE_SHIFT) & tsb_mask);
  118. * tsb_ptr = tsb_base + (tsb_index * 16);
  119. */
  120. srlx %g4, PAGE_SHIFT, %g3
  121. and %g3, %g7, %g3
  122. sllx %g3, 4, %g3
  123. add %g1, %g3, %g1
  124. /* Load TSB tag/pte into %g2/%g3 and compare the tag. */
  125. ldda [%g1] ASI_QUAD_LDD_PHYS, %g2
  126. cmp %g2, %g6
  127. bne,a,pn %xcc, tsb_miss_page_table_walk
  128. mov FAULT_CODE_ITLB, %g3
  129. /* We have a valid entry, make hypervisor call to load
  130. * D-TLB and return from trap.
  131. *
  132. * %g3: PTE
  133. * %g4: vaddr
  134. * %g6: TAG TARGET (only "CTX << 48" part matters)
  135. */
	/* Load a translation into the D-TLB via the hypervisor and
	 * retry the faulting instruction.  The live %o registers are
	 * stashed in free globals around the hypervisor trap.
	 */
sun4v_dtlb_load:
	mov	%o0, %g1		! save %o0
	mov	%o1, %g2		! save %o1
	mov	%o2, %g5		! save %o2
	mov	%o3, %g7		! save %o3
	mov	%g4, %o0		! vaddr
	srlx	%g6, 48, %o1		! ctx (upper 16 bits of TAG TARGET)
	mov	%g3, %o2		! PTE
	mov	HV_MMU_DMMU, %o3	! flags: data MMU
	ta	HV_MMU_MAP_ADDR_TRAP	! hypervisor: map vaddr/ctx -> PTE
	mov	%g1, %o0		! restore %o0
	mov	%g2, %o1		! restore %o1
	mov	%g5, %o2		! restore %o2
	mov	%g7, %o3		! restore %o3
	retry				! re-execute the trapped instruction
	/* D-TLB protection violation (e.g. write to a read-only
	 * mapping).  Loads the fault address and dispatches to the
	 * common real-fault code with a DTLB|WRITE fault code.
	 */
sun4v_dtlb_prot:
	/* Load CPU ID into %g3.  */
	mov	SCRATCHPAD_CPUID, %g1
	ldxa	[%g1] ASI_SCRATCHPAD, %g3

	/* Load &trap_block[smp_processor_id()] into %g2.  */
	sethi	%hi(trap_block), %g2
	or	%g2, %lo(trap_block), %g2
	sllx	%g3, TRAP_BLOCK_SZ_SHIFT, %g3
	add	%g2, %g3, %g2

	ldx	[%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_D_ADDR_OFFSET], %g5
	/* If the trap occurred above trap level 1, take the register
	 * window fixup path instead of the normal fault path.
	 */
	rdpr	%tl, %g1
	cmp	%g1, 1
	bgu,pn	%xcc, winfix_trampoline
	 nop
	ba,pt	%xcc, sparc64_realfault_common
	 mov	FAULT_CODE_DTLB | FAULT_CODE_WRITE, %g4
/* Called from trap table with &trap_block[smp_processor_id()] in
 * %g5 and SCRATCHPAD_UTSBREG1 contents in %g1.
 *
 * I-TSB miss: build the TAG TARGET, branch to the kernel path for
 * context zero, otherwise join the common TSB miss code with an
 * ITLB fault code.
 */
sun4v_itsb_miss:
	ldx	[%g5 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_I_ADDR_OFFSET], %g4
	ldx	[%g5 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_I_CTX_OFFSET], %g5
	srlx	%g4, 22, %g7
	sllx	%g5, 48, %g6
	or	%g6, %g7, %g6		! %g6 = TAG TARGET
	brz,pn	%g5, kvmap_itlb_4v	! context zero means a kernel miss
	 nop
	ba,pt	%xcc, sun4v_tsb_miss_common
	 mov	FAULT_CODE_ITLB, %g3
/* Called from trap table with &trap_block[smp_processor_id()] in
 * %g5 and SCRATCHPAD_UTSBREG1 contents in %g1.
 *
 * D-TSB miss: same scheme as sun4v_itsb_miss, but reads the D-side
 * fault registers and falls straight through into
 * sun4v_tsb_miss_common with a DTLB fault code.
 */
sun4v_dtsb_miss:
	ldx	[%g5 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_D_ADDR_OFFSET], %g4
	ldx	[%g5 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_D_CTX_OFFSET], %g5
	srlx	%g4, 22, %g7
	sllx	%g5, 48, %g6
	or	%g6, %g7, %g6		! %g6 = TAG TARGET
	brz,pn	%g5, kvmap_dtlb_4v	! context zero means a kernel miss
	 nop
	mov	FAULT_CODE_DTLB, %g3	! falls through to sun4v_tsb_miss_common
/* Create TSB pointer into %g1.  This is something like:
 *
 * index_mask = (512 << (tsb_reg & 0x7UL)) - 1UL;
 * tsb_base = tsb_reg & ~0x7UL;
 * tsb_index = ((vaddr >> PAGE_SHIFT) & tsb_mask);
 * tsb_ptr = tsb_base + (tsb_index * 16);
 *
 * Entered with %g1 = UTSB register value, %g3 = FAULT_CODE_*,
 * %g4 = missing vaddr (per the callers above).
 */
sun4v_tsb_miss_common:
	and	%g1, 0x7, %g2		! low 3 bits encode the TSB size
	andn	%g1, 0x7, %g1		! remaining bits are the TSB base
	mov	512, %g7
	sllx	%g7, %g2, %g7
	sub	%g7, 1, %g7		! %g7 = index mask
	srlx	%g4, PAGE_SHIFT, %g2
	and	%g2, %g7, %g2
	sllx	%g2, 4, %g2		! each TSB entry is 16 bytes
	ba,pt	%xcc, tsb_miss_page_table_walk
	 add	%g1, %g2, %g1		! %g1 = TSB entry pointer
	/* Instruction Access Exception, tl0.
	 * Gathers fault type/addr/ctx from the per-cpu fault area,
	 * packs (type << 16) | ctx into %g5, then enters the kernel
	 * via etrap and calls the C handler.
	 */
sun4v_iacc:
	mov	SCRATCHPAD_CPUID, %g1
	ldxa	[%g1] ASI_SCRATCHPAD, %g3
	sethi	%hi(trap_block), %g2
	or	%g2, %lo(trap_block), %g2
	sllx	%g3, TRAP_BLOCK_SZ_SHIFT, %g3
	add	%g2, %g3, %g2		! %g2 = &trap_block[cpu]
	ldx	[%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_I_TYPE_OFFSET], %g3
	ldx	[%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_I_ADDR_OFFSET], %g4
	ldx	[%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_I_CTX_OFFSET], %g5
	sllx	%g3, 16, %g3
	or	%g5, %g3, %g5		! %g5 = (type << 16) | ctx
	ba,pt	%xcc, etrap
	 rd	%pc, %g7		! etrap return point
	mov	%l4, %o1		! fault addr (presumably %g4 preserved
	mov	%l5, %o2		!  by etrap in %l4/%l5 -- see etrap)
	call	sun4v_insn_access_exception
	 add	%sp, PTREGS_OFF, %o0	! arg0 = pt_regs
	ba,a,pt	%xcc, rtrap_clr_l6
	/* Instruction Access Exception, tl1.
	 * Same as sun4v_iacc but entered at trap level 1, so it uses
	 * etraptl1 and the _tl1 C handler.
	 */
sun4v_iacc_tl1:
	mov	SCRATCHPAD_CPUID, %g1
	ldxa	[%g1] ASI_SCRATCHPAD, %g3
	sethi	%hi(trap_block), %g2
	or	%g2, %lo(trap_block), %g2
	sllx	%g3, TRAP_BLOCK_SZ_SHIFT, %g3
	add	%g2, %g3, %g2		! %g2 = &trap_block[cpu]
	ldx	[%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_I_TYPE_OFFSET], %g3
	ldx	[%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_I_ADDR_OFFSET], %g4
	ldx	[%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_I_CTX_OFFSET], %g5
	sllx	%g3, 16, %g3
	or	%g5, %g3, %g5		! %g5 = (type << 16) | ctx
	ba,pt	%xcc, etraptl1
	 rd	%pc, %g7		! etraptl1 return point
	mov	%l4, %o1		! fault addr / type+ctx (presumably
	mov	%l5, %o2		!  preserved by etraptl1 -- see etrap)
	call	sun4v_insn_access_exception_tl1
	 add	%sp, PTREGS_OFF, %o0	! arg0 = pt_regs
	ba,a,pt	%xcc, rtrap_clr_l6
	/* Data Access Exception, tl0.
	 * Mirrors sun4v_iacc but reads the D-side fault registers and
	 * calls the data-access C handler.
	 */
sun4v_dacc:
	mov	SCRATCHPAD_CPUID, %g1
	ldxa	[%g1] ASI_SCRATCHPAD, %g3
	sethi	%hi(trap_block), %g2
	or	%g2, %lo(trap_block), %g2
	sllx	%g3, TRAP_BLOCK_SZ_SHIFT, %g3
	add	%g2, %g3, %g2		! %g2 = &trap_block[cpu]
	ldx	[%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_D_TYPE_OFFSET], %g3
	ldx	[%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_D_ADDR_OFFSET], %g4
	ldx	[%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_D_CTX_OFFSET], %g5
	sllx	%g3, 16, %g3
	or	%g5, %g3, %g5		! %g5 = (type << 16) | ctx
	ba,pt	%xcc, etrap
	 rd	%pc, %g7		! etrap return point
	mov	%l4, %o1		! fault addr / type+ctx (presumably
	mov	%l5, %o2		!  preserved by etrap -- see etrap)
	call	sun4v_data_access_exception
	 add	%sp, PTREGS_OFF, %o0	! arg0 = pt_regs
	ba,a,pt	%xcc, rtrap_clr_l6
	/* Data Access Exception, tl1.
	 * Same as sun4v_dacc but entered at trap level 1, so it uses
	 * etraptl1 and the _tl1 C handler.
	 */
sun4v_dacc_tl1:
	mov	SCRATCHPAD_CPUID, %g1
	ldxa	[%g1] ASI_SCRATCHPAD, %g3
	sethi	%hi(trap_block), %g2
	or	%g2, %lo(trap_block), %g2
	sllx	%g3, TRAP_BLOCK_SZ_SHIFT, %g3
	add	%g2, %g3, %g2		! %g2 = &trap_block[cpu]
	ldx	[%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_D_TYPE_OFFSET], %g3
	ldx	[%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_D_ADDR_OFFSET], %g4
	ldx	[%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_D_CTX_OFFSET], %g5
	sllx	%g3, 16, %g3
	or	%g5, %g3, %g5		! %g5 = (type << 16) | ctx
	ba,pt	%xcc, etraptl1
	 rd	%pc, %g7		! etraptl1 return point
	mov	%l4, %o1		! fault addr / type+ctx (presumably
	mov	%l5, %o2		!  preserved by etraptl1 -- see etrap)
	call	sun4v_data_access_exception_tl1
	 add	%sp, PTREGS_OFF, %o0	! arg0 = pt_regs
	ba,a,pt	%xcc, rtrap_clr_l6
  290. /* Memory Address Unaligned. */
  291. sun4v_mna:
  292. mov SCRATCHPAD_CPUID, %g1
  293. ldxa [%g1] ASI_SCRATCHPAD, %g3
  294. sethi %hi(trap_block), %g2
  295. or %g2, %lo(trap_block), %g2
  296. sllx %g3, TRAP_BLOCK_SZ_SHIFT, %g3
  297. add %g2, %g3, %g2
  298. mov HV_FAULT_TYPE_UNALIGNED, %g3
  299. ldx [%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_D_ADDR_OFFSET], %g4
  300. ldx [%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_D_CTX_OFFSET], %g5
  301. sllx %g3, 16, %g3
  302. or %g5, %g3, %g5
  303. /* Window fixup? */
  304. rdpr %tl, %g2
  305. cmp %g2, 1
  306. bgu,pn %icc, winfix_mna
  307. rdpr %tpc, %g3
  308. ba,pt %xcc, etrap
  309. rd %pc, %g7
  310. mov %l4, %o1
  311. mov %l5, %o2
  312. call sun4v_mna
  313. add %sp, PTREGS_OFF, %o0
  314. ba,a,pt %xcc, rtrap_clr_l6
	/* Privileged Action.  No hypervisor fault state to gather;
	 * go straight to etrap and the C handler.
	 */
sun4v_privact:
	ba,pt	%xcc, etrap
	 rd	%pc, %g7		! etrap return point
	call	do_privact
	 add	%sp, PTREGS_OFF, %o0	! arg0 = pt_regs
	ba,a,pt	%xcc, rtrap_clr_l6
	/* Unaligned ldd float, tl0.
	 * Gathers D-side fault type/addr/ctx, packs (type << 16) | ctx
	 * into %g5, then calls the lddf-unaligned fixup handler.
	 */
sun4v_lddfmna:
	mov	SCRATCHPAD_CPUID, %g1
	ldxa	[%g1] ASI_SCRATCHPAD, %g3
	sethi	%hi(trap_block), %g2
	or	%g2, %lo(trap_block), %g2
	sllx	%g3, TRAP_BLOCK_SZ_SHIFT, %g3
	add	%g2, %g3, %g2		! %g2 = &trap_block[cpu]
	ldx	[%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_D_TYPE_OFFSET], %g3
	ldx	[%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_D_ADDR_OFFSET], %g4
	ldx	[%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_D_CTX_OFFSET], %g5
	sllx	%g3, 16, %g3
	or	%g5, %g3, %g5		! %g5 = (type << 16) | ctx
	ba,pt	%xcc, etrap
	 rd	%pc, %g7		! etrap return point
	mov	%l4, %o1
	mov	%l5, %o2
	call	handle_lddfmna
	 add	%sp, PTREGS_OFF, %o0	! arg0 = pt_regs
	ba,a,pt	%xcc, rtrap_clr_l6
	/* Unaligned std float, tl0.
	 * Identical shape to sun4v_lddfmna but dispatches to the
	 * stdf-unaligned fixup handler.
	 */
sun4v_stdfmna:
	mov	SCRATCHPAD_CPUID, %g1
	ldxa	[%g1] ASI_SCRATCHPAD, %g3
	sethi	%hi(trap_block), %g2
	or	%g2, %lo(trap_block), %g2
	sllx	%g3, TRAP_BLOCK_SZ_SHIFT, %g3
	add	%g2, %g3, %g2		! %g2 = &trap_block[cpu]
	ldx	[%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_D_TYPE_OFFSET], %g3
	ldx	[%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_D_ADDR_OFFSET], %g4
	ldx	[%g2 + TRAP_PER_CPU_FAULT_INFO + HV_FAULT_D_CTX_OFFSET], %g5
	sllx	%g3, 16, %g3
	or	%g5, %g3, %g5		! %g5 = (type << 16) | ctx
	ba,pt	%xcc, etrap
	 rd	%pc, %g7		! etrap return point
	mov	%l4, %o1
	mov	%l5, %o2
	call	handle_stdfmna
	 add	%sp, PTREGS_OFF, %o0	! arg0 = pt_regs
	ba,a,pt	%xcc, rtrap_clr_l6
#define BRANCH_ALWAYS	0x10680000
#define NOP		0x01000000

/* Patch the two instructions at OLD to be a branch-always to NEW
 * followed by a NOP, then flush the modified instructions.  The word
 * displacement ((NEW - OLD) >> 2) is OR'ed into the low bits of the
 * branch opcode.  Clobbers %g1-%g3.
 */
#define SUN4V_DO_PATCH(OLD, NEW)	\
	sethi	%hi(NEW), %g1;		\
	or	%g1, %lo(NEW), %g1;	\
	sethi	%hi(OLD), %g2;		\
	or	%g2, %lo(OLD), %g2;	\
	sub	%g1, %g2, %g1;		\
	sethi	%hi(BRANCH_ALWAYS), %g3; \
	srl	%g1, 2, %g1;		\
	or	%g3, %lo(BRANCH_ALWAYS), %g3; \
	or	%g3, %g1, %g3;		\
	stw	%g3, [%g2];		\
	sethi	%hi(NOP), %g3;		\
	or	%g3, %lo(NOP), %g3;	\
	stw	%g3, [%g2 + 0x4];	\
	flush	%g2;

	/* Redirect the generic trap-table TLB/exception entries to the
	 * sun4v handlers defined above.
	 */
	.globl	sun4v_patch_tlb_handlers
	.type	sun4v_patch_tlb_handlers,#function
sun4v_patch_tlb_handlers:
	SUN4V_DO_PATCH(tl0_iamiss, sun4v_itlb_miss)
	SUN4V_DO_PATCH(tl1_iamiss, sun4v_itlb_miss)
	SUN4V_DO_PATCH(tl0_damiss, sun4v_dtlb_miss)
	SUN4V_DO_PATCH(tl1_damiss, sun4v_dtlb_miss)
	SUN4V_DO_PATCH(tl0_daprot, sun4v_dtlb_prot)
	SUN4V_DO_PATCH(tl1_daprot, sun4v_dtlb_prot)
	SUN4V_DO_PATCH(tl0_iax, sun4v_iacc)
	SUN4V_DO_PATCH(tl1_iax, sun4v_iacc_tl1)
	SUN4V_DO_PATCH(tl0_dax, sun4v_dacc)
	SUN4V_DO_PATCH(tl1_dax, sun4v_dacc_tl1)
	SUN4V_DO_PATCH(tl0_mna, sun4v_mna)
	SUN4V_DO_PATCH(tl1_mna, sun4v_mna)
	SUN4V_DO_PATCH(tl0_lddfmna, sun4v_lddfmna)
	SUN4V_DO_PATCH(tl0_stdfmna, sun4v_stdfmna)
	SUN4V_DO_PATCH(tl0_privact, sun4v_privact)
	retl
	 nop
	.size	sun4v_patch_tlb_handlers,.-sun4v_patch_tlb_handlers