/* arch/sparc64/kernel/ktlb.S: Kernel mapping TLB miss handling.
 *
 * Copyright (C) 1995, 1997, 2005, 2008 David S. Miller <davem@davemloft.net>
 * Copyright (C) 1996 Eddie C. Dost (ecd@brainaid.de)
 * Copyright (C) 1996 Miguel de Icaza (miguel@nuclecu.unam.mx)
 * Copyright (C) 1996,98,99 Jakub Jelinek (jj@sunsite.mff.cuni.cz)
 */

#include <asm/head.h>
#include <asm/asi.h>
#include <asm/page.h>
#include <asm/pgtable.h>
#include <asm/tsb.h>

	.text
	.align		32
kvmap_itlb:
	/* g6: TAG TARGET */
	mov		TLB_TAG_ACCESS, %g4
	ldxa		[%g4] ASI_IMMU, %g4

	/* sun4v_itlb_miss branches here with the missing virtual
	 * address already loaded into %g4
	 */
kvmap_itlb_4v:

kvmap_itlb_nonlinear:
	/* Catch kernel NULL pointer calls.  */
	sethi		%hi(PAGE_SIZE), %g5
	cmp		%g4, %g5
	bleu,pn		%xcc, kvmap_dtlb_longpath
	 nop
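	/* sethi %hi(PAGE_SIZE) materializes PAGE_SIZE itself (its low
	 * ten bits are zero), so the bleu above sends any fetch from
	 * [0, PAGE_SIZE] down the longpath; with 8K kernel pages, for
	 * example, a call through a NULL or nearly-NULL function
	 * pointer lands in the first page and is caught here.
	 */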
	KERN_TSB_LOOKUP_TL1(%g4, %g6, %g5, %g1, %g2, %g3, kvmap_itlb_load)
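	/* On a TSB hit the macro above branches to kvmap_itlb_load
	 * with the PTE in %g5; on a miss it falls through with the
	 * TSB entry address left in %g1 and the tag still in %g6
	 * (see the KERN_TSB_LOOKUP_TL1 definition in asm/tsb.h).
	 */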
kvmap_itlb_tsb_miss:
	sethi		%hi(LOW_OBP_ADDRESS), %g5
	cmp		%g4, %g5
	blu,pn		%xcc, kvmap_itlb_vmalloc_addr
	 mov		0x1, %g5
	sllx		%g5, 32, %g5
	cmp		%g4, %g5
	blu,pn		%xcc, kvmap_itlb_obp
	 nop
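	/* Classification so far: a vaddr below LOW_OBP_ADDRESS goes to
	 * the page-table walker, one in [LOW_OBP_ADDRESS, 4GB) belongs
	 * to the OpenBoot PROM and is resolved from its translation
	 * table, and anything at or above 4GB falls through to the
	 * walker just below.
	 */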
kvmap_itlb_vmalloc_addr:
	KERN_PGTABLE_WALK(%g4, %g5, %g2, kvmap_itlb_longpath)

	TSB_LOCK_TAG(%g1, %g2, %g7)

	/* Load and check PTE.  */
	ldxa		[%g5] ASI_PHYS_USE_EC, %g5
	mov		1, %g7
	sllx		%g7, TSB_TAG_INVALID_BIT, %g7
	brgez,a,pn	%g5, kvmap_itlb_longpath
	 TSB_STORE(%g1, %g7)
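	/* A PTE with bit 63 clear is invalid (brgez tests the sign
	 * bit), and the annul bit makes the TSB_STORE in the delay
	 * slot execute only when that branch is taken: it rewrites
	 * the tag as invalid, dropping the lock taken by TSB_LOCK_TAG
	 * before we bail to the longpath.
	 */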
	TSB_WRITE(%g1, %g5, %g6)

	/* fallthrough to TLB load */
kvmap_itlb_load:

661:	stxa		%g5, [%g0] ASI_ITLB_DATA_IN
	retry
	.section	.sun4v_2insn_patch, "ax"
	.word		661b
	nop
	nop
	.previous

	/* For sun4v the ASI_ITLB_DATA_IN store and the retry
	 * instruction get nop'd out and we get here to branch
	 * to the sun4v tlb load code.  The registers are set up
	 * as follows:
	 *
	 * %g4: vaddr
	 * %g5: PTE
	 * %g6: TAG
	 *
	 * The sun4v TLB load wants the PTE in %g3 so we fix that
	 * up here.
	 */
	ba,pt		%xcc, sun4v_itlb_load
	 mov		%g5, %g3
kvmap_itlb_longpath:

661:	rdpr		%pstate, %g5
	wrpr		%g5, PSTATE_AG | PSTATE_MG, %pstate
	.section	.sun4v_2insn_patch, "ax"
	.word		661b
	SET_GL(1)
	nop
	.previous
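	/* Entries in .sun4v_2insn_patch pair the address of a
	 * two-instruction sequence (recorded by the .word 661b) with
	 * two replacement instructions; on sun4v machines early boot
	 * code overwrites the %pstate-based global-register switch
	 * above with SET_GL(1) plus a nop.
	 */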
	rdpr		%tpc, %g5
	ba,pt		%xcc, sparc64_realfault_common
	 mov		FAULT_CODE_ITLB, %g4
kvmap_itlb_obp:
	OBP_TRANS_LOOKUP(%g4, %g5, %g2, %g3, kvmap_itlb_longpath)

	TSB_LOCK_TAG(%g1, %g2, %g7)

	TSB_WRITE(%g1, %g5, %g6)

	ba,pt		%xcc, kvmap_itlb_load
	 nop

kvmap_dtlb_obp:
	OBP_TRANS_LOOKUP(%g4, %g5, %g2, %g3, kvmap_dtlb_longpath)

	TSB_LOCK_TAG(%g1, %g2, %g7)

	TSB_WRITE(%g1, %g5, %g6)

	ba,pt		%xcc, kvmap_dtlb_load
	 nop
	.align		32
kvmap_dtlb_tsb4m_load:
	TSB_LOCK_TAG(%g1, %g2, %g7)
	TSB_WRITE(%g1, %g5, %g6)
	ba,pt		%xcc, kvmap_dtlb_load
	 nop
kvmap_dtlb:
	/* %g6: TAG TARGET */
	mov		TLB_TAG_ACCESS, %g4
	ldxa		[%g4] ASI_DMMU, %g4

	/* sun4v_dtlb_miss branches here with the missing virtual
	 * address already loaded into %g4
	 */
kvmap_dtlb_4v:
	brgez,pn	%g4, kvmap_dtlb_nonlinear
	 nop
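	/* Linear kernel mappings sit at the very top of the virtual
	 * address space (PAGE_OFFSET has bit 63 set), so brgez, which
	 * tests for a clear sign bit, diverts every other vaddr to
	 * the nonlinear path.
	 */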
#ifdef CONFIG_DEBUG_PAGEALLOC
	/* Index through the base page size TSB even for linear
	 * mappings when using page allocation debugging.
	 */
	KERN_TSB_LOOKUP_TL1(%g4, %g6, %g5, %g1, %g2, %g3, kvmap_dtlb_load)
#else
	/* Correct TAG_TARGET is already in %g6, check 4MB TSB.  */
	KERN_TSB4M_LOOKUP_TL1(%g6, %g5, %g1, %g2, %g3, kvmap_dtlb_load)
#endif
	/* TSB entry address left in %g1, lookup linear PTE.
	 * Must preserve %g1 and %g6 (TAG).
	 */
kvmap_dtlb_tsb4m_miss:
	/* Clear the PAGE_OFFSET top virtual bits, shift
	 * down to get PFN, and make sure PFN is in range.
	 */
	sllx		%g4, 21, %g5

	/* Check to see if we know about valid memory at the 4MB
	 * chunk this physical address will reside within.
	 */
	srlx		%g5, 21 + 41, %g2
	brnz,pn		%g2, kvmap_dtlb_longpath
	 nop
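	/* The sllx by 21 shifts the PAGE_OFFSET bits off the top,
	 * leaving paddr << 21 in %g5; shifting that right by 21 + 41
	 * isolates physical address bits 41 and up, so a nonzero
	 * result means the address exceeds the physical range handled
	 * here (41 bits).
	 */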
	/* This unconditional branch and delay-slot nop get patched
	 * with the sethi sequence below once the bitmap is properly
	 * set up.
	 */
	.globl		valid_addr_bitmap_insn
valid_addr_bitmap_insn:
	ba,pt		%xcc, 2f
	 nop
	.subsection	2
	.globl		valid_addr_bitmap_patch
valid_addr_bitmap_patch:
	sethi		%hi(sparc64_valid_addr_bitmap), %g7
	or		%g7, %lo(sparc64_valid_addr_bitmap), %g7
	.previous
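	/* %g5 still holds paddr << 21, so srlx by 21 + 22 yields the
	 * 4MB-chunk number (paddr >> 22).  That number is split into
	 * a word index (/ 64) and a bit index (% 64); each set bit in
	 * sparc64_valid_addr_bitmap marks a 4MB chunk of real RAM.
	 */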
	srlx		%g5, 21 + 22, %g2
	srlx		%g2, 6, %g5
	and		%g2, 63, %g2
	sllx		%g5, 3, %g5
	ldx		[%g7 + %g5], %g5
	mov		1, %g7
	sllx		%g7, %g2, %g7
	andcc		%g5, %g7, %g0
	be,pn		%xcc, kvmap_dtlb_longpath
2:	 sethi		%hi(kpte_linear_bitmap), %g2

	/* Get the 256MB physical address index. */
	sllx		%g4, 21, %g5
	or		%g2, %lo(kpte_linear_bitmap), %g2
	srlx		%g5, 21 + 28, %g5
	and		%g5, (32 - 1), %g7

	/* Divide by 32 to get the offset into the bitmask.  */
	srlx		%g5, 5, %g5
	add		%g7, %g7, %g7
	sllx		%g5, 3, %g5

	/* kern_linear_pte_xor[(mask >> shift) & 3] */
	ldx		[%g2 + %g5], %g2
	srlx		%g2, %g7, %g7
	sethi		%hi(kern_linear_pte_xor), %g5
	and		%g7, 3, %g7
	or		%g5, %lo(kern_linear_pte_xor), %g5
	sllx		%g7, 3, %g7
	ldx		[%g5 + %g7], %g2
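	/* Each 256MB region gets two bits in kpte_linear_bitmap (32
	 * fields per 64-bit word, hence the divide by 32 and the
	 * doubled %g7 shift count); the two-bit value indexes
	 * kern_linear_pte_xor[], selecting the XOR mask for that
	 * region's linear page size, which is applied to the vaddr
	 * just below.
	 */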
	.globl		kvmap_linear_patch
kvmap_linear_patch:
	ba,pt		%xcc, kvmap_dtlb_tsb4m_load
	 xor		%g2, %g4, %g5
kvmap_dtlb_vmalloc_addr:
	KERN_PGTABLE_WALK(%g4, %g5, %g2, kvmap_dtlb_longpath)

	TSB_LOCK_TAG(%g1, %g2, %g7)

	/* Load and check PTE.  */
	ldxa		[%g5] ASI_PHYS_USE_EC, %g5
	mov		1, %g7
	sllx		%g7, TSB_TAG_INVALID_BIT, %g7
	brgez,a,pn	%g5, kvmap_dtlb_longpath
	 TSB_STORE(%g1, %g7)
	TSB_WRITE(%g1, %g5, %g6)

	/* fallthrough to TLB load */
kvmap_dtlb_load:

661:	stxa		%g5, [%g0] ASI_DTLB_DATA_IN	! Reload TLB
	retry
	.section	.sun4v_2insn_patch, "ax"
	.word		661b
	nop
	nop
	.previous

	/* For sun4v the ASI_DTLB_DATA_IN store and the retry
	 * instruction get nop'd out and we get here to branch
	 * to the sun4v tlb load code.  The registers are set up
	 * as follows:
	 *
	 * %g4: vaddr
	 * %g5: PTE
	 * %g6: TAG
	 *
	 * The sun4v TLB load wants the PTE in %g3 so we fix that
	 * up here.
	 */
	ba,pt		%xcc, sun4v_dtlb_load
	 mov		%g5, %g3
#ifdef CONFIG_SPARSEMEM_VMEMMAP
kvmap_vmemmap:
	sub		%g4, %g5, %g5
	srlx		%g5, 22, %g5
	sethi		%hi(vmemmap_table), %g1
	sllx		%g5, 3, %g5
	or		%g1, %lo(vmemmap_table), %g1
	ba,pt		%xcc, kvmap_dtlb_load
	 ldx		[%g1 + %g5], %g5
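	/* %g5 carries VMEMMAP_BASE from the branch site, so the sub
	 * computes the offset into the vmemmap area; each 4MB span
	 * (offset >> 22) has a pre-computed TTE in vmemmap_table,
	 * which the delay-slot ldx hands straight to kvmap_dtlb_load.
	 */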
#endif
kvmap_dtlb_nonlinear:
	/* Catch kernel NULL pointer derefs.  */
	sethi		%hi(PAGE_SIZE), %g5
	cmp		%g4, %g5
	bleu,pn		%xcc, kvmap_dtlb_longpath
	 nop

#ifdef CONFIG_SPARSEMEM_VMEMMAP
	/* Do not use the TSB for vmemmap.  */
	mov		(VMEMMAP_BASE >> 40), %g5
	sllx		%g5, 40, %g5
	cmp		%g4, %g5
	bgeu,pn		%xcc, kvmap_vmemmap
	 nop
#endif
	KERN_TSB_LOOKUP_TL1(%g4, %g6, %g5, %g1, %g2, %g3, kvmap_dtlb_load)

kvmap_dtlb_tsbmiss:
	sethi		%hi(MODULES_VADDR), %g5
	cmp		%g4, %g5
	blu,pn		%xcc, kvmap_dtlb_longpath
	 mov		(VMALLOC_END >> 40), %g5
	sllx		%g5, 40, %g5
	cmp		%g4, %g5
	bgeu,pn		%xcc, kvmap_dtlb_longpath
	 nop
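	/* Only a vaddr in [MODULES_VADDR, VMALLOC_END) survives to the
	 * OBP check below; anything under the module area or past the
	 * end of vmalloc space is a bad kernel dereference and takes
	 * the longpath.
	 */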
kvmap_check_obp:
	sethi		%hi(LOW_OBP_ADDRESS), %g5
	cmp		%g4, %g5
	blu,pn		%xcc, kvmap_dtlb_vmalloc_addr
	 mov		0x1, %g5
	sllx		%g5, 32, %g5
	cmp		%g4, %g5
	blu,pn		%xcc, kvmap_dtlb_obp
	 nop

	ba,pt		%xcc, kvmap_dtlb_vmalloc_addr
	 nop
kvmap_dtlb_longpath:

661:	rdpr		%pstate, %g5
	wrpr		%g5, PSTATE_AG | PSTATE_MG, %pstate
	.section	.sun4v_2insn_patch, "ax"
	.word		661b
	SET_GL(1)
	ldxa		[%g0] ASI_SCRATCHPAD, %g5
	.previous

	rdpr		%tl, %g3
	cmp		%g3, 1

661:	mov		TLB_TAG_ACCESS, %g4
	ldxa		[%g4] ASI_DMMU, %g5
	.section	.sun4v_2insn_patch, "ax"
	.word		661b
	ldx		[%g5 + HV_FAULT_D_ADDR_OFFSET], %g5
	nop
	.previous

	be,pt		%xcc, sparc64_realfault_common
	 mov		FAULT_CODE_DTLB, %g4
	ba,pt		%xcc, winfix_trampoline
	 nop
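	/* If the miss came in at trap level 1 this is an ordinary
	 * fault and sparc64_realfault_common handles it; at deeper
	 * trap levels the miss occurred inside another trap handler
	 * (typically a register window spill/fill), so control is
	 * routed through winfix_trampoline instead.
	 */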