/* arch/parisc/kernel/pacache.S */
/*
 *  PARISC TLB and cache flushing support
 *  Copyright (C) 2000-2001 Hewlett-Packard (John Marvin)
 *  Copyright (C) 2001 Matthew Wilcox (willy at parisc-linux.org)
 *  Copyright (C) 2002 Richard Hirst (rhirst with parisc-linux.org)
 *
 *  This program is free software; you can redistribute it and/or modify
 *  it under the terms of the GNU General Public License as published by
 *  the Free Software Foundation; either version 2, or (at your option)
 *  any later version.
 *
 *  This program is distributed in the hope that it will be useful,
 *  but WITHOUT ANY WARRANTY; without even the implied warranty of
 *  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 *  GNU General Public License for more details.
 *
 *  You should have received a copy of the GNU General Public License
 *  along with this program; if not, write to the Free Software
 *  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 */
/*
 * NOTE: fdc,fic, and pdc instructions that use base register modification
 *	 should only use index and base registers that are not shadowed,
 *	 so that the fast path emulation in the non access miss handler
 *	 can be used.
 */

#ifdef CONFIG_64BIT
	.level	2.0w
#else
	.level	2.0
#endif

#include <asm/psw.h>
#include <asm/assembly.h>
#include <asm/pgtable.h>
#include <asm/cache.h>
#include <linux/linkage.h>

	.text
	.align	128
  39. ENTRY(flush_tlb_all_local)
  40. .proc
  41. .callinfo NO_CALLS
  42. .entry
  43. /*
  44. * The pitlbe and pdtlbe instructions should only be used to
  45. * flush the entire tlb. Also, there needs to be no intervening
  46. * tlb operations, e.g. tlb misses, so the operation needs
  47. * to happen in real mode with all interruptions disabled.
  48. */
  49. /* pcxt_ssm_bug - relied upon translation! PA 2.0 Arch. F-4 and F-5 */
  50. rsm PSW_SM_I, %r19 /* save I-bit state */
  51. load32 PA(1f), %r1
  52. nop
  53. nop
  54. nop
  55. nop
  56. nop
  57. rsm PSW_SM_Q, %r0 /* prep to load iia queue */
  58. mtctl %r0, %cr17 /* Clear IIASQ tail */
  59. mtctl %r0, %cr17 /* Clear IIASQ head */
  60. mtctl %r1, %cr18 /* IIAOQ head */
  61. ldo 4(%r1), %r1
  62. mtctl %r1, %cr18 /* IIAOQ tail */
  63. load32 REAL_MODE_PSW, %r1
  64. mtctl %r1, %ipsw
  65. rfi
  66. nop
  67. 1: load32 PA(cache_info), %r1
  68. /* Flush Instruction Tlb */
  69. LDREG ITLB_SID_BASE(%r1), %r20
  70. LDREG ITLB_SID_STRIDE(%r1), %r21
  71. LDREG ITLB_SID_COUNT(%r1), %r22
  72. LDREG ITLB_OFF_BASE(%r1), %arg0
  73. LDREG ITLB_OFF_STRIDE(%r1), %arg1
  74. LDREG ITLB_OFF_COUNT(%r1), %arg2
  75. LDREG ITLB_LOOP(%r1), %arg3
  76. addib,COND(=) -1, %arg3, fitoneloop /* Preadjust and test */
  77. movb,<,n %arg3, %r31, fitdone /* If loop < 0, skip */
  78. copy %arg0, %r28 /* Init base addr */
  79. fitmanyloop: /* Loop if LOOP >= 2 */
  80. mtsp %r20, %sr1
  81. add %r21, %r20, %r20 /* increment space */
  82. copy %arg2, %r29 /* Init middle loop count */
  83. fitmanymiddle: /* Loop if LOOP >= 2 */
  84. addib,COND(>) -1, %r31, fitmanymiddle /* Adjusted inner loop decr */
  85. pitlbe 0(%sr1, %r28)
  86. pitlbe,m %arg1(%sr1, %r28) /* Last pitlbe and addr adjust */
  87. addib,COND(>) -1, %r29, fitmanymiddle /* Middle loop decr */
  88. copy %arg3, %r31 /* Re-init inner loop count */
  89. movb,tr %arg0, %r28, fitmanyloop /* Re-init base addr */
  90. addib,COND(<=),n -1, %r22, fitdone /* Outer loop count decr */
  91. fitoneloop: /* Loop if LOOP = 1 */
  92. mtsp %r20, %sr1
  93. copy %arg0, %r28 /* init base addr */
  94. copy %arg2, %r29 /* init middle loop count */
  95. fitonemiddle: /* Loop if LOOP = 1 */
  96. addib,COND(>) -1, %r29, fitonemiddle /* Middle loop count decr */
  97. pitlbe,m %arg1(%sr1, %r28) /* pitlbe for one loop */
  98. addib,COND(>) -1, %r22, fitoneloop /* Outer loop count decr */
  99. add %r21, %r20, %r20 /* increment space */
  100. fitdone:
  101. /* Flush Data Tlb */
  102. LDREG DTLB_SID_BASE(%r1), %r20
  103. LDREG DTLB_SID_STRIDE(%r1), %r21
  104. LDREG DTLB_SID_COUNT(%r1), %r22
  105. LDREG DTLB_OFF_BASE(%r1), %arg0
  106. LDREG DTLB_OFF_STRIDE(%r1), %arg1
  107. LDREG DTLB_OFF_COUNT(%r1), %arg2
  108. LDREG DTLB_LOOP(%r1), %arg3
  109. addib,COND(=) -1, %arg3, fdtoneloop /* Preadjust and test */
  110. movb,<,n %arg3, %r31, fdtdone /* If loop < 0, skip */
  111. copy %arg0, %r28 /* Init base addr */
  112. fdtmanyloop: /* Loop if LOOP >= 2 */
  113. mtsp %r20, %sr1
  114. add %r21, %r20, %r20 /* increment space */
  115. copy %arg2, %r29 /* Init middle loop count */
  116. fdtmanymiddle: /* Loop if LOOP >= 2 */
  117. addib,COND(>) -1, %r31, fdtmanymiddle /* Adjusted inner loop decr */
  118. pdtlbe 0(%sr1, %r28)
  119. pdtlbe,m %arg1(%sr1, %r28) /* Last pdtlbe and addr adjust */
  120. addib,COND(>) -1, %r29, fdtmanymiddle /* Middle loop decr */
  121. copy %arg3, %r31 /* Re-init inner loop count */
  122. movb,tr %arg0, %r28, fdtmanyloop /* Re-init base addr */
  123. addib,COND(<=),n -1, %r22,fdtdone /* Outer loop count decr */
  124. fdtoneloop: /* Loop if LOOP = 1 */
  125. mtsp %r20, %sr1
  126. copy %arg0, %r28 /* init base addr */
  127. copy %arg2, %r29 /* init middle loop count */
  128. fdtonemiddle: /* Loop if LOOP = 1 */
  129. addib,COND(>) -1, %r29, fdtonemiddle /* Middle loop count decr */
  130. pdtlbe,m %arg1(%sr1, %r28) /* pdtlbe for one loop */
  131. addib,COND(>) -1, %r22, fdtoneloop /* Outer loop count decr */
  132. add %r21, %r20, %r20 /* increment space */
  133. fdtdone:
  134. /*
  135. * Switch back to virtual mode
  136. */
  137. /* pcxt_ssm_bug */
  138. rsm PSW_SM_I, %r0
  139. load32 2f, %r1
  140. nop
  141. nop
  142. nop
  143. nop
  144. nop
  145. rsm PSW_SM_Q, %r0 /* prep to load iia queue */
  146. mtctl %r0, %cr17 /* Clear IIASQ tail */
  147. mtctl %r0, %cr17 /* Clear IIASQ head */
  148. mtctl %r1, %cr18 /* IIAOQ head */
  149. ldo 4(%r1), %r1
  150. mtctl %r1, %cr18 /* IIAOQ tail */
  151. load32 KERNEL_PSW, %r1
  152. or %r1, %r19, %r1 /* I-bit to state on entry */
  153. mtctl %r1, %ipsw /* restore I-bit (entire PSW) */
  154. rfi
  155. nop
  156. 2: bv %r0(%r2)
  157. nop
  158. .exit
  159. .procend
  160. ENDPROC(flush_tlb_all_local)
  161. .import cache_info,data
  162. ENTRY(flush_instruction_cache_local)
  163. .proc
  164. .callinfo NO_CALLS
  165. .entry
  166. load32 cache_info, %r1
  167. /* Flush Instruction Cache */
  168. LDREG ICACHE_BASE(%r1), %arg0
  169. LDREG ICACHE_STRIDE(%r1), %arg1
  170. LDREG ICACHE_COUNT(%r1), %arg2
  171. LDREG ICACHE_LOOP(%r1), %arg3
  172. rsm PSW_SM_I, %r22 /* No mmgt ops during loop*/
  173. mtsp %r0, %sr1
  174. addib,COND(=) -1, %arg3, fioneloop /* Preadjust and test */
  175. movb,<,n %arg3, %r31, fisync /* If loop < 0, do sync */
  176. fimanyloop: /* Loop if LOOP >= 2 */
  177. addib,COND(>) -1, %r31, fimanyloop /* Adjusted inner loop decr */
  178. fice %r0(%sr1, %arg0)
  179. fice,m %arg1(%sr1, %arg0) /* Last fice and addr adjust */
  180. movb,tr %arg3, %r31, fimanyloop /* Re-init inner loop count */
  181. addib,COND(<=),n -1, %arg2, fisync /* Outer loop decr */
  182. fioneloop: /* Loop if LOOP = 1 */
  183. /* Some implementations may flush with a single fice instruction */
  184. cmpib,COND(>>=),n 15, %arg2, fioneloop2
  185. fioneloop1:
  186. fice,m %arg1(%sr1, %arg0)
  187. fice,m %arg1(%sr1, %arg0)
  188. fice,m %arg1(%sr1, %arg0)
  189. fice,m %arg1(%sr1, %arg0)
  190. fice,m %arg1(%sr1, %arg0)
  191. fice,m %arg1(%sr1, %arg0)
  192. fice,m %arg1(%sr1, %arg0)
  193. fice,m %arg1(%sr1, %arg0)
  194. fice,m %arg1(%sr1, %arg0)
  195. fice,m %arg1(%sr1, %arg0)
  196. fice,m %arg1(%sr1, %arg0)
  197. fice,m %arg1(%sr1, %arg0)
  198. fice,m %arg1(%sr1, %arg0)
  199. fice,m %arg1(%sr1, %arg0)
  200. fice,m %arg1(%sr1, %arg0)
  201. addib,COND(>) -16, %arg2, fioneloop1
  202. fice,m %arg1(%sr1, %arg0)
  203. /* Check if done */
  204. cmpb,COND(=),n %arg2, %r0, fisync /* Predict branch taken */
  205. fioneloop2:
  206. addib,COND(>) -1, %arg2, fioneloop2 /* Outer loop count decr */
  207. fice,m %arg1(%sr1, %arg0) /* Fice for one loop */
  208. fisync:
  209. sync
  210. mtsm %r22 /* restore I-bit */
  211. bv %r0(%r2)
  212. nop
  213. .exit
  214. .procend
  215. ENDPROC(flush_instruction_cache_local)
  216. .import cache_info, data
  217. ENTRY(flush_data_cache_local)
  218. .proc
  219. .callinfo NO_CALLS
  220. .entry
  221. load32 cache_info, %r1
  222. /* Flush Data Cache */
  223. LDREG DCACHE_BASE(%r1), %arg0
  224. LDREG DCACHE_STRIDE(%r1), %arg1
  225. LDREG DCACHE_COUNT(%r1), %arg2
  226. LDREG DCACHE_LOOP(%r1), %arg3
  227. rsm PSW_SM_I, %r22 /* No mmgt ops during loop*/
  228. mtsp %r0, %sr1
  229. addib,COND(=) -1, %arg3, fdoneloop /* Preadjust and test */
  230. movb,<,n %arg3, %r31, fdsync /* If loop < 0, do sync */
  231. fdmanyloop: /* Loop if LOOP >= 2 */
  232. addib,COND(>) -1, %r31, fdmanyloop /* Adjusted inner loop decr */
  233. fdce %r0(%sr1, %arg0)
  234. fdce,m %arg1(%sr1, %arg0) /* Last fdce and addr adjust */
  235. movb,tr %arg3, %r31, fdmanyloop /* Re-init inner loop count */
  236. addib,COND(<=),n -1, %arg2, fdsync /* Outer loop decr */
  237. fdoneloop: /* Loop if LOOP = 1 */
  238. /* Some implementations may flush with a single fdce instruction */
  239. cmpib,COND(>>=),n 15, %arg2, fdoneloop2
  240. fdoneloop1:
  241. fdce,m %arg1(%sr1, %arg0)
  242. fdce,m %arg1(%sr1, %arg0)
  243. fdce,m %arg1(%sr1, %arg0)
  244. fdce,m %arg1(%sr1, %arg0)
  245. fdce,m %arg1(%sr1, %arg0)
  246. fdce,m %arg1(%sr1, %arg0)
  247. fdce,m %arg1(%sr1, %arg0)
  248. fdce,m %arg1(%sr1, %arg0)
  249. fdce,m %arg1(%sr1, %arg0)
  250. fdce,m %arg1(%sr1, %arg0)
  251. fdce,m %arg1(%sr1, %arg0)
  252. fdce,m %arg1(%sr1, %arg0)
  253. fdce,m %arg1(%sr1, %arg0)
  254. fdce,m %arg1(%sr1, %arg0)
  255. fdce,m %arg1(%sr1, %arg0)
  256. addib,COND(>) -16, %arg2, fdoneloop1
  257. fdce,m %arg1(%sr1, %arg0)
  258. /* Check if done */
  259. cmpb,COND(=),n %arg2, %r0, fdsync /* Predict branch taken */
  260. fdoneloop2:
  261. addib,COND(>) -1, %arg2, fdoneloop2 /* Outer loop count decr */
  262. fdce,m %arg1(%sr1, %arg0) /* Fdce for one loop */
  263. fdsync:
  264. syncdma
  265. sync
  266. mtsm %r22 /* restore I-bit */
  267. bv %r0(%r2)
  268. nop
  269. .exit
  270. .procend
  271. ENDPROC(flush_data_cache_local)
  272. .align 16
  273. /* Macros to serialize TLB purge operations on SMP. */
  274. .macro tlb_lock la,flags,tmp
  275. #ifdef CONFIG_SMP
  276. ldil L%pa_tlb_lock,%r1
  277. ldo R%pa_tlb_lock(%r1),\la
  278. rsm PSW_SM_I,\flags
  279. 1: LDCW 0(\la),\tmp
  280. cmpib,<>,n 0,\tmp,3f
  281. 2: ldw 0(\la),\tmp
  282. cmpb,<> %r0,\tmp,1b
  283. nop
  284. b,n 2b
  285. 3:
  286. #endif
  287. .endm
  288. .macro tlb_unlock la,flags,tmp
  289. #ifdef CONFIG_SMP
  290. ldi 1,\tmp
  291. stw \tmp,0(\la)
  292. mtsm \flags
  293. #endif
  294. .endm
  295. /* Clear page using kernel mapping. */
  296. ENTRY(clear_page_asm)
  297. .proc
  298. .callinfo NO_CALLS
  299. .entry
  300. #ifdef CONFIG_64BIT
  301. /* Unroll the loop. */
  302. ldi (PAGE_SIZE / 128), %r1
  303. 1:
  304. std %r0, 0(%r26)
  305. std %r0, 8(%r26)
  306. std %r0, 16(%r26)
  307. std %r0, 24(%r26)
  308. std %r0, 32(%r26)
  309. std %r0, 40(%r26)
  310. std %r0, 48(%r26)
  311. std %r0, 56(%r26)
  312. std %r0, 64(%r26)
  313. std %r0, 72(%r26)
  314. std %r0, 80(%r26)
  315. std %r0, 88(%r26)
  316. std %r0, 96(%r26)
  317. std %r0, 104(%r26)
  318. std %r0, 112(%r26)
  319. std %r0, 120(%r26)
  320. /* Note reverse branch hint for addib is taken. */
  321. addib,COND(>),n -1, %r1, 1b
  322. ldo 128(%r26), %r26
  323. #else
  324. /*
  325. * Note that until (if) we start saving the full 64-bit register
  326. * values on interrupt, we can't use std on a 32 bit kernel.
  327. */
  328. ldi (PAGE_SIZE / 64), %r1
  329. 1:
  330. stw %r0, 0(%r26)
  331. stw %r0, 4(%r26)
  332. stw %r0, 8(%r26)
  333. stw %r0, 12(%r26)
  334. stw %r0, 16(%r26)
  335. stw %r0, 20(%r26)
  336. stw %r0, 24(%r26)
  337. stw %r0, 28(%r26)
  338. stw %r0, 32(%r26)
  339. stw %r0, 36(%r26)
  340. stw %r0, 40(%r26)
  341. stw %r0, 44(%r26)
  342. stw %r0, 48(%r26)
  343. stw %r0, 52(%r26)
  344. stw %r0, 56(%r26)
  345. stw %r0, 60(%r26)
  346. addib,COND(>),n -1, %r1, 1b
  347. ldo 64(%r26), %r26
  348. #endif
  349. bv %r0(%r2)
  350. nop
  351. .exit
  352. .procend
  353. ENDPROC(clear_page_asm)
  354. /* Copy page using kernel mapping. */
  355. ENTRY(copy_page_asm)
  356. .proc
  357. .callinfo NO_CALLS
  358. .entry
  359. #ifdef CONFIG_64BIT
  360. /* PA8x00 CPUs can consume 2 loads or 1 store per cycle.
  361. * Unroll the loop by hand and arrange insn appropriately.
  362. * Prefetch doesn't improve performance on rp3440.
  363. * GCC probably can do this just as well...
  364. */
  365. ldi (PAGE_SIZE / 128), %r1
  366. 1: ldd 0(%r25), %r19
  367. ldd 8(%r25), %r20
  368. ldd 16(%r25), %r21
  369. ldd 24(%r25), %r22
  370. std %r19, 0(%r26)
  371. std %r20, 8(%r26)
  372. ldd 32(%r25), %r19
  373. ldd 40(%r25), %r20
  374. std %r21, 16(%r26)
  375. std %r22, 24(%r26)
  376. ldd 48(%r25), %r21
  377. ldd 56(%r25), %r22
  378. std %r19, 32(%r26)
  379. std %r20, 40(%r26)
  380. ldd 64(%r25), %r19
  381. ldd 72(%r25), %r20
  382. std %r21, 48(%r26)
  383. std %r22, 56(%r26)
  384. ldd 80(%r25), %r21
  385. ldd 88(%r25), %r22
  386. std %r19, 64(%r26)
  387. std %r20, 72(%r26)
  388. ldd 96(%r25), %r19
  389. ldd 104(%r25), %r20
  390. std %r21, 80(%r26)
  391. std %r22, 88(%r26)
  392. ldd 112(%r25), %r21
  393. ldd 120(%r25), %r22
  394. ldo 128(%r25), %r25
  395. std %r19, 96(%r26)
  396. std %r20, 104(%r26)
  397. std %r21, 112(%r26)
  398. std %r22, 120(%r26)
  399. /* Note reverse branch hint for addib is taken. */
  400. addib,COND(>),n -1, %r1, 1b
  401. ldo 128(%r26), %r26
  402. #else
  403. /*
  404. * This loop is optimized for PCXL/PCXL2 ldw/ldw and stw/stw
  405. * bundles (very restricted rules for bundling).
  406. * Note that until (if) we start saving
  407. * the full 64 bit register values on interrupt, we can't
  408. * use ldd/std on a 32 bit kernel.
  409. */
  410. ldw 0(%r25), %r19
  411. ldi (PAGE_SIZE / 64), %r1
  412. 1:
  413. ldw 4(%r25), %r20
  414. ldw 8(%r25), %r21
  415. ldw 12(%r25), %r22
  416. stw %r19, 0(%r26)
  417. stw %r20, 4(%r26)
  418. stw %r21, 8(%r26)
  419. stw %r22, 12(%r26)
  420. ldw 16(%r25), %r19
  421. ldw 20(%r25), %r20
  422. ldw 24(%r25), %r21
  423. ldw 28(%r25), %r22
  424. stw %r19, 16(%r26)
  425. stw %r20, 20(%r26)
  426. stw %r21, 24(%r26)
  427. stw %r22, 28(%r26)
  428. ldw 32(%r25), %r19
  429. ldw 36(%r25), %r20
  430. ldw 40(%r25), %r21
  431. ldw 44(%r25), %r22
  432. stw %r19, 32(%r26)
  433. stw %r20, 36(%r26)
  434. stw %r21, 40(%r26)
  435. stw %r22, 44(%r26)
  436. ldw 48(%r25), %r19
  437. ldw 52(%r25), %r20
  438. ldw 56(%r25), %r21
  439. ldw 60(%r25), %r22
  440. stw %r19, 48(%r26)
  441. stw %r20, 52(%r26)
  442. ldo 64(%r25), %r25
  443. stw %r21, 56(%r26)
  444. stw %r22, 60(%r26)
  445. ldo 64(%r26), %r26
  446. addib,COND(>),n -1, %r1, 1b
  447. ldw 0(%r25), %r19
  448. #endif
  449. bv %r0(%r2)
  450. nop
  451. .exit
  452. .procend
  453. ENDPROC(copy_page_asm)
  454. /*
  455. * NOTE: Code in clear_user_page has a hard coded dependency on the
  456. * maximum alias boundary being 4 Mb. We've been assured by the
  457. * parisc chip designers that there will not ever be a parisc
  458. * chip with a larger alias boundary (Never say never :-) ).
  459. *
  460. * Subtle: the dtlb miss handlers support the temp alias region by
  461. * "knowing" that if a dtlb miss happens within the temp alias
  462. * region it must have occurred while in clear_user_page. Since
  463. * this routine makes use of processor local translations, we
  464. * don't want to insert them into the kernel page table. Instead,
  465. * we load up some general registers (they need to be registers
  466. * which aren't shadowed) with the physical page numbers (preshifted
  467. * for tlb insertion) needed to insert the translations. When we
  468. * miss on the translation, the dtlb miss handler inserts the
  469. * translation into the tlb using these values:
  470. *
  471. * %r26 physical page (shifted for tlb insert) of "to" translation
  472. * %r23 physical page (shifted for tlb insert) of "from" translation
  473. */
  474. /*
  475. * We can't do this since copy_user_page is used to bring in
  476. * file data that might have instructions. Since the data would
  477. * then need to be flushed out so the i-fetch can see it, it
  478. * makes more sense to just copy through the kernel translation
  479. * and flush it.
  480. *
  481. * I'm still keeping this around because it may be possible to
  482. * use it if more information is passed into copy_user_page().
  483. * Have to do some measurements to see if it is worthwhile to
  484. * lobby for such a change.
  485. *
  486. */
  487. ENTRY(copy_user_page_asm)
  488. .proc
  489. .callinfo NO_CALLS
  490. .entry
  491. /* Convert virtual `to' and `from' addresses to physical addresses.
  492. Move `from' physical address to non shadowed register. */
  493. ldil L%(__PAGE_OFFSET), %r1
  494. sub %r26, %r1, %r26
  495. sub %r25, %r1, %r23
  496. ldil L%(TMPALIAS_MAP_START), %r28
  497. /* FIXME for different page sizes != 4k */
  498. #ifdef CONFIG_64BIT
  499. #if (TMPALIAS_MAP_START >= 0x80000000)
  500. depdi 0, 31,32, %r28 /* clear any sign extension */
  501. #endif
  502. extrd,u %r26,56,32, %r26 /* convert phys addr to tlb insert format */
  503. extrd,u %r23,56,32, %r23 /* convert phys addr to tlb insert format */
  504. depd %r24,63,22, %r28 /* Form aliased virtual address 'to' */
  505. depdi 0, 63,12, %r28 /* Clear any offset bits */
  506. copy %r28, %r29
  507. depdi 1, 41,1, %r29 /* Form aliased virtual address 'from' */
  508. #else
  509. extrw,u %r26, 24,25, %r26 /* convert phys addr to tlb insert format */
  510. extrw,u %r23, 24,25, %r23 /* convert phys addr to tlb insert format */
  511. depw %r24, 31,22, %r28 /* Form aliased virtual address 'to' */
  512. depwi 0, 31,12, %r28 /* Clear any offset bits */
  513. copy %r28, %r29
  514. depwi 1, 9,1, %r29 /* Form aliased virtual address 'from' */
  515. #endif
  516. /* Purge any old translations */
  517. #ifdef CONFIG_PA20
  518. pdtlb,l 0(%r28)
  519. pdtlb,l 0(%r29)
  520. #else
  521. tlb_lock %r20,%r21,%r22
  522. pdtlb 0(%r28)
  523. pdtlb 0(%r29)
  524. tlb_unlock %r20,%r21,%r22
  525. #endif
  526. #ifdef CONFIG_64BIT
  527. /* PA8x00 CPUs can consume 2 loads or 1 store per cycle.
  528. * Unroll the loop by hand and arrange insn appropriately.
  529. * GCC probably can do this just as well.
  530. */
  531. ldd 0(%r29), %r19
  532. ldi (PAGE_SIZE / 128), %r1
  533. 1: ldd 8(%r29), %r20
  534. ldd 16(%r29), %r21
  535. ldd 24(%r29), %r22
  536. std %r19, 0(%r28)
  537. std %r20, 8(%r28)
  538. ldd 32(%r29), %r19
  539. ldd 40(%r29), %r20
  540. std %r21, 16(%r28)
  541. std %r22, 24(%r28)
  542. ldd 48(%r29), %r21
  543. ldd 56(%r29), %r22
  544. std %r19, 32(%r28)
  545. std %r20, 40(%r28)
  546. ldd 64(%r29), %r19
  547. ldd 72(%r29), %r20
  548. std %r21, 48(%r28)
  549. std %r22, 56(%r28)
  550. ldd 80(%r29), %r21
  551. ldd 88(%r29), %r22
  552. std %r19, 64(%r28)
  553. std %r20, 72(%r28)
  554. ldd 96(%r29), %r19
  555. ldd 104(%r29), %r20
  556. std %r21, 80(%r28)
  557. std %r22, 88(%r28)
  558. ldd 112(%r29), %r21
  559. ldd 120(%r29), %r22
  560. std %r19, 96(%r28)
  561. std %r20, 104(%r28)
  562. ldo 128(%r29), %r29
  563. std %r21, 112(%r28)
  564. std %r22, 120(%r28)
  565. ldo 128(%r28), %r28
  566. /* conditional branches nullify on forward taken branch, and on
  567. * non-taken backward branch. Note that .+4 is a backwards branch.
  568. * The ldd should only get executed if the branch is taken.
  569. */
  570. addib,COND(>),n -1, %r1, 1b /* bundle 10 */
  571. ldd 0(%r29), %r19 /* start next loads */
  572. #else
  573. ldi (PAGE_SIZE / 64), %r1
  574. /*
  575. * This loop is optimized for PCXL/PCXL2 ldw/ldw and stw/stw
  576. * bundles (very restricted rules for bundling). It probably
  577. * does OK on PCXU and better, but we could do better with
  578. * ldd/std instructions. Note that until (if) we start saving
  579. * the full 64 bit register values on interrupt, we can't
  580. * use ldd/std on a 32 bit kernel.
  581. */
  582. 1: ldw 0(%r29), %r19
  583. ldw 4(%r29), %r20
  584. ldw 8(%r29), %r21
  585. ldw 12(%r29), %r22
  586. stw %r19, 0(%r28)
  587. stw %r20, 4(%r28)
  588. stw %r21, 8(%r28)
  589. stw %r22, 12(%r28)
  590. ldw 16(%r29), %r19
  591. ldw 20(%r29), %r20
  592. ldw 24(%r29), %r21
  593. ldw 28(%r29), %r22
  594. stw %r19, 16(%r28)
  595. stw %r20, 20(%r28)
  596. stw %r21, 24(%r28)
  597. stw %r22, 28(%r28)
  598. ldw 32(%r29), %r19
  599. ldw 36(%r29), %r20
  600. ldw 40(%r29), %r21
  601. ldw 44(%r29), %r22
  602. stw %r19, 32(%r28)
  603. stw %r20, 36(%r28)
  604. stw %r21, 40(%r28)
  605. stw %r22, 44(%r28)
  606. ldw 48(%r29), %r19
  607. ldw 52(%r29), %r20
  608. ldw 56(%r29), %r21
  609. ldw 60(%r29), %r22
  610. stw %r19, 48(%r28)
  611. stw %r20, 52(%r28)
  612. stw %r21, 56(%r28)
  613. stw %r22, 60(%r28)
  614. ldo 64(%r28), %r28
  615. addib,COND(>) -1, %r1,1b
  616. ldo 64(%r29), %r29
  617. #endif
  618. bv %r0(%r2)
  619. nop
  620. .exit
  621. .procend
  622. ENDPROC(copy_user_page_asm)
  623. ENTRY(clear_user_page_asm)
  624. .proc
  625. .callinfo NO_CALLS
  626. .entry
  627. tophys_r1 %r26
  628. ldil L%(TMPALIAS_MAP_START), %r28
  629. #ifdef CONFIG_64BIT
  630. #if (TMPALIAS_MAP_START >= 0x80000000)
  631. depdi 0, 31,32, %r28 /* clear any sign extension */
  632. /* FIXME: page size dependend */
  633. #endif
  634. extrd,u %r26, 56,32, %r26 /* convert phys addr to tlb insert format */
  635. depd %r25, 63,22, %r28 /* Form aliased virtual address 'to' */
  636. depdi 0, 63,12, %r28 /* Clear any offset bits */
  637. #else
  638. extrw,u %r26, 24,25, %r26 /* convert phys addr to tlb insert format */
  639. depw %r25, 31,22, %r28 /* Form aliased virtual address 'to' */
  640. depwi 0, 31,12, %r28 /* Clear any offset bits */
  641. #endif
  642. /* Purge any old translation */
  643. #ifdef CONFIG_PA20
  644. pdtlb,l 0(%r28)
  645. #else
  646. tlb_lock %r20,%r21,%r22
  647. pdtlb 0(%r28)
  648. tlb_unlock %r20,%r21,%r22
  649. #endif
  650. #ifdef CONFIG_64BIT
  651. ldi (PAGE_SIZE / 128), %r1
  652. /* PREFETCH (Write) has not (yet) been proven to help here */
  653. /* #define PREFETCHW_OP ldd 256(%0), %r0 */
  654. 1: std %r0, 0(%r28)
  655. std %r0, 8(%r28)
  656. std %r0, 16(%r28)
  657. std %r0, 24(%r28)
  658. std %r0, 32(%r28)
  659. std %r0, 40(%r28)
  660. std %r0, 48(%r28)
  661. std %r0, 56(%r28)
  662. std %r0, 64(%r28)
  663. std %r0, 72(%r28)
  664. std %r0, 80(%r28)
  665. std %r0, 88(%r28)
  666. std %r0, 96(%r28)
  667. std %r0, 104(%r28)
  668. std %r0, 112(%r28)
  669. std %r0, 120(%r28)
  670. addib,COND(>) -1, %r1, 1b
  671. ldo 128(%r28), %r28
  672. #else /* ! CONFIG_64BIT */
  673. ldi (PAGE_SIZE / 64), %r1
  674. 1: stw %r0, 0(%r28)
  675. stw %r0, 4(%r28)
  676. stw %r0, 8(%r28)
  677. stw %r0, 12(%r28)
  678. stw %r0, 16(%r28)
  679. stw %r0, 20(%r28)
  680. stw %r0, 24(%r28)
  681. stw %r0, 28(%r28)
  682. stw %r0, 32(%r28)
  683. stw %r0, 36(%r28)
  684. stw %r0, 40(%r28)
  685. stw %r0, 44(%r28)
  686. stw %r0, 48(%r28)
  687. stw %r0, 52(%r28)
  688. stw %r0, 56(%r28)
  689. stw %r0, 60(%r28)
  690. addib,COND(>) -1, %r1, 1b
  691. ldo 64(%r28), %r28
  692. #endif /* CONFIG_64BIT */
  693. bv %r0(%r2)
  694. nop
  695. .exit
  696. .procend
  697. ENDPROC(clear_user_page_asm)
  698. ENTRY(flush_dcache_page_asm)
  699. .proc
  700. .callinfo NO_CALLS
  701. .entry
  702. ldil L%(TMPALIAS_MAP_START), %r28
  703. #ifdef CONFIG_64BIT
  704. #if (TMPALIAS_MAP_START >= 0x80000000)
  705. depdi 0, 31,32, %r28 /* clear any sign extension */
  706. /* FIXME: page size dependend */
  707. #endif
  708. extrd,u %r26, 56,32, %r26 /* convert phys addr to tlb insert format */
  709. depd %r25, 63,22, %r28 /* Form aliased virtual address 'to' */
  710. depdi 0, 63,12, %r28 /* Clear any offset bits */
  711. #else
  712. extrw,u %r26, 24,25, %r26 /* convert phys addr to tlb insert format */
  713. depw %r25, 31,22, %r28 /* Form aliased virtual address 'to' */
  714. depwi 0, 31,12, %r28 /* Clear any offset bits */
  715. #endif
  716. /* Purge any old translation */
  717. #ifdef CONFIG_PA20
  718. pdtlb,l 0(%r28)
  719. #else
  720. tlb_lock %r20,%r21,%r22
  721. pdtlb 0(%r28)
  722. tlb_unlock %r20,%r21,%r22
  723. #endif
  724. ldil L%dcache_stride, %r1
  725. ldw R%dcache_stride(%r1), %r1
  726. #ifdef CONFIG_64BIT
  727. depdi,z 1, 63-PAGE_SHIFT,1, %r25
  728. #else
  729. depwi,z 1, 31-PAGE_SHIFT,1, %r25
  730. #endif
  731. add %r28, %r25, %r25
  732. sub %r25, %r1, %r25
  733. 1: fdc,m %r1(%r28)
  734. fdc,m %r1(%r28)
  735. fdc,m %r1(%r28)
  736. fdc,m %r1(%r28)
  737. fdc,m %r1(%r28)
  738. fdc,m %r1(%r28)
  739. fdc,m %r1(%r28)
  740. fdc,m %r1(%r28)
  741. fdc,m %r1(%r28)
  742. fdc,m %r1(%r28)
  743. fdc,m %r1(%r28)
  744. fdc,m %r1(%r28)
  745. fdc,m %r1(%r28)
  746. fdc,m %r1(%r28)
  747. fdc,m %r1(%r28)
  748. cmpb,COND(<<) %r28, %r25,1b
  749. fdc,m %r1(%r28)
  750. sync
  751. #ifdef CONFIG_PA20
  752. pdtlb,l 0(%r25)
  753. #else
  754. tlb_lock %r20,%r21,%r22
  755. pdtlb 0(%r25)
  756. tlb_unlock %r20,%r21,%r22
  757. #endif
  758. bv %r0(%r2)
  759. nop
  760. .exit
  761. .procend
  762. ENDPROC(flush_dcache_page_asm)
  763. ENTRY(flush_icache_page_asm)
  764. .proc
  765. .callinfo NO_CALLS
  766. .entry
  767. ldil L%(TMPALIAS_MAP_START), %r28
  768. #ifdef CONFIG_64BIT
  769. #if (TMPALIAS_MAP_START >= 0x80000000)
  770. depdi 0, 31,32, %r28 /* clear any sign extension */
  771. /* FIXME: page size dependend */
  772. #endif
  773. extrd,u %r26, 56,32, %r26 /* convert phys addr to tlb insert format */
  774. depd %r25, 63,22, %r28 /* Form aliased virtual address 'to' */
  775. depdi 0, 63,12, %r28 /* Clear any offset bits */
  776. #else
  777. extrw,u %r26, 24,25, %r26 /* convert phys addr to tlb insert format */
  778. depw %r25, 31,22, %r28 /* Form aliased virtual address 'to' */
  779. depwi 0, 31,12, %r28 /* Clear any offset bits */
  780. #endif
  781. /* Purge any old translation */
  782. #ifdef CONFIG_PA20
  783. pitlb,l %r0(%sr4,%r28)
  784. #else
  785. tlb_lock %r20,%r21,%r22
  786. pitlb (%sr4,%r28)
  787. tlb_unlock %r20,%r21,%r22
  788. #endif
  789. ldil L%icache_stride, %r1
  790. ldw R%icache_stride(%r1), %r1
  791. #ifdef CONFIG_64BIT
  792. depdi,z 1, 63-PAGE_SHIFT,1, %r25
  793. #else
  794. depwi,z 1, 31-PAGE_SHIFT,1, %r25
  795. #endif
  796. add %r28, %r25, %r25
  797. sub %r25, %r1, %r25
  798. /* fic only has the type 26 form on PA1.1, requiring an
  799. * explicit space specification, so use %sr4 */
  800. 1: fic,m %r1(%sr4,%r28)
  801. fic,m %r1(%sr4,%r28)
  802. fic,m %r1(%sr4,%r28)
  803. fic,m %r1(%sr4,%r28)
  804. fic,m %r1(%sr4,%r28)
  805. fic,m %r1(%sr4,%r28)
  806. fic,m %r1(%sr4,%r28)
  807. fic,m %r1(%sr4,%r28)
  808. fic,m %r1(%sr4,%r28)
  809. fic,m %r1(%sr4,%r28)
  810. fic,m %r1(%sr4,%r28)
  811. fic,m %r1(%sr4,%r28)
  812. fic,m %r1(%sr4,%r28)
  813. fic,m %r1(%sr4,%r28)
  814. fic,m %r1(%sr4,%r28)
  815. cmpb,COND(<<) %r28, %r25,1b
  816. fic,m %r1(%sr4,%r28)
  817. sync
  818. #ifdef CONFIG_PA20
  819. pitlb,l %r0(%sr4,%r25)
  820. #else
  821. tlb_lock %r20,%r21,%r22
  822. pitlb (%sr4,%r25)
  823. tlb_unlock %r20,%r21,%r22
  824. #endif
  825. bv %r0(%r2)
  826. nop
  827. .exit
  828. .procend
  829. ENDPROC(flush_icache_page_asm)
830. ENTRY(flush_kernel_dcache_page_asm)
831. .proc
832. .callinfo NO_CALLS
833. .entry
/*
 * void flush_kernel_dcache_page_asm(void *vaddr)
 * In:    %r26 = kernel virtual address of the page (assumed page-aligned)
 * Writes back every data-cache line of one page with fdc (flush data cache).
 * Clobbers %r1, %r23, %r25; returns through %r2.
 */
834. ldil L%dcache_stride, %r1 /* %r1 = left part of &dcache_stride */
835. ldw R%dcache_stride(%r1), %r23 /* %r23 = dcache line stride in bytes */
836. #ifdef CONFIG_64BIT
837. depdi,z 1, 63-PAGE_SHIFT,1, %r25 /* %r25 = PAGE_SIZE (1 << PAGE_SHIFT) */
838. #else
839. depwi,z 1, 31-PAGE_SHIFT,1, %r25 /* %r25 = PAGE_SIZE (1 << PAGE_SHIFT) */
840. #endif
841. add %r26, %r25, %r25 /* %r25 = one past end of page */
842. sub %r25, %r23, %r25 /* stop one stride early: the delay-slot fdc below does the last line */
/* Loop is unrolled 16x: 15 fdc here plus one in the cmpb delay slot.
 * fdc,m flushes the line at (%r26) and post-increments %r26 by %r23. */
843. 1: fdc,m %r23(%r26)
844. fdc,m %r23(%r26)
845. fdc,m %r23(%r26)
846. fdc,m %r23(%r26)
847. fdc,m %r23(%r26)
848. fdc,m %r23(%r26)
849. fdc,m %r23(%r26)
850. fdc,m %r23(%r26)
851. fdc,m %r23(%r26)
852. fdc,m %r23(%r26)
853. fdc,m %r23(%r26)
854. fdc,m %r23(%r26)
855. fdc,m %r23(%r26)
856. fdc,m %r23(%r26)
857. fdc,m %r23(%r26)
858. cmpb,COND(<<) %r26, %r25,1b /* loop while %r26 < %r25 (unsigned) */
859. fdc,m %r23(%r26) /* branch delay slot: flush one more line */
860. sync /* wait for all flushes to complete */
861. bv %r0(%r2) /* return to caller */
862. nop /* branch delay slot */
863. .exit
864. .procend
865. ENDPROC(flush_kernel_dcache_page_asm)
866. ENTRY(purge_kernel_dcache_page_asm)
867. .proc
868. .callinfo NO_CALLS
869. .entry
/*
 * void purge_kernel_dcache_page_asm(void *vaddr)
 * In:    %r26 = kernel virtual address of the page (assumed page-aligned)
 * Purges every data-cache line of one page with pdc (purge data cache:
 * invalidates the line without writing dirty data back, unlike fdc).
 * Clobbers %r1, %r23, %r25; returns through %r2.
 */
870. ldil L%dcache_stride, %r1 /* %r1 = left part of &dcache_stride */
871. ldw R%dcache_stride(%r1), %r23 /* %r23 = dcache line stride in bytes */
872. #ifdef CONFIG_64BIT
873. depdi,z 1, 63-PAGE_SHIFT,1, %r25 /* %r25 = PAGE_SIZE (1 << PAGE_SHIFT) */
874. #else
875. depwi,z 1, 31-PAGE_SHIFT,1, %r25 /* %r25 = PAGE_SIZE (1 << PAGE_SHIFT) */
876. #endif
877. add %r26, %r25, %r25 /* %r25 = one past end of page */
878. sub %r25, %r23, %r25 /* stop one stride early: delay-slot pdc does the last line */
/* Loop is unrolled 16x: 15 pdc here plus one in the cmpb delay slot.
 * pdc,m purges the line at (%r26) and post-increments %r26 by %r23. */
879. 1: pdc,m %r23(%r26)
880. pdc,m %r23(%r26)
881. pdc,m %r23(%r26)
882. pdc,m %r23(%r26)
883. pdc,m %r23(%r26)
884. pdc,m %r23(%r26)
885. pdc,m %r23(%r26)
886. pdc,m %r23(%r26)
887. pdc,m %r23(%r26)
888. pdc,m %r23(%r26)
889. pdc,m %r23(%r26)
890. pdc,m %r23(%r26)
891. pdc,m %r23(%r26)
892. pdc,m %r23(%r26)
893. pdc,m %r23(%r26)
894. cmpb,COND(<<) %r26, %r25, 1b /* loop while %r26 < %r25 (unsigned) */
895. pdc,m %r23(%r26) /* branch delay slot: purge one more line */
896. sync /* wait for all purges to complete */
897. bv %r0(%r2) /* return to caller */
898. nop /* branch delay slot */
899. .exit
900. .procend
901. ENDPROC(purge_kernel_dcache_page_asm)
902. ENTRY(flush_user_dcache_range_asm)
903. .proc
904. .callinfo NO_CALLS
905. .entry
/*
 * void flush_user_dcache_range_asm(unsigned long start, unsigned long end)
 * In:    %r26 = start address, %r25 = end address (user space, via %sr3)
 * Writes back the data-cache lines covering [start, end) with fdc.
 * Clobbers %r1, %r21, %r23, %r26; returns through %r2.
 */
906. ldil L%dcache_stride, %r1 /* %r1 = left part of &dcache_stride */
907. ldw R%dcache_stride(%r1), %r23 /* %r23 = dcache line stride in bytes */
908. ldo -1(%r23), %r21 /* %r21 = stride - 1 (alignment mask) */
909. ANDCM %r26, %r21, %r26 /* round start down to a line boundary */
/* Loop: while %r26 < %r25 (unsigned), flush line and post-increment %r26.
 * The ,n nullifies the delay-slot fdc on loop exit (backward branch not taken). */
910. 1: cmpb,COND(<<),n %r26, %r25, 1b
911. fdc,m %r23(%sr3, %r26) /* delay slot: flush user-space line via %sr3 */
912. sync /* wait for flushes to complete */
913. bv %r0(%r2) /* return to caller */
914. nop /* branch delay slot */
915. .exit
916. .procend
917. ENDPROC(flush_user_dcache_range_asm)
918. ENTRY(flush_kernel_dcache_range_asm)
919. .proc
920. .callinfo NO_CALLS
921. .entry
/*
 * void flush_kernel_dcache_range_asm(unsigned long start, unsigned long end)
 * In:    %r26 = start address, %r25 = end address (kernel space)
 * Writes back the data-cache lines covering [start, end) with fdc, then
 * also drains outstanding DMA (syncdma) since kernel ranges may be DMA'd.
 * Clobbers %r1, %r21, %r23, %r26; returns through %r2.
 */
922. ldil L%dcache_stride, %r1 /* %r1 = left part of &dcache_stride */
923. ldw R%dcache_stride(%r1), %r23 /* %r23 = dcache line stride in bytes */
924. ldo -1(%r23), %r21 /* %r21 = stride - 1 (alignment mask) */
925. ANDCM %r26, %r21, %r26 /* round start down to a line boundary */
/* Loop: while %r26 < %r25 (unsigned); ,n nullifies the delay-slot fdc on exit. */
926. 1: cmpb,COND(<<),n %r26, %r25,1b
927. fdc,m %r23(%r26) /* delay slot: flush line, %r26 += stride */
928. sync /* wait for flushes to complete */
929. syncdma /* drain DMA before returning */
930. bv %r0(%r2) /* return to caller */
931. nop /* branch delay slot */
932. .exit
933. .procend
934. ENDPROC(flush_kernel_dcache_range_asm)
935. ENTRY(flush_user_icache_range_asm)
936. .proc
937. .callinfo NO_CALLS
938. .entry
/*
 * void flush_user_icache_range_asm(unsigned long start, unsigned long end)
 * In:    %r26 = start address, %r25 = end address (user space, via %sr3)
 * Flushes the instruction-cache lines covering [start, end) with fic.
 * Clobbers %r1, %r21, %r23, %r26; returns through %r2.
 */
939. ldil L%icache_stride, %r1 /* %r1 = left part of &icache_stride */
940. ldw R%icache_stride(%r1), %r23 /* %r23 = icache line stride in bytes */
941. ldo -1(%r23), %r21 /* %r21 = stride - 1 (alignment mask) */
942. ANDCM %r26, %r21, %r26 /* round start down to a line boundary */
/* Loop: while %r26 < %r25 (unsigned); ,n nullifies the delay-slot fic on exit. */
943. 1: cmpb,COND(<<),n %r26, %r25,1b
944. fic,m %r23(%sr3, %r26) /* delay slot: flush user-space line via %sr3 */
945. sync /* wait for flushes to complete */
946. bv %r0(%r2) /* return to caller */
947. nop /* branch delay slot */
948. .exit
949. .procend
950. ENDPROC(flush_user_icache_range_asm)
951. ENTRY(flush_kernel_icache_page)
952. .proc
953. .callinfo NO_CALLS
954. .entry
/*
 * void flush_kernel_icache_page(void *vaddr)
 * In:    %r26 = kernel virtual address of the page (assumed page-aligned)
 * Flushes every instruction-cache line of one page with fic, addressed
 * through %sr4 (kernel space; fic requires an explicit space register).
 * Clobbers %r1, %r23, %r25; returns through %r2.
 */
955. ldil L%icache_stride, %r1 /* %r1 = left part of &icache_stride */
956. ldw R%icache_stride(%r1), %r23 /* %r23 = icache line stride in bytes */
957. #ifdef CONFIG_64BIT
958. depdi,z 1, 63-PAGE_SHIFT,1, %r25 /* %r25 = PAGE_SIZE (1 << PAGE_SHIFT) */
959. #else
960. depwi,z 1, 31-PAGE_SHIFT,1, %r25 /* %r25 = PAGE_SIZE (1 << PAGE_SHIFT) */
961. #endif
962. add %r26, %r25, %r25 /* %r25 = one past end of page */
963. sub %r25, %r23, %r25 /* stop one stride early: delay-slot fic does the last line */
/* Loop is unrolled 16x: 15 fic here plus one in the cmpb delay slot.
 * fic,m flushes the line at (%sr4, %r26) and post-increments %r26 by %r23. */
964. 1: fic,m %r23(%sr4, %r26)
965. fic,m %r23(%sr4, %r26)
966. fic,m %r23(%sr4, %r26)
967. fic,m %r23(%sr4, %r26)
968. fic,m %r23(%sr4, %r26)
969. fic,m %r23(%sr4, %r26)
970. fic,m %r23(%sr4, %r26)
971. fic,m %r23(%sr4, %r26)
972. fic,m %r23(%sr4, %r26)
973. fic,m %r23(%sr4, %r26)
974. fic,m %r23(%sr4, %r26)
975. fic,m %r23(%sr4, %r26)
976. fic,m %r23(%sr4, %r26)
977. fic,m %r23(%sr4, %r26)
978. fic,m %r23(%sr4, %r26)
979. cmpb,COND(<<) %r26, %r25, 1b /* loop while %r26 < %r25 (unsigned) */
980. fic,m %r23(%sr4, %r26) /* branch delay slot: flush one more line */
981. sync /* wait for all flushes to complete */
982. bv %r0(%r2) /* return to caller */
983. nop /* branch delay slot */
984. .exit
985. .procend
986. ENDPROC(flush_kernel_icache_page)
987. ENTRY(flush_kernel_icache_range_asm)
988. .proc
989. .callinfo NO_CALLS
990. .entry
/*
 * void flush_kernel_icache_range_asm(unsigned long start, unsigned long end)
 * In:    %r26 = start address, %r25 = end address (kernel space, via %sr4)
 * Flushes the instruction-cache lines covering [start, end) with fic.
 * Clobbers %r1, %r21, %r23, %r26; returns through %r2.
 */
991. ldil L%icache_stride, %r1 /* %r1 = left part of &icache_stride */
992. ldw R%icache_stride(%r1), %r23 /* %r23 = icache line stride in bytes */
993. ldo -1(%r23), %r21 /* %r21 = stride - 1 (alignment mask) */
994. ANDCM %r26, %r21, %r26 /* round start down to a line boundary */
/* Loop: while %r26 < %r25 (unsigned); ,n nullifies the delay-slot fic on exit. */
995. 1: cmpb,COND(<<),n %r26, %r25, 1b
996. fic,m %r23(%sr4, %r26) /* delay slot: flush kernel line via %sr4 */
997. sync /* wait for flushes to complete */
998. bv %r0(%r2) /* return to caller */
999. nop /* branch delay slot */
1000. .exit
1001. .procend
1002. ENDPROC(flush_kernel_icache_range_asm)
1003. /* align should cover use of rfi in disable_sr_hashing_asm and
1004. * srdis_done.
1005. */
1006. .align 256
1007. ENTRY(disable_sr_hashing_asm)
1008. .proc
1009. .callinfo NO_CALLS
1010. .entry
/*
 * void disable_sr_hashing_asm(int srhash_type)
 * In:    %r26 = SRHASH_* selector for the CPU family
 * Disables space-register hashing by clearing the enable bits in the
 * CPU's diagnose register.  Must run in real (physical) mode, so the
 * routine switches PSW state via rfi, does the diagnose access for the
 * selected CPU type, then switches back to virtual mode and returns.
 * Clobbers %r1, %r28; returns through %r2.
 */
1011. /*
1012. * Switch to real mode
1013. */
1014. /* pcxt_ssm_bug */
1015. rsm PSW_SM_I, %r0 /* disable interrupts */
1016. load32 PA(1f), %r1 /* %r1 = physical address of label 1 below */
/* nops: spacing after rsm (see pcxt_ssm_bug above) */
1017. nop
1018. nop
1019. nop
1020. nop
1021. nop
1022. rsm PSW_SM_Q, %r0 /* prep to load iia queue */
1023. mtctl %r0, %cr17 /* Clear IIASQ tail */
1024. mtctl %r0, %cr17 /* Clear IIASQ head */
1025. mtctl %r1, %cr18 /* IIAOQ head */
1026. ldo 4(%r1), %r1
1027. mtctl %r1, %cr18 /* IIAOQ tail */
1028. load32 REAL_MODE_PSW, %r1
1029. mtctl %r1, %ipsw
1030. rfi /* continue at 1: in real mode */
1031. nop
/* Dispatch on CPU family; ,n nullifies the delay slot when taken */
1032. 1: cmpib,=,n SRHASH_PCXST, %r26,srdis_pcxs
1033. cmpib,=,n SRHASH_PCXL, %r26,srdis_pcxl
1034. cmpib,=,n SRHASH_PA20, %r26,srdis_pa20
1035. b,n srdis_done /* unknown type: nothing to do */
1036. srdis_pcxs:
1037. /* Disable Space Register Hashing for PCXS,PCXT,PCXT' */
1038. .word 0x141c1a00 /* mfdiag %dr0, %r28 */
1039. .word 0x141c1a00 /* must issue twice */
1040. depwi 0,18,1, %r28 /* Clear DHE (dcache hash enable) */
1041. depwi 0,20,1, %r28 /* Clear IHE (icache hash enable) */
1042. .word 0x141c1600 /* mtdiag %r28, %dr0 */
1043. .word 0x141c1600 /* must issue twice */
1044. b,n srdis_done
1045. srdis_pcxl:
1046. /* Disable Space Register Hashing for PCXL */
1047. .word 0x141c0600 /* mfdiag %dr0, %r28 */
1048. depwi 0,28,2, %r28 /* Clear DHASH_EN & IHASH_EN */
1049. .word 0x141c0240 /* mtdiag %r28, %dr0 */
1050. b,n srdis_done
1051. srdis_pa20:
1052. /* Disable Space Register Hashing for PCXU,PCXU+,PCXW,PCXW+,PCXW2 */
1053. .word 0x144008bc /* mfdiag %dr2, %r28 */
1054. depdi 0, 54,1, %r28 /* clear DIAG_SPHASH_ENAB (bit 54) */
1055. .word 0x145c1840 /* mtdiag %r28, %dr2 */
1056. srdis_done:
1057. /* Switch back to virtual mode */
1058. rsm PSW_SM_I, %r0 /* prep to load iia queue */
1059. load32 2f, %r1 /* %r1 = virtual address of label 2 below */
/* nops: spacing after rsm (see pcxt_ssm_bug above) */
1060. nop
1061. nop
1062. nop
1063. nop
1064. nop
1065. rsm PSW_SM_Q, %r0 /* prep to load iia queue */
1066. mtctl %r0, %cr17 /* Clear IIASQ tail */
1067. mtctl %r0, %cr17 /* Clear IIASQ head */
1068. mtctl %r1, %cr18 /* IIAOQ head */
1069. ldo 4(%r1), %r1
1070. mtctl %r1, %cr18 /* IIAOQ tail */
1071. load32 KERNEL_PSW, %r1
1072. mtctl %r1, %ipsw
1073. rfi /* continue at 2: in virtual mode */
1074. nop
1075. 2: bv %r0(%r2) /* return to caller */
1076. nop /* branch delay slot */
1077. .exit
1078. .procend
1079. ENDPROC(disable_sr_hashing_asm)
  1080. .end