/* arch/parisc/kernel/pacache.S */
  1. /*
  2. * PARISC TLB and cache flushing support
  3. * Copyright (C) 2000-2001 Hewlett-Packard (John Marvin)
  4. * Copyright (C) 2001 Matthew Wilcox (willy at parisc-linux.org)
  5. * Copyright (C) 2002 Richard Hirst (rhirst with parisc-linux.org)
  6. *
  7. * This program is free software; you can redistribute it and/or modify
  8. * it under the terms of the GNU General Public License as published by
  9. * the Free Software Foundation; either version 2, or (at your option)
  10. * any later version.
  11. *
  12. * This program is distributed in the hope that it will be useful,
  13. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  14. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  15. * GNU General Public License for more details.
  16. *
  17. * You should have received a copy of the GNU General Public License
  18. * along with this program; if not, write to the Free Software
  19. * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
  20. */
  21. /*
  22. * NOTE: fdc,fic, and pdc instructions that use base register modification
  23. * should only use index and base registers that are not shadowed,
  24. * so that the fast path emulation in the non access miss handler
  25. * can be used.
  26. */
  27. #ifdef CONFIG_64BIT
  28. .level 2.0w
  29. #else
  30. .level 2.0
  31. #endif
  32. #include <asm/psw.h>
  33. #include <asm/assembly.h>
  34. #include <asm/pgtable.h>
  35. #include <asm/cache.h>
  36. #include <linux/linkage.h>
  37. .text
  38. .align 128
  39. ENTRY(flush_tlb_all_local)
  40. .proc
  41. .callinfo NO_CALLS
  42. .entry
  43. /*
  44. * The pitlbe and pdtlbe instructions should only be used to
  45. * flush the entire tlb. Also, there needs to be no intervening
  46. * tlb operations, e.g. tlb misses, so the operation needs
  47. * to happen in real mode with all interruptions disabled.
  48. */
  49. /* pcxt_ssm_bug - relied upon translation! PA 2.0 Arch. F-4 and F-5 */
  50. rsm PSW_SM_I, %r19 /* save I-bit state */
  51. load32 PA(1f), %r1
  52. nop
  53. nop
  54. nop
  55. nop
  56. nop
  57. rsm PSW_SM_Q, %r0 /* prep to load iia queue */
  58. mtctl %r0, %cr17 /* Clear IIASQ tail */
  59. mtctl %r0, %cr17 /* Clear IIASQ head */
  60. mtctl %r1, %cr18 /* IIAOQ head */
  61. ldo 4(%r1), %r1
  62. mtctl %r1, %cr18 /* IIAOQ tail */
  63. load32 REAL_MODE_PSW, %r1
  64. mtctl %r1, %ipsw
  65. rfi
  66. nop
  67. 1: load32 PA(cache_info), %r1
  68. /* Flush Instruction Tlb */
  69. LDREG ITLB_SID_BASE(%r1), %r20
  70. LDREG ITLB_SID_STRIDE(%r1), %r21
  71. LDREG ITLB_SID_COUNT(%r1), %r22
  72. LDREG ITLB_OFF_BASE(%r1), %arg0
  73. LDREG ITLB_OFF_STRIDE(%r1), %arg1
  74. LDREG ITLB_OFF_COUNT(%r1), %arg2
  75. LDREG ITLB_LOOP(%r1), %arg3
  76. addib,COND(=) -1, %arg3, fitoneloop /* Preadjust and test */
  77. movb,<,n %arg3, %r31, fitdone /* If loop < 0, skip */
  78. copy %arg0, %r28 /* Init base addr */
  79. fitmanyloop: /* Loop if LOOP >= 2 */
  80. mtsp %r20, %sr1
  81. add %r21, %r20, %r20 /* increment space */
  82. copy %arg2, %r29 /* Init middle loop count */
  83. fitmanymiddle: /* Loop if LOOP >= 2 */
  84. addib,COND(>) -1, %r31, fitmanymiddle /* Adjusted inner loop decr */
  85. pitlbe 0(%sr1, %r28)
  86. pitlbe,m %arg1(%sr1, %r28) /* Last pitlbe and addr adjust */
  87. addib,COND(>) -1, %r29, fitmanymiddle /* Middle loop decr */
  88. copy %arg3, %r31 /* Re-init inner loop count */
  89. movb,tr %arg0, %r28, fitmanyloop /* Re-init base addr */
  90. addib,COND(<=),n -1, %r22, fitdone /* Outer loop count decr */
  91. fitoneloop: /* Loop if LOOP = 1 */
  92. mtsp %r20, %sr1
  93. copy %arg0, %r28 /* init base addr */
  94. copy %arg2, %r29 /* init middle loop count */
  95. fitonemiddle: /* Loop if LOOP = 1 */
  96. addib,COND(>) -1, %r29, fitonemiddle /* Middle loop count decr */
  97. pitlbe,m %arg1(%sr1, %r28) /* pitlbe for one loop */
  98. addib,COND(>) -1, %r22, fitoneloop /* Outer loop count decr */
  99. add %r21, %r20, %r20 /* increment space */
  100. fitdone:
  101. /* Flush Data Tlb */
  102. LDREG DTLB_SID_BASE(%r1), %r20
  103. LDREG DTLB_SID_STRIDE(%r1), %r21
  104. LDREG DTLB_SID_COUNT(%r1), %r22
  105. LDREG DTLB_OFF_BASE(%r1), %arg0
  106. LDREG DTLB_OFF_STRIDE(%r1), %arg1
  107. LDREG DTLB_OFF_COUNT(%r1), %arg2
  108. LDREG DTLB_LOOP(%r1), %arg3
  109. addib,COND(=) -1, %arg3, fdtoneloop /* Preadjust and test */
  110. movb,<,n %arg3, %r31, fdtdone /* If loop < 0, skip */
  111. copy %arg0, %r28 /* Init base addr */
  112. fdtmanyloop: /* Loop if LOOP >= 2 */
  113. mtsp %r20, %sr1
  114. add %r21, %r20, %r20 /* increment space */
  115. copy %arg2, %r29 /* Init middle loop count */
  116. fdtmanymiddle: /* Loop if LOOP >= 2 */
  117. addib,COND(>) -1, %r31, fdtmanymiddle /* Adjusted inner loop decr */
  118. pdtlbe 0(%sr1, %r28)
  119. pdtlbe,m %arg1(%sr1, %r28) /* Last pdtlbe and addr adjust */
  120. addib,COND(>) -1, %r29, fdtmanymiddle /* Middle loop decr */
  121. copy %arg3, %r31 /* Re-init inner loop count */
  122. movb,tr %arg0, %r28, fdtmanyloop /* Re-init base addr */
  123. addib,COND(<=),n -1, %r22,fdtdone /* Outer loop count decr */
  124. fdtoneloop: /* Loop if LOOP = 1 */
  125. mtsp %r20, %sr1
  126. copy %arg0, %r28 /* init base addr */
  127. copy %arg2, %r29 /* init middle loop count */
  128. fdtonemiddle: /* Loop if LOOP = 1 */
  129. addib,COND(>) -1, %r29, fdtonemiddle /* Middle loop count decr */
  130. pdtlbe,m %arg1(%sr1, %r28) /* pdtlbe for one loop */
  131. addib,COND(>) -1, %r22, fdtoneloop /* Outer loop count decr */
  132. add %r21, %r20, %r20 /* increment space */
  133. fdtdone:
  134. /*
  135. * Switch back to virtual mode
  136. */
  137. /* pcxt_ssm_bug */
  138. rsm PSW_SM_I, %r0
  139. load32 2f, %r1
  140. nop
  141. nop
  142. nop
  143. nop
  144. nop
  145. rsm PSW_SM_Q, %r0 /* prep to load iia queue */
  146. mtctl %r0, %cr17 /* Clear IIASQ tail */
  147. mtctl %r0, %cr17 /* Clear IIASQ head */
  148. mtctl %r1, %cr18 /* IIAOQ head */
  149. ldo 4(%r1), %r1
  150. mtctl %r1, %cr18 /* IIAOQ tail */
  151. load32 KERNEL_PSW, %r1
  152. or %r1, %r19, %r1 /* I-bit to state on entry */
  153. mtctl %r1, %ipsw /* restore I-bit (entire PSW) */
  154. rfi
  155. nop
  156. 2: bv %r0(%r2)
  157. nop
  158. .exit
  159. .procend
  160. ENDPROC(flush_tlb_all_local)
  161. .import cache_info,data
  162. ENTRY(flush_instruction_cache_local)
  163. .proc
  164. .callinfo NO_CALLS
  165. .entry
  166. load32 cache_info, %r1
  167. /* Flush Instruction Cache */
  168. LDREG ICACHE_BASE(%r1), %arg0
  169. LDREG ICACHE_STRIDE(%r1), %arg1
  170. LDREG ICACHE_COUNT(%r1), %arg2
  171. LDREG ICACHE_LOOP(%r1), %arg3
  172. rsm PSW_SM_I, %r22 /* No mmgt ops during loop*/
  173. mtsp %r0, %sr1
  174. addib,COND(=) -1, %arg3, fioneloop /* Preadjust and test */
  175. movb,<,n %arg3, %r31, fisync /* If loop < 0, do sync */
  176. fimanyloop: /* Loop if LOOP >= 2 */
  177. addib,COND(>) -1, %r31, fimanyloop /* Adjusted inner loop decr */
  178. fice %r0(%sr1, %arg0)
  179. fice,m %arg1(%sr1, %arg0) /* Last fice and addr adjust */
  180. movb,tr %arg3, %r31, fimanyloop /* Re-init inner loop count */
  181. addib,COND(<=),n -1, %arg2, fisync /* Outer loop decr */
  182. fioneloop: /* Loop if LOOP = 1 */
  183. /* Some implementations may flush with a single fice instruction */
  184. cmpib,COND(>>=),n 15, %arg2, fioneloop2
  185. fioneloop1:
  186. fice,m %arg1(%sr1, %arg0)
  187. fice,m %arg1(%sr1, %arg0)
  188. fice,m %arg1(%sr1, %arg0)
  189. fice,m %arg1(%sr1, %arg0)
  190. fice,m %arg1(%sr1, %arg0)
  191. fice,m %arg1(%sr1, %arg0)
  192. fice,m %arg1(%sr1, %arg0)
  193. fice,m %arg1(%sr1, %arg0)
  194. fice,m %arg1(%sr1, %arg0)
  195. fice,m %arg1(%sr1, %arg0)
  196. fice,m %arg1(%sr1, %arg0)
  197. fice,m %arg1(%sr1, %arg0)
  198. fice,m %arg1(%sr1, %arg0)
  199. fice,m %arg1(%sr1, %arg0)
  200. fice,m %arg1(%sr1, %arg0)
  201. addib,COND(>) -16, %arg2, fioneloop1
  202. fice,m %arg1(%sr1, %arg0)
  203. /* Check if done */
  204. cmpb,COND(=),n %arg2, %r0, fisync /* Predict branch taken */
  205. fioneloop2:
  206. addib,COND(>) -1, %arg2, fioneloop2 /* Outer loop count decr */
  207. fice,m %arg1(%sr1, %arg0) /* Fice for one loop */
  208. fisync:
  209. sync
  210. mtsm %r22 /* restore I-bit */
  211. bv %r0(%r2)
  212. nop
  213. .exit
  214. .procend
  215. ENDPROC(flush_instruction_cache_local)
  216. .import cache_info, data
  217. ENTRY(flush_data_cache_local)
  218. .proc
  219. .callinfo NO_CALLS
  220. .entry
  221. load32 cache_info, %r1
  222. /* Flush Data Cache */
  223. LDREG DCACHE_BASE(%r1), %arg0
  224. LDREG DCACHE_STRIDE(%r1), %arg1
  225. LDREG DCACHE_COUNT(%r1), %arg2
  226. LDREG DCACHE_LOOP(%r1), %arg3
  227. rsm PSW_SM_I, %r22 /* No mmgt ops during loop*/
  228. mtsp %r0, %sr1
  229. addib,COND(=) -1, %arg3, fdoneloop /* Preadjust and test */
  230. movb,<,n %arg3, %r31, fdsync /* If loop < 0, do sync */
  231. fdmanyloop: /* Loop if LOOP >= 2 */
  232. addib,COND(>) -1, %r31, fdmanyloop /* Adjusted inner loop decr */
  233. fdce %r0(%sr1, %arg0)
  234. fdce,m %arg1(%sr1, %arg0) /* Last fdce and addr adjust */
  235. movb,tr %arg3, %r31, fdmanyloop /* Re-init inner loop count */
  236. addib,COND(<=),n -1, %arg2, fdsync /* Outer loop decr */
  237. fdoneloop: /* Loop if LOOP = 1 */
  238. /* Some implementations may flush with a single fdce instruction */
  239. cmpib,COND(>>=),n 15, %arg2, fdoneloop2
  240. fdoneloop1:
  241. fdce,m %arg1(%sr1, %arg0)
  242. fdce,m %arg1(%sr1, %arg0)
  243. fdce,m %arg1(%sr1, %arg0)
  244. fdce,m %arg1(%sr1, %arg0)
  245. fdce,m %arg1(%sr1, %arg0)
  246. fdce,m %arg1(%sr1, %arg0)
  247. fdce,m %arg1(%sr1, %arg0)
  248. fdce,m %arg1(%sr1, %arg0)
  249. fdce,m %arg1(%sr1, %arg0)
  250. fdce,m %arg1(%sr1, %arg0)
  251. fdce,m %arg1(%sr1, %arg0)
  252. fdce,m %arg1(%sr1, %arg0)
  253. fdce,m %arg1(%sr1, %arg0)
  254. fdce,m %arg1(%sr1, %arg0)
  255. fdce,m %arg1(%sr1, %arg0)
  256. addib,COND(>) -16, %arg2, fdoneloop1
  257. fdce,m %arg1(%sr1, %arg0)
  258. /* Check if done */
  259. cmpb,COND(=),n %arg2, %r0, fdsync /* Predict branch taken */
  260. fdoneloop2:
  261. addib,COND(>) -1, %arg2, fdoneloop2 /* Outer loop count decr */
  262. fdce,m %arg1(%sr1, %arg0) /* Fdce for one loop */
  263. fdsync:
  264. syncdma
  265. sync
  266. mtsm %r22 /* restore I-bit */
  267. bv %r0(%r2)
  268. nop
  269. .exit
  270. .procend
  271. ENDPROC(flush_data_cache_local)
  272. .align 16
  273. /* Macros to serialize TLB purge operations on SMP. */
  274. .macro tlb_lock la,flags,tmp
  275. #ifdef CONFIG_SMP
  276. ldil L%pa_tlb_lock,%r1
  277. ldo R%pa_tlb_lock(%r1),\la
  278. rsm PSW_SM_I,\flags
  279. 1: LDCW 0(\la),\tmp
  280. cmpib,<>,n 0,\tmp,3f
  281. 2: ldw 0(\la),\tmp
  282. cmpb,<> %r0,\tmp,1b
  283. nop
  284. b,n 2b
  285. 3:
  286. #endif
  287. .endm
  288. .macro tlb_unlock la,flags,tmp
  289. #ifdef CONFIG_SMP
  290. ldi 1,\tmp
  291. stw \tmp,0(\la)
  292. mtsm \flags
  293. #endif
  294. .endm
  295. /* Clear page using kernel mapping. */
  296. ENTRY(clear_page_asm)
  297. .proc
  298. .callinfo NO_CALLS
  299. .entry
  300. #ifdef CONFIG_64BIT
  301. /* Unroll the loop. */
  302. ldi (PAGE_SIZE / 128), %r1
  303. 1:
  304. std %r0, 0(%r26)
  305. std %r0, 8(%r26)
  306. std %r0, 16(%r26)
  307. std %r0, 24(%r26)
  308. std %r0, 32(%r26)
  309. std %r0, 40(%r26)
  310. std %r0, 48(%r26)
  311. std %r0, 56(%r26)
  312. std %r0, 64(%r26)
  313. std %r0, 72(%r26)
  314. std %r0, 80(%r26)
  315. std %r0, 88(%r26)
  316. std %r0, 96(%r26)
  317. std %r0, 104(%r26)
  318. std %r0, 112(%r26)
  319. std %r0, 120(%r26)
  320. /* Note reverse branch hint for addib is taken. */
  321. addib,COND(>),n -1, %r1, 1b
  322. ldo 128(%r26), %r26
  323. #else
  324. /*
  325. * Note that until (if) we start saving the full 64-bit register
  326. * values on interrupt, we can't use std on a 32 bit kernel.
  327. */
  328. ldi (PAGE_SIZE / 64), %r1
  329. 1:
  330. stw %r0, 0(%r26)
  331. stw %r0, 4(%r26)
  332. stw %r0, 8(%r26)
  333. stw %r0, 12(%r26)
  334. stw %r0, 16(%r26)
  335. stw %r0, 20(%r26)
  336. stw %r0, 24(%r26)
  337. stw %r0, 28(%r26)
  338. stw %r0, 32(%r26)
  339. stw %r0, 36(%r26)
  340. stw %r0, 40(%r26)
  341. stw %r0, 44(%r26)
  342. stw %r0, 48(%r26)
  343. stw %r0, 52(%r26)
  344. stw %r0, 56(%r26)
  345. stw %r0, 60(%r26)
  346. addib,COND(>),n -1, %r1, 1b
  347. ldo 64(%r26), %r26
  348. #endif
  349. bv %r0(%r2)
  350. nop
  351. .exit
  352. .procend
  353. ENDPROC(clear_page_asm)
  354. /* Copy page using kernel mapping. */
  355. ENTRY(copy_page_asm)
  356. .proc
  357. .callinfo NO_CALLS
  358. .entry
  359. #ifdef CONFIG_64BIT
  360. /* PA8x00 CPUs can consume 2 loads or 1 store per cycle.
  361. * Unroll the loop by hand and arrange insn appropriately.
  362. * Prefetch doesn't improve performance on rp3440.
  363. * GCC probably can do this just as well...
  364. */
  365. ldi (PAGE_SIZE / 128), %r1
  366. 1: ldd 0(%r25), %r19
  367. ldd 8(%r25), %r20
  368. ldd 16(%r25), %r21
  369. ldd 24(%r25), %r22
  370. std %r19, 0(%r26)
  371. std %r20, 8(%r26)
  372. ldd 32(%r25), %r19
  373. ldd 40(%r25), %r20
  374. std %r21, 16(%r26)
  375. std %r22, 24(%r26)
  376. ldd 48(%r25), %r21
  377. ldd 56(%r25), %r22
  378. std %r19, 32(%r26)
  379. std %r20, 40(%r26)
  380. ldd 64(%r25), %r19
  381. ldd 72(%r25), %r20
  382. std %r21, 48(%r26)
  383. std %r22, 56(%r26)
  384. ldd 80(%r25), %r21
  385. ldd 88(%r25), %r22
  386. std %r19, 64(%r26)
  387. std %r20, 72(%r26)
  388. ldd 96(%r25), %r19
  389. ldd 104(%r25), %r20
  390. std %r21, 80(%r26)
  391. std %r22, 88(%r26)
  392. ldd 112(%r25), %r21
  393. ldd 120(%r25), %r22
  394. ldo 128(%r25), %r25
  395. std %r19, 96(%r26)
  396. std %r20, 104(%r26)
  397. std %r21, 112(%r26)
  398. std %r22, 120(%r26)
  399. /* Note reverse branch hint for addib is taken. */
  400. addib,COND(>),n -1, %r1, 1b
  401. ldo 128(%r26), %r26
  402. #else
  403. /*
  404. * This loop is optimized for PCXL/PCXL2 ldw/ldw and stw/stw
  405. * bundles (very restricted rules for bundling).
  406. * Note that until (if) we start saving
  407. * the full 64 bit register values on interrupt, we can't
  408. * use ldd/std on a 32 bit kernel.
  409. */
  410. ldw 0(%r25), %r19
  411. ldi (PAGE_SIZE / 64), %r1
  412. 1:
  413. ldw 4(%r25), %r20
  414. ldw 8(%r25), %r21
  415. ldw 12(%r25), %r22
  416. stw %r19, 0(%r26)
  417. stw %r20, 4(%r26)
  418. stw %r21, 8(%r26)
  419. stw %r22, 12(%r26)
  420. ldw 16(%r25), %r19
  421. ldw 20(%r25), %r20
  422. ldw 24(%r25), %r21
  423. ldw 28(%r25), %r22
  424. stw %r19, 16(%r26)
  425. stw %r20, 20(%r26)
  426. stw %r21, 24(%r26)
  427. stw %r22, 28(%r26)
  428. ldw 32(%r25), %r19
  429. ldw 36(%r25), %r20
  430. ldw 40(%r25), %r21
  431. ldw 44(%r25), %r22
  432. stw %r19, 32(%r26)
  433. stw %r20, 36(%r26)
  434. stw %r21, 40(%r26)
  435. stw %r22, 44(%r26)
  436. ldw 48(%r25), %r19
  437. ldw 52(%r25), %r20
  438. ldw 56(%r25), %r21
  439. ldw 60(%r25), %r22
  440. stw %r19, 48(%r26)
  441. stw %r20, 52(%r26)
  442. ldo 64(%r25), %r25
  443. stw %r21, 56(%r26)
  444. stw %r22, 60(%r26)
  445. ldo 64(%r26), %r26
  446. addib,COND(>),n -1, %r1, 1b
  447. ldw 0(%r25), %r19
  448. #endif
  449. bv %r0(%r2)
  450. nop
  451. .exit
  452. .procend
  453. ENDPROC(copy_page_asm)
  454. /*
  455. * NOTE: Code in clear_user_page has a hard coded dependency on the
  456. * maximum alias boundary being 4 Mb. We've been assured by the
  457. * parisc chip designers that there will not ever be a parisc
  458. * chip with a larger alias boundary (Never say never :-) ).
  459. *
  460. * Subtle: the dtlb miss handlers support the temp alias region by
  461. * "knowing" that if a dtlb miss happens within the temp alias
  462. * region it must have occurred while in clear_user_page. Since
  463. * this routine makes use of processor local translations, we
  464. * don't want to insert them into the kernel page table. Instead,
  465. * we load up some general registers (they need to be registers
  466. * which aren't shadowed) with the physical page numbers (preshifted
  467. * for tlb insertion) needed to insert the translations. When we
  468. * miss on the translation, the dtlb miss handler inserts the
  469. * translation into the tlb using these values:
  470. *
  471. * %r26 physical page (shifted for tlb insert) of "to" translation
  472. * %r23 physical page (shifted for tlb insert) of "from" translation
  473. */
  474. /* Drop prot bits and convert to page addr for iitlbt and idtlbt */
  475. #define PAGE_ADD_SHIFT (PAGE_SHIFT-12)
  476. .macro convert_phys_for_tlb_insert20 phys
  477. extrd,u \phys, 56-PAGE_ADD_SHIFT, 32-PAGE_ADD_SHIFT, \phys
  478. #if _PAGE_SIZE_ENCODING_DEFAULT
  479. depdi _PAGE_SIZE_ENCODING_DEFAULT, 63, (63-58), \phys
  480. #endif
  481. .endm
  482. /*
  483. * We can't do this since copy_user_page is used to bring in
  484. * file data that might have instructions. Since the data would
  485. * then need to be flushed out so the i-fetch can see it, it
  486. * makes more sense to just copy through the kernel translation
  487. * and flush it.
  488. *
  489. * I'm still keeping this around because it may be possible to
  490. * use it if more information is passed into copy_user_page().
  491. * Have to do some measurements to see if it is worthwhile to
  492. * lobby for such a change.
  493. *
  494. */
  495. ENTRY(copy_user_page_asm)
  496. .proc
  497. .callinfo NO_CALLS
  498. .entry
  499. /* Convert virtual `to' and `from' addresses to physical addresses.
  500. Move `from' physical address to non shadowed register. */
  501. ldil L%(__PAGE_OFFSET), %r1
  502. sub %r26, %r1, %r26
  503. sub %r25, %r1, %r23
  504. ldil L%(TMPALIAS_MAP_START), %r28
  505. #ifdef CONFIG_64BIT
  506. #if (TMPALIAS_MAP_START >= 0x80000000)
  507. depdi 0, 31,32, %r28 /* clear any sign extension */
  508. #endif
  509. convert_phys_for_tlb_insert20 %r26 /* convert phys addr to tlb insert format */
  510. convert_phys_for_tlb_insert20 %r23 /* convert phys addr to tlb insert format */
  511. depd %r24,63,22, %r28 /* Form aliased virtual address 'to' */
  512. depdi 0, 63,PAGE_SHIFT, %r28 /* Clear any offset bits */
  513. copy %r28, %r29
  514. depdi 1, 41,1, %r29 /* Form aliased virtual address 'from' */
  515. #else
  516. extrw,u %r26, 24,25, %r26 /* convert phys addr to tlb insert format */
  517. extrw,u %r23, 24,25, %r23 /* convert phys addr to tlb insert format */
  518. depw %r24, 31,22, %r28 /* Form aliased virtual address 'to' */
  519. depwi 0, 31,PAGE_SHIFT, %r28 /* Clear any offset bits */
  520. copy %r28, %r29
  521. depwi 1, 9,1, %r29 /* Form aliased virtual address 'from' */
  522. #endif
  523. /* Purge any old translations */
  524. #ifdef CONFIG_PA20
  525. pdtlb,l 0(%r28)
  526. pdtlb,l 0(%r29)
  527. #else
  528. tlb_lock %r20,%r21,%r22
  529. pdtlb 0(%r28)
  530. pdtlb 0(%r29)
  531. tlb_unlock %r20,%r21,%r22
  532. #endif
  533. #ifdef CONFIG_64BIT
  534. /* PA8x00 CPUs can consume 2 loads or 1 store per cycle.
  535. * Unroll the loop by hand and arrange insn appropriately.
  536. * GCC probably can do this just as well.
  537. */
  538. ldd 0(%r29), %r19
  539. ldi (PAGE_SIZE / 128), %r1
  540. 1: ldd 8(%r29), %r20
  541. ldd 16(%r29), %r21
  542. ldd 24(%r29), %r22
  543. std %r19, 0(%r28)
  544. std %r20, 8(%r28)
  545. ldd 32(%r29), %r19
  546. ldd 40(%r29), %r20
  547. std %r21, 16(%r28)
  548. std %r22, 24(%r28)
  549. ldd 48(%r29), %r21
  550. ldd 56(%r29), %r22
  551. std %r19, 32(%r28)
  552. std %r20, 40(%r28)
  553. ldd 64(%r29), %r19
  554. ldd 72(%r29), %r20
  555. std %r21, 48(%r28)
  556. std %r22, 56(%r28)
  557. ldd 80(%r29), %r21
  558. ldd 88(%r29), %r22
  559. std %r19, 64(%r28)
  560. std %r20, 72(%r28)
  561. ldd 96(%r29), %r19
  562. ldd 104(%r29), %r20
  563. std %r21, 80(%r28)
  564. std %r22, 88(%r28)
  565. ldd 112(%r29), %r21
  566. ldd 120(%r29), %r22
  567. std %r19, 96(%r28)
  568. std %r20, 104(%r28)
  569. ldo 128(%r29), %r29
  570. std %r21, 112(%r28)
  571. std %r22, 120(%r28)
  572. ldo 128(%r28), %r28
  573. /* conditional branches nullify on forward taken branch, and on
  574. * non-taken backward branch. Note that .+4 is a backwards branch.
  575. * The ldd should only get executed if the branch is taken.
  576. */
  577. addib,COND(>),n -1, %r1, 1b /* bundle 10 */
  578. ldd 0(%r29), %r19 /* start next loads */
  579. #else
  580. ldi (PAGE_SIZE / 64), %r1
  581. /*
  582. * This loop is optimized for PCXL/PCXL2 ldw/ldw and stw/stw
  583. * bundles (very restricted rules for bundling). It probably
  584. * does OK on PCXU and better, but we could do better with
  585. * ldd/std instructions. Note that until (if) we start saving
  586. * the full 64 bit register values on interrupt, we can't
  587. * use ldd/std on a 32 bit kernel.
  588. */
  589. 1: ldw 0(%r29), %r19
  590. ldw 4(%r29), %r20
  591. ldw 8(%r29), %r21
  592. ldw 12(%r29), %r22
  593. stw %r19, 0(%r28)
  594. stw %r20, 4(%r28)
  595. stw %r21, 8(%r28)
  596. stw %r22, 12(%r28)
  597. ldw 16(%r29), %r19
  598. ldw 20(%r29), %r20
  599. ldw 24(%r29), %r21
  600. ldw 28(%r29), %r22
  601. stw %r19, 16(%r28)
  602. stw %r20, 20(%r28)
  603. stw %r21, 24(%r28)
  604. stw %r22, 28(%r28)
  605. ldw 32(%r29), %r19
  606. ldw 36(%r29), %r20
  607. ldw 40(%r29), %r21
  608. ldw 44(%r29), %r22
  609. stw %r19, 32(%r28)
  610. stw %r20, 36(%r28)
  611. stw %r21, 40(%r28)
  612. stw %r22, 44(%r28)
  613. ldw 48(%r29), %r19
  614. ldw 52(%r29), %r20
  615. ldw 56(%r29), %r21
  616. ldw 60(%r29), %r22
  617. stw %r19, 48(%r28)
  618. stw %r20, 52(%r28)
  619. stw %r21, 56(%r28)
  620. stw %r22, 60(%r28)
  621. ldo 64(%r28), %r28
  622. addib,COND(>) -1, %r1,1b
  623. ldo 64(%r29), %r29
  624. #endif
  625. bv %r0(%r2)
  626. nop
  627. .exit
  628. .procend
  629. ENDPROC(copy_user_page_asm)
  630. ENTRY(clear_user_page_asm)
  631. .proc
  632. .callinfo NO_CALLS
  633. .entry
  634. tophys_r1 %r26
  635. ldil L%(TMPALIAS_MAP_START), %r28
  636. #ifdef CONFIG_64BIT
  637. #if (TMPALIAS_MAP_START >= 0x80000000)
  638. depdi 0, 31,32, %r28 /* clear any sign extension */
  639. #endif
  640. convert_phys_for_tlb_insert20 %r26 /* convert phys addr to tlb insert format */
  641. depd %r25, 63,22, %r28 /* Form aliased virtual address 'to' */
  642. depdi 0, 63,PAGE_SHIFT, %r28 /* Clear any offset bits */
  643. #else
  644. extrw,u %r26, 24,25, %r26 /* convert phys addr to tlb insert format */
  645. depw %r25, 31,22, %r28 /* Form aliased virtual address 'to' */
  646. depwi 0, 31,PAGE_SHIFT, %r28 /* Clear any offset bits */
  647. #endif
  648. /* Purge any old translation */
  649. #ifdef CONFIG_PA20
  650. pdtlb,l 0(%r28)
  651. #else
  652. tlb_lock %r20,%r21,%r22
  653. pdtlb 0(%r28)
  654. tlb_unlock %r20,%r21,%r22
  655. #endif
  656. #ifdef CONFIG_64BIT
  657. ldi (PAGE_SIZE / 128), %r1
  658. /* PREFETCH (Write) has not (yet) been proven to help here */
  659. /* #define PREFETCHW_OP ldd 256(%0), %r0 */
  660. 1: std %r0, 0(%r28)
  661. std %r0, 8(%r28)
  662. std %r0, 16(%r28)
  663. std %r0, 24(%r28)
  664. std %r0, 32(%r28)
  665. std %r0, 40(%r28)
  666. std %r0, 48(%r28)
  667. std %r0, 56(%r28)
  668. std %r0, 64(%r28)
  669. std %r0, 72(%r28)
  670. std %r0, 80(%r28)
  671. std %r0, 88(%r28)
  672. std %r0, 96(%r28)
  673. std %r0, 104(%r28)
  674. std %r0, 112(%r28)
  675. std %r0, 120(%r28)
  676. addib,COND(>) -1, %r1, 1b
  677. ldo 128(%r28), %r28
  678. #else /* ! CONFIG_64BIT */
  679. ldi (PAGE_SIZE / 64), %r1
  680. 1: stw %r0, 0(%r28)
  681. stw %r0, 4(%r28)
  682. stw %r0, 8(%r28)
  683. stw %r0, 12(%r28)
  684. stw %r0, 16(%r28)
  685. stw %r0, 20(%r28)
  686. stw %r0, 24(%r28)
  687. stw %r0, 28(%r28)
  688. stw %r0, 32(%r28)
  689. stw %r0, 36(%r28)
  690. stw %r0, 40(%r28)
  691. stw %r0, 44(%r28)
  692. stw %r0, 48(%r28)
  693. stw %r0, 52(%r28)
  694. stw %r0, 56(%r28)
  695. stw %r0, 60(%r28)
  696. addib,COND(>) -1, %r1, 1b
  697. ldo 64(%r28), %r28
  698. #endif /* CONFIG_64BIT */
  699. bv %r0(%r2)
  700. nop
  701. .exit
  702. .procend
  703. ENDPROC(clear_user_page_asm)
  704. ENTRY(flush_dcache_page_asm)
  705. .proc
  706. .callinfo NO_CALLS
  707. .entry
  708. ldil L%(TMPALIAS_MAP_START), %r28
  709. #ifdef CONFIG_64BIT
  710. #if (TMPALIAS_MAP_START >= 0x80000000)
  711. depdi 0, 31,32, %r28 /* clear any sign extension */
  712. #endif
  713. convert_phys_for_tlb_insert20 %r26 /* convert phys addr to tlb insert format */
  714. depd %r25, 63,22, %r28 /* Form aliased virtual address 'to' */
  715. depdi 0, 63,PAGE_SHIFT, %r28 /* Clear any offset bits */
  716. #else
  717. extrw,u %r26, 24,25, %r26 /* convert phys addr to tlb insert format */
  718. depw %r25, 31,22, %r28 /* Form aliased virtual address 'to' */
  719. depwi 0, 31,PAGE_SHIFT, %r28 /* Clear any offset bits */
  720. #endif
  721. /* Purge any old translation */
  722. #ifdef CONFIG_PA20
  723. pdtlb,l 0(%r28)
  724. #else
  725. tlb_lock %r20,%r21,%r22
  726. pdtlb 0(%r28)
  727. tlb_unlock %r20,%r21,%r22
  728. #endif
  729. ldil L%dcache_stride, %r1
  730. ldw R%dcache_stride(%r1), r31
  731. #ifdef CONFIG_64BIT
  732. depdi,z 1, 63-PAGE_SHIFT,1, %r25
  733. #else
  734. depwi,z 1, 31-PAGE_SHIFT,1, %r25
  735. #endif
  736. add %r28, %r25, %r25
  737. sub %r25, r31, %r25
  738. 1: fdc,m r31(%r28)
  739. fdc,m r31(%r28)
  740. fdc,m r31(%r28)
  741. fdc,m r31(%r28)
  742. fdc,m r31(%r28)
  743. fdc,m r31(%r28)
  744. fdc,m r31(%r28)
  745. fdc,m r31(%r28)
  746. fdc,m r31(%r28)
  747. fdc,m r31(%r28)
  748. fdc,m r31(%r28)
  749. fdc,m r31(%r28)
  750. fdc,m r31(%r28)
  751. fdc,m r31(%r28)
  752. fdc,m r31(%r28)
  753. cmpb,COND(<<) %r28, %r25,1b
  754. fdc,m r31(%r28)
  755. sync
  756. #ifdef CONFIG_PA20
  757. pdtlb,l 0(%r25)
  758. #else
  759. tlb_lock %r20,%r21,%r22
  760. pdtlb 0(%r25)
  761. tlb_unlock %r20,%r21,%r22
  762. #endif
  763. bv %r0(%r2)
  764. nop
  765. .exit
  766. .procend
  767. ENDPROC(flush_dcache_page_asm)
/*
 * flush_icache_page_asm(phys_addr, vaddr)
 *
 * Flush one whole page from the instruction cache.  The page is
 * reached through a temporary alias mapping built at
 * TMPALIAS_MAP_START, so only the physical address plus the
 * color/alias bits of the original virtual address are needed.
 *
 * In:	%r26 = physical address of the page (PA-RISC arg0)
 *	%r25 = virtual address the page is mapped at; supplies the
 *	       alias bits deposited into the tmpalias address (arg1)
 * Clobbers: %r1, %r25, %r28, %r31
 *	     (%r20-%r22 as well on non-PA20 via tlb_lock/tlb_unlock)
 */
ENTRY(flush_icache_page_asm)
	.proc
	.callinfo NO_CALLS
	.entry

	/* Build the tmpalias virtual address for this physical page */
	ldil		L%(TMPALIAS_MAP_START), %r28
#ifdef CONFIG_64BIT
#if (TMPALIAS_MAP_START >= 0x80000000)
	depdi		0, 31,32, %r28		/* clear any sign extension */
#endif
	convert_phys_for_tlb_insert20 %r26	/* convert phys addr to tlb insert format */
	depd		%r25, 63,22, %r28	/* Form aliased virtual address 'to' */
	depdi		0, 63,PAGE_SHIFT, %r28	/* Clear any offset bits */
#else
	extrw,u		%r26, 24,25, %r26	/* convert phys addr to tlb insert format */
	depw		%r25, 31,22, %r28	/* Form aliased virtual address 'to' */
	depwi		0, 31,PAGE_SHIFT, %r28	/* Clear any offset bits */
#endif

	/* Purge any old translation of the alias address */

#ifdef CONFIG_PA20
	pitlb,l		%r0(%sr4,%r28)		/* PA2.0: local purge, no lock needed */
#else
	tlb_lock	%r20,%r21,%r22
	pitlb		(%sr4,%r28)
	tlb_unlock	%r20,%r21,%r22
#endif

	ldil		L%icache_stride, %r1
	ldw		R%icache_stride(%r1), %r31	/* %r31 = I-cache line stride */

	/* %r25 = address of the last cache line of the page:
	 * start + PAGE_SIZE - stride */
#ifdef CONFIG_64BIT
	depdi,z		1, 63-PAGE_SHIFT,1, %r25	/* %r25 = PAGE_SIZE */
#else
	depwi,z		1, 31-PAGE_SHIFT,1, %r25
#endif
	add		%r28, %r25, %r25
	sub		%r25, %r31, %r25

	/* Flush the page, 16 lines per loop iteration to amortize the
	 * branch.  fic only has the type 26 form on PA1.1, requiring an
	 * explicit space specification, so use %sr4 */
1:	fic,m		%r31(%sr4,%r28)
	fic,m		%r31(%sr4,%r28)
	fic,m		%r31(%sr4,%r28)
	fic,m		%r31(%sr4,%r28)
	fic,m		%r31(%sr4,%r28)
	fic,m		%r31(%sr4,%r28)
	fic,m		%r31(%sr4,%r28)
	fic,m		%r31(%sr4,%r28)
	fic,m		%r31(%sr4,%r28)
	fic,m		%r31(%sr4,%r28)
	fic,m		%r31(%sr4,%r28)
	fic,m		%r31(%sr4,%r28)
	fic,m		%r31(%sr4,%r28)
	fic,m		%r31(%sr4,%r28)
	fic,m		%r31(%sr4,%r28)
	cmpb,COND(<<)	%r28, %r25,1b		/* loop while %r28 < last line */
	fic,m		%r31(%sr4,%r28)		/* delay slot: 16th flush */

	sync

	/* Purge the tmpalias translation now that we are done with it */
#ifdef CONFIG_PA20
	pitlb,l		%r0(%sr4,%r25)
#else
	tlb_lock	%r20,%r21,%r22
	pitlb		(%sr4,%r25)
	tlb_unlock	%r20,%r21,%r22
#endif

	bv		%r0(%r2)		/* return to caller (%r2 = rp) */
	nop
	.exit

	.procend
ENDPROC(flush_icache_page_asm)
/*
 * flush_kernel_dcache_page_asm(vaddr)
 *
 * Flush one whole page from the data cache through its kernel
 * virtual address (fdc writes dirty lines back before invalidating;
 * no tmpalias mapping is needed here).
 *
 * In:	%r26 = kernel virtual address of the page (PA-RISC arg0)
 * Clobbers: %r1, %r23, %r25, %r26
 */
ENTRY(flush_kernel_dcache_page_asm)
	.proc
	.callinfo NO_CALLS
	.entry

	ldil		L%dcache_stride, %r1
	ldw		R%dcache_stride(%r1), %r23	/* %r23 = D-cache line stride */

	/* %r25 = address of the last cache line of the page:
	 * start + PAGE_SIZE - stride */
#ifdef CONFIG_64BIT
	depdi,z		1, 63-PAGE_SHIFT,1, %r25	/* %r25 = PAGE_SIZE */
#else
	depwi,z		1, 31-PAGE_SHIFT,1, %r25
#endif
	add		%r26, %r25, %r25
	sub		%r25, %r23, %r25

	/* Flush 16 lines per loop iteration to amortize the branch */
1:	fdc,m		%r23(%r26)
	fdc,m		%r23(%r26)
	fdc,m		%r23(%r26)
	fdc,m		%r23(%r26)
	fdc,m		%r23(%r26)
	fdc,m		%r23(%r26)
	fdc,m		%r23(%r26)
	fdc,m		%r23(%r26)
	fdc,m		%r23(%r26)
	fdc,m		%r23(%r26)
	fdc,m		%r23(%r26)
	fdc,m		%r23(%r26)
	fdc,m		%r23(%r26)
	fdc,m		%r23(%r26)
	fdc,m		%r23(%r26)
	cmpb,COND(<<)	%r26, %r25,1b		/* loop while %r26 < last line */
	fdc,m		%r23(%r26)		/* delay slot: 16th flush */

	sync
	bv		%r0(%r2)		/* return to caller */
	nop
	.exit

	.procend
ENDPROC(flush_kernel_dcache_page_asm)
/*
 * purge_kernel_dcache_page_asm(vaddr)
 *
 * Purge one whole page from the data cache through its kernel
 * virtual address.  Unlike the flush variant above, pdc invalidates
 * the lines WITHOUT writing dirty data back — cached contents for the
 * page are discarded.
 *
 * In:	%r26 = kernel virtual address of the page (PA-RISC arg0)
 * Clobbers: %r1, %r23, %r25, %r26
 */
ENTRY(purge_kernel_dcache_page_asm)
	.proc
	.callinfo NO_CALLS
	.entry

	ldil		L%dcache_stride, %r1
	ldw		R%dcache_stride(%r1), %r23	/* %r23 = D-cache line stride */

	/* %r25 = address of the last cache line of the page:
	 * start + PAGE_SIZE - stride */
#ifdef CONFIG_64BIT
	depdi,z		1, 63-PAGE_SHIFT,1, %r25	/* %r25 = PAGE_SIZE */
#else
	depwi,z		1, 31-PAGE_SHIFT,1, %r25
#endif
	add		%r26, %r25, %r25
	sub		%r25, %r23, %r25

	/* Purge 16 lines per loop iteration to amortize the branch */
1:	pdc,m		%r23(%r26)
	pdc,m		%r23(%r26)
	pdc,m		%r23(%r26)
	pdc,m		%r23(%r26)
	pdc,m		%r23(%r26)
	pdc,m		%r23(%r26)
	pdc,m		%r23(%r26)
	pdc,m		%r23(%r26)
	pdc,m		%r23(%r26)
	pdc,m		%r23(%r26)
	pdc,m		%r23(%r26)
	pdc,m		%r23(%r26)
	pdc,m		%r23(%r26)
	pdc,m		%r23(%r26)
	pdc,m		%r23(%r26)
	cmpb,COND(<<)	%r26, %r25, 1b		/* loop while %r26 < last line */
	pdc,m		%r23(%r26)		/* delay slot: 16th purge */

	sync
	bv		%r0(%r2)		/* return to caller */
	nop
	.exit

	.procend
ENDPROC(purge_kernel_dcache_page_asm)
/*
 * flush_user_dcache_range_asm(start, end)
 *
 * Flush user-space (%sr3) data cache lines covering [start, end).
 * The line holding 'end' itself is not flushed: the ,n completer
 * nullifies the delay-slot fdc when the backward branch falls
 * through on exit.
 *
 * In:	%r26 = start address (arg0), %r25 = end address (arg1)
 * Clobbers: %r1, %r21, %r23, %r26
 */
ENTRY(flush_user_dcache_range_asm)
	.proc
	.callinfo NO_CALLS
	.entry

	ldil		L%dcache_stride, %r1
	ldw		R%dcache_stride(%r1), %r23	/* %r23 = D-cache line stride */
	ldo		-1(%r23), %r21			/* %r21 = stride - 1 (mask) */
	ANDCM		%r26, %r21, %r26		/* align start down to a line */

1:	cmpb,COND(<<),n %r26, %r25, 1b	/* loop while start < end; delay slot */
	fdc,m		%r23(%sr3, %r26) /* runs on taken branch, nullified on exit */

	sync
	bv		%r0(%r2)	/* return to caller */
	nop
	.exit

	.procend
ENDPROC(flush_user_dcache_range_asm)
/*
 * flush_kernel_dcache_range_asm(start, end)
 *
 * Flush kernel data cache lines covering [start, end), then wait for
 * outstanding DMA with syncdma (this variant is used before/after DMA
 * to kernel buffers — note the extra syncdma the user variant lacks).
 *
 * In:	%r26 = start address (arg0), %r25 = end address (arg1)
 * Clobbers: %r1, %r21, %r23, %r26
 */
ENTRY(flush_kernel_dcache_range_asm)
	.proc
	.callinfo NO_CALLS
	.entry

	ldil		L%dcache_stride, %r1
	ldw		R%dcache_stride(%r1), %r23	/* %r23 = D-cache line stride */
	ldo		-1(%r23), %r21			/* %r21 = stride - 1 (mask) */
	ANDCM		%r26, %r21, %r26		/* align start down to a line */

1:	cmpb,COND(<<),n %r26, %r25,1b	/* loop while start < end; delay slot */
	fdc,m		%r23(%r26)	/* runs on taken branch, nullified on exit */

	sync
	syncdma				/* drain DMA before returning */
	bv		%r0(%r2)	/* return to caller */
	nop
	.exit

	.procend
ENDPROC(flush_kernel_dcache_range_asm)
/*
 * flush_user_icache_range_asm(start, end)
 *
 * Flush user-space (%sr3) instruction cache lines covering
 * [start, end).  Same loop shape as flush_user_dcache_range_asm:
 * the delay-slot fic is nullified when the loop exits.
 *
 * In:	%r26 = start address (arg0), %r25 = end address (arg1)
 * Clobbers: %r1, %r21, %r23, %r26
 */
ENTRY(flush_user_icache_range_asm)
	.proc
	.callinfo NO_CALLS
	.entry

	ldil		L%icache_stride, %r1
	ldw		R%icache_stride(%r1), %r23	/* %r23 = I-cache line stride */
	ldo		-1(%r23), %r21			/* %r21 = stride - 1 (mask) */
	ANDCM		%r26, %r21, %r26		/* align start down to a line */

1:	cmpb,COND(<<),n %r26, %r25,1b	/* loop while start < end; delay slot */
	fic,m		%r23(%sr3, %r26) /* runs on taken branch, nullified on exit */

	sync
	bv		%r0(%r2)	/* return to caller */
	nop
	.exit

	.procend
ENDPROC(flush_user_icache_range_asm)
/*
 * flush_kernel_icache_page(vaddr)
 *
 * Flush one whole page from the instruction cache through its kernel
 * virtual address, using space register %sr4 for the fic operand.
 *
 * In:	%r26 = kernel virtual address of the page (PA-RISC arg0)
 * Clobbers: %r1, %r23, %r25, %r26
 */
ENTRY(flush_kernel_icache_page)
	.proc
	.callinfo NO_CALLS
	.entry

	ldil		L%icache_stride, %r1
	ldw		R%icache_stride(%r1), %r23	/* %r23 = I-cache line stride */

	/* %r25 = address of the last cache line of the page:
	 * start + PAGE_SIZE - stride */
#ifdef CONFIG_64BIT
	depdi,z		1, 63-PAGE_SHIFT,1, %r25	/* %r25 = PAGE_SIZE */
#else
	depwi,z		1, 31-PAGE_SHIFT,1, %r25
#endif
	add		%r26, %r25, %r25
	sub		%r25, %r23, %r25

	/* Flush 16 lines per loop iteration to amortize the branch */
1:	fic,m		%r23(%sr4, %r26)
	fic,m		%r23(%sr4, %r26)
	fic,m		%r23(%sr4, %r26)
	fic,m		%r23(%sr4, %r26)
	fic,m		%r23(%sr4, %r26)
	fic,m		%r23(%sr4, %r26)
	fic,m		%r23(%sr4, %r26)
	fic,m		%r23(%sr4, %r26)
	fic,m		%r23(%sr4, %r26)
	fic,m		%r23(%sr4, %r26)
	fic,m		%r23(%sr4, %r26)
	fic,m		%r23(%sr4, %r26)
	fic,m		%r23(%sr4, %r26)
	fic,m		%r23(%sr4, %r26)
	fic,m		%r23(%sr4, %r26)
	cmpb,COND(<<)	%r26, %r25, 1b		/* loop while %r26 < last line */
	fic,m		%r23(%sr4, %r26)	/* delay slot: 16th flush */

	sync
	bv		%r0(%r2)		/* return to caller */
	nop
	.exit

	.procend
ENDPROC(flush_kernel_icache_page)
/*
 * flush_kernel_icache_range_asm(start, end)
 *
 * Flush kernel (%sr4) instruction cache lines covering [start, end).
 * The delay-slot fic executes on each taken backward branch and is
 * nullified (,n) when the loop exits.
 *
 * In:	%r26 = start address (arg0), %r25 = end address (arg1)
 * Clobbers: %r1, %r21, %r23, %r26
 */
ENTRY(flush_kernel_icache_range_asm)
	.proc
	.callinfo NO_CALLS
	.entry

	ldil		L%icache_stride, %r1
	ldw		R%icache_stride(%r1), %r23	/* %r23 = I-cache line stride */
	ldo		-1(%r23), %r21			/* %r21 = stride - 1 (mask) */
	ANDCM		%r26, %r21, %r26		/* align start down to a line */

1:	cmpb,COND(<<),n %r26, %r25, 1b	/* loop while start < end; delay slot */
	fic,m		%r23(%sr4, %r26) /* runs on taken branch, nullified on exit */

	sync
	bv		%r0(%r2)	/* return to caller */
	nop
	.exit

	.procend
ENDPROC(flush_kernel_icache_range_asm)
	/* align should cover use of rfi in disable_sr_hashing_asm and
	 * srdis_done.
	 */
	.align	256

/*
 * disable_sr_hashing_asm(srhash_type)
 *
 * Disable space-register hashing in the CPU's model-specific diagnose
 * registers.  The diagnose registers are poked from real mode: the
 * routine loads the interruption instruction address queues (IIASQ/
 * IIAOQ) and a real-mode PSW, enters real mode via rfi, writes the
 * appropriate diagnose register for the CPU family selected by %r26,
 * then returns to virtual mode the same way.
 *
 * In:	%r26 = CPU family selector: SRHASH_PCXST, SRHASH_PCXL or
 *	       SRHASH_PA20 (unknown values fall through to srdis_done)
 * Clobbers: %r1, %r28; PSW I and Q bits are cleared temporarily
 *
 * The mfdiag/mtdiag opcodes are hand-encoded as .word — the assembler
 * has no mnemonics for them; see the per-word comments.
 */
ENTRY(disable_sr_hashing_asm)
	.proc
	.callinfo NO_CALLS
	.entry

	/*
	 * Switch to real mode
	 */

	/* pcxt_ssm_bug: nop padding after rsm — PCXT ssm/rsm erratum
	 * workaround, presumably; keep the spacing intact */
	rsm		PSW_SM_I, %r0		/* mask interrupts */
	load32		PA(1f), %r1		/* %r1 = physical addr of 1f */
	nop
	nop
	nop
	nop
	nop

	rsm		PSW_SM_Q, %r0		/* prep to load iia queue */
	mtctl		%r0, %cr17		/* Clear IIASQ tail */
	mtctl		%r0, %cr17		/* Clear IIASQ head */
	mtctl		%r1, %cr18		/* IIAOQ head */
	ldo		4(%r1), %r1
	mtctl		%r1, %cr18		/* IIAOQ tail */
	load32		REAL_MODE_PSW, %r1
	mtctl		%r1, %ipsw
	rfi					/* resume at 1f in real mode */
	nop

	/* Dispatch on CPU family; fall through if none matches */
1:      cmpib,=,n	SRHASH_PCXST, %r26,srdis_pcxs
	cmpib,=,n	SRHASH_PCXL, %r26,srdis_pcxl
	cmpib,=,n	SRHASH_PA20, %r26,srdis_pa20
	b,n		srdis_done

srdis_pcxs:

	/* Disable Space Register Hashing for PCXS,PCXT,PCXT' */

	.word		0x141c1a00		/* mfdiag %dr0, %r28 */
	.word		0x141c1a00		/* must issue twice */
	depwi		0,18,1, %r28		/* Clear DHE (dcache hash enable) */
	depwi		0,20,1, %r28		/* Clear IHE (icache hash enable) */
	.word		0x141c1600		/* mtdiag %r28, %dr0 */
	.word		0x141c1600		/* must issue twice */
	b,n		srdis_done

srdis_pcxl:

	/* Disable Space Register Hashing for PCXL */

	.word		0x141c0600		/* mfdiag %dr0, %r28 */
	depwi		0,28,2, %r28		/* Clear DHASH_EN & IHASH_EN */
	.word		0x141c0240		/* mtdiag %r28, %dr0 */
	b,n		srdis_done

srdis_pa20:

	/* Disable Space Register Hashing for PCXU,PCXU+,PCXW,PCXW+,PCXW2 */

	.word		0x144008bc		/* mfdiag %dr2, %r28 */
	depdi		0, 54,1, %r28		/* clear DIAG_SPHASH_ENAB (bit 54) */
	.word		0x145c1840		/* mtdiag %r28, %dr2 */

srdis_done:
	/* Switch back to virtual mode */
	rsm		PSW_SM_I, %r0		/* prep to load iia queue */
	load32		2f, %r1			/* %r1 = virtual addr of 2f */
	nop
	nop
	nop
	nop
	nop

	rsm		PSW_SM_Q, %r0		/* prep to load iia queue */
	mtctl		%r0, %cr17		/* Clear IIASQ tail */
	mtctl		%r0, %cr17		/* Clear IIASQ head */
	mtctl		%r1, %cr18		/* IIAOQ head */
	ldo		4(%r1), %r1
	mtctl		%r1, %cr18		/* IIAOQ tail */
	load32		KERNEL_PSW, %r1
	mtctl		%r1, %ipsw
	rfi					/* resume at 2f in virtual mode */
	nop

2:      bv		%r0(%r2)		/* return to caller */
	nop
	.exit

	.procend
ENDPROC(disable_sr_hashing_asm)
  1084. .end