pacache.S

/*
 * PARISC TLB and cache flushing support
 * Copyright (C) 2000-2001 Hewlett-Packard (John Marvin)
 * Copyright (C) 2001 Matthew Wilcox (willy at parisc-linux.org)
 * Copyright (C) 2002 Richard Hirst (rhirst with parisc-linux.org)
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2, or (at your option)
 * any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 */

/*
 * NOTE: fdc, fic, and pdc instructions that use base register modification
 * should only use index and base registers that are not shadowed,
 * so that the fast path emulation in the non-access miss handler
 * can be used.
 */
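/*
 * Illustrative sketch (assuming the usual PA-RISC shadow register set of
 * %r1, %r8, %r9, %r16, %r17, %r24 and %r25): a base-modifying flush such as
 *
 *	fdc,m	%r23(%r26)
 *
 * keeps both the index (%r23) and the modified base (%r26) outside the
 * shadowed set, so the non-access miss handler's fast path can emulate it;
 * picking a shadowed register such as %r25 as the modified base would
 * defeat that fast path.
 */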
#ifdef CONFIG_64BIT
#define ADDIB	addib,*
#define CMPB	cmpb,*
#define ANDCM	andcm,*

	.level	2.0w
#else
#define ADDIB	addib,
#define CMPB	cmpb,
#define ANDCM	andcm

	.level	2.0
#endif
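/*
 * Note: the trailing ",*" completer appended to ADDIB/CMPB (and ANDCM) in
 * the 64-bit definitions above selects the PA 2.0 doubleword (64-bit)
 * condition forms of these instructions; the 32-bit build uses the plain
 * word forms.
 */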
#include <linux/config.h>
#include <asm/psw.h>
#include <asm/assembly.h>
#include <asm/pgtable.h>
#include <asm/cache.h>

	.text
	.align 128

	.export flush_tlb_all_local,code

flush_tlb_all_local:
	.proc
	.callinfo NO_CALLS
	.entry
	/*
	 * The pitlbe and pdtlbe instructions should only be used to
	 * flush the entire tlb.  Also, there must be no intervening
	 * tlb operations, e.g. tlb misses, so the operation needs
	 * to happen in real mode with all interruptions disabled.
	 */
	/* pcxt_ssm_bug - relied upon translation! PA 2.0 Arch. F-4 and F-5 */
	rsm PSW_SM_I, %r19 /* save I-bit state */
	load32 PA(1f), %r1
	nop
	nop
	nop
	nop
	nop

	rsm PSW_SM_Q, %r0 /* prep to load iia queue */
	mtctl %r0, %cr17 /* Clear IIASQ tail */
	mtctl %r0, %cr17 /* Clear IIASQ head */
	mtctl %r1, %cr18 /* IIAOQ head */
	ldo 4(%r1), %r1
	mtctl %r1, %cr18 /* IIAOQ tail */
	load32 REAL_MODE_PSW, %r1
	mtctl %r1, %ipsw
	rfi
	nop

1:	load32 PA(cache_info), %r1

	/* Flush Instruction Tlb */

	LDREG ITLB_SID_BASE(%r1), %r20
	LDREG ITLB_SID_STRIDE(%r1), %r21
	LDREG ITLB_SID_COUNT(%r1), %r22
	LDREG ITLB_OFF_BASE(%r1), %arg0
	LDREG ITLB_OFF_STRIDE(%r1), %arg1
	LDREG ITLB_OFF_COUNT(%r1), %arg2
	LDREG ITLB_LOOP(%r1), %arg3

	ADDIB= -1, %arg3, fitoneloop /* Preadjust and test */
	movb,<,n %arg3, %r31, fitdone /* If loop < 0, skip */
	copy %arg0, %r28 /* Init base addr */

fitmanyloop: /* Loop if LOOP >= 2 */
	mtsp %r20, %sr1
	add %r21, %r20, %r20 /* increment space */
	copy %arg2, %r29 /* Init middle loop count */

fitmanymiddle: /* Loop if LOOP >= 2 */
	ADDIB> -1, %r31, fitmanymiddle /* Adjusted inner loop decr */
	pitlbe 0(%sr1, %r28)
	pitlbe,m %arg1(%sr1, %r28) /* Last pitlbe and addr adjust */
	ADDIB> -1, %r29, fitmanymiddle /* Middle loop decr */
	copy %arg3, %r31 /* Re-init inner loop count */

	movb,tr %arg0, %r28, fitmanyloop /* Re-init base addr */
	ADDIB<=,n -1, %r22, fitdone /* Outer loop count decr */

fitoneloop: /* Loop if LOOP = 1 */
	mtsp %r20, %sr1
	copy %arg0, %r28 /* init base addr */
	copy %arg2, %r29 /* init middle loop count */

fitonemiddle: /* Loop if LOOP = 1 */
	ADDIB> -1, %r29, fitonemiddle /* Middle loop count decr */
	pitlbe,m %arg1(%sr1, %r28) /* pitlbe for one loop */

	ADDIB> -1, %r22, fitoneloop /* Outer loop count decr */
	add %r21, %r20, %r20 /* increment space */

fitdone:

	/* Flush Data Tlb */

	LDREG DTLB_SID_BASE(%r1), %r20
	LDREG DTLB_SID_STRIDE(%r1), %r21
	LDREG DTLB_SID_COUNT(%r1), %r22
	LDREG DTLB_OFF_BASE(%r1), %arg0
	LDREG DTLB_OFF_STRIDE(%r1), %arg1
	LDREG DTLB_OFF_COUNT(%r1), %arg2
	LDREG DTLB_LOOP(%r1), %arg3

	ADDIB= -1, %arg3, fdtoneloop /* Preadjust and test */
	movb,<,n %arg3, %r31, fdtdone /* If loop < 0, skip */
	copy %arg0, %r28 /* Init base addr */

fdtmanyloop: /* Loop if LOOP >= 2 */
	mtsp %r20, %sr1
	add %r21, %r20, %r20 /* increment space */
	copy %arg2, %r29 /* Init middle loop count */

fdtmanymiddle: /* Loop if LOOP >= 2 */
	ADDIB> -1, %r31, fdtmanymiddle /* Adjusted inner loop decr */
	pdtlbe 0(%sr1, %r28)
	pdtlbe,m %arg1(%sr1, %r28) /* Last pdtlbe and addr adjust */
	ADDIB> -1, %r29, fdtmanymiddle /* Middle loop decr */
	copy %arg3, %r31 /* Re-init inner loop count */

	movb,tr %arg0, %r28, fdtmanyloop /* Re-init base addr */
	ADDIB<=,n -1, %r22, fdtdone /* Outer loop count decr */

fdtoneloop: /* Loop if LOOP = 1 */
	mtsp %r20, %sr1
	copy %arg0, %r28 /* init base addr */
	copy %arg2, %r29 /* init middle loop count */

fdtonemiddle: /* Loop if LOOP = 1 */
	ADDIB> -1, %r29, fdtonemiddle /* Middle loop count decr */
	pdtlbe,m %arg1(%sr1, %r28) /* pdtlbe for one loop */

	ADDIB> -1, %r22, fdtoneloop /* Outer loop count decr */
	add %r21, %r20, %r20 /* increment space */

fdtdone:

	/*
	 * Switch back to virtual mode
	 */
	/* pcxt_ssm_bug */
	rsm PSW_SM_I, %r0
	load32 2f, %r1
	nop
	nop
	nop
	nop
	nop

	rsm PSW_SM_Q, %r0 /* prep to load iia queue */
	mtctl %r0, %cr17 /* Clear IIASQ tail */
	mtctl %r0, %cr17 /* Clear IIASQ head */
	mtctl %r1, %cr18 /* IIAOQ head */
	ldo 4(%r1), %r1
	mtctl %r1, %cr18 /* IIAOQ tail */
	load32 KERNEL_PSW, %r1
	or %r1, %r19, %r1 /* I-bit to state on entry */
	mtctl %r1, %ipsw /* restore I-bit (entire PSW) */
	rfi
	nop

2:	bv %r0(%r2)
	nop
	.exit
	.procend

	.export flush_instruction_cache_local,code
	.import cache_info,data

flush_instruction_cache_local:
	.proc
	.callinfo NO_CALLS
	.entry

	mtsp %r0, %sr1
	load32 cache_info, %r1

	/* Flush Instruction Cache */

	LDREG ICACHE_BASE(%r1), %arg0
	LDREG ICACHE_STRIDE(%r1), %arg1
	LDREG ICACHE_COUNT(%r1), %arg2
	LDREG ICACHE_LOOP(%r1), %arg3
	rsm PSW_SM_I, %r22 /* No mmgt ops during loop */
	ADDIB= -1, %arg3, fioneloop /* Preadjust and test */
	movb,<,n %arg3, %r31, fisync /* If loop < 0, do sync */

fimanyloop: /* Loop if LOOP >= 2 */
	ADDIB> -1, %r31, fimanyloop /* Adjusted inner loop decr */
	fice %r0(%sr1, %arg0)
	fice,m %arg1(%sr1, %arg0) /* Last fice and addr adjust */
	movb,tr %arg3, %r31, fimanyloop /* Re-init inner loop count */
	ADDIB<=,n -1, %arg2, fisync /* Outer loop decr */

fioneloop: /* Loop if LOOP = 1 */
	ADDIB> -1, %arg2, fioneloop /* Outer loop count decr */
	fice,m %arg1(%sr1, %arg0) /* Fice for one loop */

fisync:
	sync
	mtsm %r22 /* restore I-bit */
	bv %r0(%r2)
	nop
	.exit
	.procend

	.export flush_data_cache_local, code
	.import cache_info, data

flush_data_cache_local:
	.proc
	.callinfo NO_CALLS
	.entry

	mtsp %r0, %sr1
	load32 cache_info, %r1

	/* Flush Data Cache */

	LDREG DCACHE_BASE(%r1), %arg0
	LDREG DCACHE_STRIDE(%r1), %arg1
	LDREG DCACHE_COUNT(%r1), %arg2
	LDREG DCACHE_LOOP(%r1), %arg3
	rsm PSW_SM_I, %r22
	ADDIB= -1, %arg3, fdoneloop /* Preadjust and test */
	movb,<,n %arg3, %r31, fdsync /* If loop < 0, do sync */

fdmanyloop: /* Loop if LOOP >= 2 */
	ADDIB> -1, %r31, fdmanyloop /* Adjusted inner loop decr */
	fdce %r0(%sr1, %arg0)
	fdce,m %arg1(%sr1, %arg0) /* Last fdce and addr adjust */
	movb,tr %arg3, %r31, fdmanyloop /* Re-init inner loop count */
	ADDIB<=,n -1, %arg2, fdsync /* Outer loop decr */

fdoneloop: /* Loop if LOOP = 1 */
	ADDIB> -1, %arg2, fdoneloop /* Outer loop count decr */
	fdce,m %arg1(%sr1, %arg0) /* Fdce for one loop */

fdsync:
	syncdma
	sync
	mtsm %r22 /* restore I-bit */
	bv %r0(%r2)
	nop
	.exit
	.procend

	.export copy_user_page_asm,code
	.align 16

copy_user_page_asm:
	.proc
	.callinfo NO_CALLS
	.entry

#ifdef CONFIG_64BIT
	/* PA8x00 CPUs can consume 2 loads or 1 store per cycle.
	 * Unroll the loop by hand and arrange insn appropriately.
	 * GCC probably can do this just as well.
	 */

	ldd 0(%r25), %r19
	ldi ASM_PAGE_SIZE_DIV128, %r1

	ldw 64(%r25), %r0 /* prefetch 1 cacheline ahead */
	ldw 128(%r25), %r0 /* prefetch 2 */

1:	ldd 8(%r25), %r20
	ldw 192(%r25), %r0 /* prefetch 3 */
	ldw 256(%r25), %r0 /* prefetch 4 */

	ldd 16(%r25), %r21
	ldd 24(%r25), %r22
	std %r19, 0(%r26)
	std %r20, 8(%r26)

	ldd 32(%r25), %r19
	ldd 40(%r25), %r20
	std %r21, 16(%r26)
	std %r22, 24(%r26)

	ldd 48(%r25), %r21
	ldd 56(%r25), %r22
	std %r19, 32(%r26)
	std %r20, 40(%r26)

	ldd 64(%r25), %r19
	ldd 72(%r25), %r20
	std %r21, 48(%r26)
	std %r22, 56(%r26)

	ldd 80(%r25), %r21
	ldd 88(%r25), %r22
	std %r19, 64(%r26)
	std %r20, 72(%r26)

	ldd 96(%r25), %r19
	ldd 104(%r25), %r20
	std %r21, 80(%r26)
	std %r22, 88(%r26)

	ldd 112(%r25), %r21
	ldd 120(%r25), %r22
	std %r19, 96(%r26)
	std %r20, 104(%r26)

	ldo 128(%r25), %r25
	std %r21, 112(%r26)
	std %r22, 120(%r26)
	ldo 128(%r26), %r26

	/* conditional branches nullify on forward taken branch, and on
	 * non-taken backward branch. Note that .+4 is a backwards branch.
	 * The ldd should only get executed if the branch is taken.
	 */
	ADDIB>,n -1, %r1, 1b /* bundle 10 */
	ldd 0(%r25), %r19 /* start next loads */

#else

	/*
	 * This loop is optimized for PCXL/PCXL2 ldw/ldw and stw/stw
	 * bundles (very restricted rules for bundling).
	 * Note that until (if) we start saving
	 * the full 64 bit register values on interrupt, we can't
	 * use ldd/std on a 32 bit kernel.
	 */

	ldw 0(%r25), %r19
	ldi ASM_PAGE_SIZE_DIV64, %r1

1:
	ldw 4(%r25), %r20
	ldw 8(%r25), %r21
	ldw 12(%r25), %r22
	stw %r19, 0(%r26)
	stw %r20, 4(%r26)
	stw %r21, 8(%r26)
	stw %r22, 12(%r26)
	ldw 16(%r25), %r19
	ldw 20(%r25), %r20
	ldw 24(%r25), %r21
	ldw 28(%r25), %r22
	stw %r19, 16(%r26)
	stw %r20, 20(%r26)
	stw %r21, 24(%r26)
	stw %r22, 28(%r26)
	ldw 32(%r25), %r19
	ldw 36(%r25), %r20
	ldw 40(%r25), %r21
	ldw 44(%r25), %r22
	stw %r19, 32(%r26)
	stw %r20, 36(%r26)
	stw %r21, 40(%r26)
	stw %r22, 44(%r26)
	ldw 48(%r25), %r19
	ldw 52(%r25), %r20
	ldw 56(%r25), %r21
	ldw 60(%r25), %r22
	stw %r19, 48(%r26)
	stw %r20, 52(%r26)
	ldo 64(%r25), %r25
	stw %r21, 56(%r26)
	stw %r22, 60(%r26)
	ldo 64(%r26), %r26
	ADDIB>,n -1, %r1, 1b
	ldw 0(%r25), %r19
#endif
	bv %r0(%r2)
	nop
	.exit
	.procend

/*
 * NOTE: Code in clear_user_page has a hard coded dependency on the
 * maximum alias boundary being 4 Mb. We've been assured by the
 * parisc chip designers that there will not ever be a parisc
 * chip with a larger alias boundary (Never say never :-) ).
 *
 * Subtle: the dtlb miss handlers support the temp alias region by
 * "knowing" that if a dtlb miss happens within the temp alias
 * region it must have occurred while in clear_user_page. Since
 * this routine makes use of processor local translations, we
 * don't want to insert them into the kernel page table. Instead,
 * we load up some general registers (they need to be registers
 * which aren't shadowed) with the physical page numbers (preshifted
 * for tlb insertion) needed to insert the translations. When we
 * miss on the translation, the dtlb miss handler inserts the
 * translation into the tlb using these values:
 *
 * %r26 physical page (shifted for tlb insert) of "to" translation
 * %r23 physical page (shifted for tlb insert) of "from" translation
 */

#if 0

	/*
	 * We can't do this since copy_user_page is used to bring in
	 * file data that might have instructions. Since the data would
	 * then need to be flushed out so the i-fetch can see it, it
	 * makes more sense to just copy through the kernel translation
	 * and flush it.
	 *
	 * I'm still keeping this around because it may be possible to
	 * use it if more information is passed into copy_user_page().
	 * Have to do some measurements to see if it is worthwhile to
	 * lobby for such a change.
	 */

	.export copy_user_page_asm,code

copy_user_page_asm:
	.proc
	.callinfo NO_CALLS
	.entry

	ldil L%(__PAGE_OFFSET), %r1
	sub %r26, %r1, %r26
	sub %r25, %r1, %r23 /* move physical addr into non shadowed reg */

	ldil L%(TMPALIAS_MAP_START), %r28
	/* FIXME for different page sizes != 4k */
#ifdef CONFIG_64BIT
	extrd,u %r26,56,32, %r26 /* convert phys addr to tlb insert format */
	extrd,u %r23,56,32, %r23 /* convert phys addr to tlb insert format */
	depd %r24,63,22, %r28 /* Form aliased virtual address 'to' */
	depdi 0, 63,12, %r28 /* Clear any offset bits */
	copy %r28, %r29
	depdi 1, 41,1, %r29 /* Form aliased virtual address 'from' */
#else
	extrw,u %r26, 24,25, %r26 /* convert phys addr to tlb insert format */
	extrw,u %r23, 24,25, %r23 /* convert phys addr to tlb insert format */
	depw %r24, 31,22, %r28 /* Form aliased virtual address 'to' */
	depwi 0, 31,12, %r28 /* Clear any offset bits */
	copy %r28, %r29
	depwi 1, 9,1, %r29 /* Form aliased virtual address 'from' */
#endif

	/* Purge any old translations */
	pdtlb 0(%r28)
	pdtlb 0(%r29)

	ldi 64, %r1

	/*
	 * This loop is optimized for PCXL/PCXL2 ldw/ldw and stw/stw
	 * bundles (very restricted rules for bundling). It probably
	 * does OK on PCXU and better, but we could do better with
	 * ldd/std instructions. Note that until (if) we start saving
	 * the full 64 bit register values on interrupt, we can't
	 * use ldd/std on a 32 bit kernel.
	 */

1:
	ldw 0(%r29), %r19
	ldw 4(%r29), %r20
	ldw 8(%r29), %r21
	ldw 12(%r29), %r22
	stw %r19, 0(%r28)
	stw %r20, 4(%r28)
	stw %r21, 8(%r28)
	stw %r22, 12(%r28)
	ldw 16(%r29), %r19
	ldw 20(%r29), %r20
	ldw 24(%r29), %r21
	ldw 28(%r29), %r22
	stw %r19, 16(%r28)
	stw %r20, 20(%r28)
	stw %r21, 24(%r28)
	stw %r22, 28(%r28)
	ldw 32(%r29), %r19
	ldw 36(%r29), %r20
	ldw 40(%r29), %r21
	ldw 44(%r29), %r22
	stw %r19, 32(%r28)
	stw %r20, 36(%r28)
	stw %r21, 40(%r28)
	stw %r22, 44(%r28)
	ldw 48(%r29), %r19
	ldw 52(%r29), %r20
	ldw 56(%r29), %r21
	ldw 60(%r29), %r22
	stw %r19, 48(%r28)
	stw %r20, 52(%r28)
	stw %r21, 56(%r28)
	stw %r22, 60(%r28)
	ldo 64(%r28), %r28
	ADDIB> -1, %r1, 1b
	ldo 64(%r29), %r29

	bv %r0(%r2)
	nop
	.exit
	.procend
#endif

	.export __clear_user_page_asm,code

__clear_user_page_asm:
	.proc
	.callinfo NO_CALLS
	.entry

	tophys_r1 %r26

	ldil L%(TMPALIAS_MAP_START), %r28
#ifdef CONFIG_64BIT
#if (TMPALIAS_MAP_START >= 0x80000000)
	depdi 0, 31,32, %r28 /* clear any sign extension */
	/* FIXME: page size dependent */
#endif
	extrd,u %r26, 56,32, %r26 /* convert phys addr to tlb insert format */
	depd %r25, 63,22, %r28 /* Form aliased virtual address 'to' */
	depdi 0, 63,12, %r28 /* Clear any offset bits */
#else
	extrw,u %r26, 24,25, %r26 /* convert phys addr to tlb insert format */
	depw %r25, 31,22, %r28 /* Form aliased virtual address 'to' */
	depwi 0, 31,12, %r28 /* Clear any offset bits */
#endif

	/* Purge any old translation */
	pdtlb 0(%r28)

#ifdef CONFIG_64BIT
	ldi ASM_PAGE_SIZE_DIV128, %r1

	/* PREFETCH (Write) has not (yet) been proven to help here */
	/* #define PREFETCHW_OP ldd 256(%0), %r0 */

1:	std %r0, 0(%r28)
	std %r0, 8(%r28)
	std %r0, 16(%r28)
	std %r0, 24(%r28)
	std %r0, 32(%r28)
	std %r0, 40(%r28)
	std %r0, 48(%r28)
	std %r0, 56(%r28)
	std %r0, 64(%r28)
	std %r0, 72(%r28)
	std %r0, 80(%r28)
	std %r0, 88(%r28)
	std %r0, 96(%r28)
	std %r0, 104(%r28)
	std %r0, 112(%r28)
	std %r0, 120(%r28)
	ADDIB> -1, %r1, 1b
	ldo 128(%r28), %r28

#else	/* ! CONFIG_64BIT */

	ldi ASM_PAGE_SIZE_DIV64, %r1

1:
	stw %r0, 0(%r28)
	stw %r0, 4(%r28)
	stw %r0, 8(%r28)
	stw %r0, 12(%r28)
	stw %r0, 16(%r28)
	stw %r0, 20(%r28)
	stw %r0, 24(%r28)
	stw %r0, 28(%r28)
	stw %r0, 32(%r28)
	stw %r0, 36(%r28)
	stw %r0, 40(%r28)
	stw %r0, 44(%r28)
	stw %r0, 48(%r28)
	stw %r0, 52(%r28)
	stw %r0, 56(%r28)
	stw %r0, 60(%r28)
	ADDIB> -1, %r1, 1b
	ldo 64(%r28), %r28

#endif	/* CONFIG_64BIT */

	bv %r0(%r2)
	nop
	.exit
	.procend

	.export flush_kernel_dcache_page_asm

flush_kernel_dcache_page_asm:
	.proc
	.callinfo NO_CALLS
	.entry

	ldil L%dcache_stride, %r1
	ldw R%dcache_stride(%r1), %r23

#ifdef CONFIG_64BIT
	depdi,z 1, 63-PAGE_SHIFT,1, %r25
#else
	depwi,z 1, 31-PAGE_SHIFT,1, %r25
#endif
	add %r26, %r25, %r25
	sub %r25, %r23, %r25
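	/*
	 * %r25 now holds page base + PAGE_SIZE - dcache_stride.  The loop
	 * below issues 16 fdc,m per pass (15 in the body plus one in the
	 * CMPB delay slot), so it assumes the number of cache lines per
	 * page is a multiple of 16; e.g. a 64-byte stride with 4 kB pages
	 * gives 64 lines, i.e. 4 passes.
	 */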
1:	fdc,m %r23(%r26)
	fdc,m %r23(%r26)
	fdc,m %r23(%r26)
	fdc,m %r23(%r26)
	fdc,m %r23(%r26)
	fdc,m %r23(%r26)
	fdc,m %r23(%r26)
	fdc,m %r23(%r26)
	fdc,m %r23(%r26)
	fdc,m %r23(%r26)
	fdc,m %r23(%r26)
	fdc,m %r23(%r26)
	fdc,m %r23(%r26)
	fdc,m %r23(%r26)
	fdc,m %r23(%r26)
	CMPB<< %r26, %r25, 1b
	fdc,m %r23(%r26)

	sync
	bv %r0(%r2)
	nop
	.exit
	.procend

	.export flush_user_dcache_page

flush_user_dcache_page:
	.proc
	.callinfo NO_CALLS
	.entry

	ldil L%dcache_stride, %r1
	ldw R%dcache_stride(%r1), %r23

#ifdef CONFIG_64BIT
	depdi,z 1, 63-PAGE_SHIFT,1, %r25
#else
	depwi,z 1, 31-PAGE_SHIFT,1, %r25
#endif
	add %r26, %r25, %r25
	sub %r25, %r23, %r25

1:	fdc,m %r23(%sr3, %r26)
	fdc,m %r23(%sr3, %r26)
	fdc,m %r23(%sr3, %r26)
	fdc,m %r23(%sr3, %r26)
	fdc,m %r23(%sr3, %r26)
	fdc,m %r23(%sr3, %r26)
	fdc,m %r23(%sr3, %r26)
	fdc,m %r23(%sr3, %r26)
	fdc,m %r23(%sr3, %r26)
	fdc,m %r23(%sr3, %r26)
	fdc,m %r23(%sr3, %r26)
	fdc,m %r23(%sr3, %r26)
	fdc,m %r23(%sr3, %r26)
	fdc,m %r23(%sr3, %r26)
	fdc,m %r23(%sr3, %r26)
	CMPB<< %r26, %r25, 1b
	fdc,m %r23(%sr3, %r26)

	sync
	bv %r0(%r2)
	nop
	.exit
	.procend

	.export flush_user_icache_page

flush_user_icache_page:
	.proc
	.callinfo NO_CALLS
	.entry

	ldil L%dcache_stride, %r1
	ldw R%dcache_stride(%r1), %r23

#ifdef CONFIG_64BIT
	depdi,z 1, 63-PAGE_SHIFT,1, %r25
#else
	depwi,z 1, 31-PAGE_SHIFT,1, %r25
#endif
	add %r26, %r25, %r25
	sub %r25, %r23, %r25

1:	fic,m %r23(%sr3, %r26)
	fic,m %r23(%sr3, %r26)
	fic,m %r23(%sr3, %r26)
	fic,m %r23(%sr3, %r26)
	fic,m %r23(%sr3, %r26)
	fic,m %r23(%sr3, %r26)
	fic,m %r23(%sr3, %r26)
	fic,m %r23(%sr3, %r26)
	fic,m %r23(%sr3, %r26)
	fic,m %r23(%sr3, %r26)
	fic,m %r23(%sr3, %r26)
	fic,m %r23(%sr3, %r26)
	fic,m %r23(%sr3, %r26)
	fic,m %r23(%sr3, %r26)
	fic,m %r23(%sr3, %r26)
	CMPB<< %r26, %r25, 1b
	fic,m %r23(%sr3, %r26)

	sync
	bv %r0(%r2)
	nop
	.exit
	.procend

	.export purge_kernel_dcache_page

purge_kernel_dcache_page:
	.proc
	.callinfo NO_CALLS
	.entry

	ldil L%dcache_stride, %r1
	ldw R%dcache_stride(%r1), %r23

#ifdef CONFIG_64BIT
	depdi,z 1, 63-PAGE_SHIFT,1, %r25
#else
	depwi,z 1, 31-PAGE_SHIFT,1, %r25
#endif
	add %r26, %r25, %r25
	sub %r25, %r23, %r25

1:	pdc,m %r23(%r26)
	pdc,m %r23(%r26)
	pdc,m %r23(%r26)
	pdc,m %r23(%r26)
	pdc,m %r23(%r26)
	pdc,m %r23(%r26)
	pdc,m %r23(%r26)
	pdc,m %r23(%r26)
	pdc,m %r23(%r26)
	pdc,m %r23(%r26)
	pdc,m %r23(%r26)
	pdc,m %r23(%r26)
	pdc,m %r23(%r26)
	pdc,m %r23(%r26)
	pdc,m %r23(%r26)
	CMPB<< %r26, %r25, 1b
	pdc,m %r23(%r26)

	sync
	bv %r0(%r2)
	nop
	.exit
	.procend

#if 0
	/* Currently not used, but it still is a possible alternate
	 * solution.
	 */

	.export flush_alias_page

flush_alias_page:
	.proc
	.callinfo NO_CALLS
	.entry

	tophys_r1 %r26

	ldil L%(TMPALIAS_MAP_START), %r28
#ifdef CONFIG_64BIT
	extrd,u %r26, 56,32, %r26 /* convert phys addr to tlb insert format */
	depd %r25, 63,22, %r28 /* Form aliased virtual address 'to' */
	depdi 0, 63,12, %r28 /* Clear any offset bits */
#else
	extrw,u %r26, 24,25, %r26 /* convert phys addr to tlb insert format */
	depw %r25, 31,22, %r28 /* Form aliased virtual address 'to' */
	depwi 0, 31,12, %r28 /* Clear any offset bits */
#endif

	/* Purge any old translation */
	pdtlb 0(%r28)

	ldil L%dcache_stride, %r1
	ldw R%dcache_stride(%r1), %r23

#ifdef CONFIG_64BIT
	depdi,z 1, 63-PAGE_SHIFT,1, %r29
#else
	depwi,z 1, 31-PAGE_SHIFT,1, %r29
#endif
	add %r28, %r29, %r29
	sub %r29, %r23, %r29

1:	fdc,m %r23(%r28)
	fdc,m %r23(%r28)
	fdc,m %r23(%r28)
	fdc,m %r23(%r28)
	fdc,m %r23(%r28)
	fdc,m %r23(%r28)
	fdc,m %r23(%r28)
	fdc,m %r23(%r28)
	fdc,m %r23(%r28)
	fdc,m %r23(%r28)
	fdc,m %r23(%r28)
	fdc,m %r23(%r28)
	fdc,m %r23(%r28)
	fdc,m %r23(%r28)
	fdc,m %r23(%r28)
	CMPB<< %r28, %r29, 1b
	fdc,m %r23(%r28)

	sync
	bv %r0(%r2)
	nop
	.exit
	.procend
#endif

	.export flush_user_dcache_range_asm

flush_user_dcache_range_asm:
	.proc
	.callinfo NO_CALLS
	.entry

	ldil L%dcache_stride, %r1
	ldw R%dcache_stride(%r1), %r23
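	/*
	 * Align the start address (%r26) down to a cache-line boundary:
	 * %r21 = dcache_stride - 1 serves as a mask, and ANDCM clears those
	 * low-order bits (this assumes the stride is a power of two).  The
	 * other *_range_asm routines below do the same.
	 */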
	ldo -1(%r23), %r21
	ANDCM %r26, %r21, %r26

1:	CMPB<<,n %r26, %r25, 1b
	fdc,m %r23(%sr3, %r26)

	sync
	bv %r0(%r2)
	nop
	.exit
	.procend

	.export flush_kernel_dcache_range_asm

flush_kernel_dcache_range_asm:
	.proc
	.callinfo NO_CALLS
	.entry

	ldil L%dcache_stride, %r1
	ldw R%dcache_stride(%r1), %r23
	ldo -1(%r23), %r21
	ANDCM %r26, %r21, %r26

1:	CMPB<<,n %r26, %r25, 1b
	fdc,m %r23(%r26)

	sync
	syncdma
	bv %r0(%r2)
	nop
	.exit
	.procend

	.export flush_user_icache_range_asm

flush_user_icache_range_asm:
	.proc
	.callinfo NO_CALLS
	.entry

	ldil L%icache_stride, %r1
	ldw R%icache_stride(%r1), %r23
	ldo -1(%r23), %r21
	ANDCM %r26, %r21, %r26

1:	CMPB<<,n %r26, %r25, 1b
	fic,m %r23(%sr3, %r26)

	sync
	bv %r0(%r2)
	nop
	.exit
	.procend

	.export flush_kernel_icache_page

flush_kernel_icache_page:
	.proc
	.callinfo NO_CALLS
	.entry

	ldil L%icache_stride, %r1
	ldw R%icache_stride(%r1), %r23

#ifdef CONFIG_64BIT
	depdi,z 1, 63-PAGE_SHIFT,1, %r25
#else
	depwi,z 1, 31-PAGE_SHIFT,1, %r25
#endif
	add %r26, %r25, %r25
	sub %r25, %r23, %r25

1:	fic,m %r23(%sr4, %r26)
	fic,m %r23(%sr4, %r26)
	fic,m %r23(%sr4, %r26)
	fic,m %r23(%sr4, %r26)
	fic,m %r23(%sr4, %r26)
	fic,m %r23(%sr4, %r26)
	fic,m %r23(%sr4, %r26)
	fic,m %r23(%sr4, %r26)
	fic,m %r23(%sr4, %r26)
	fic,m %r23(%sr4, %r26)
	fic,m %r23(%sr4, %r26)
	fic,m %r23(%sr4, %r26)
	fic,m %r23(%sr4, %r26)
	fic,m %r23(%sr4, %r26)
	fic,m %r23(%sr4, %r26)
	CMPB<< %r26, %r25, 1b
	fic,m %r23(%sr4, %r26)

	sync
	bv %r0(%r2)
	nop
	.exit
	.procend

	.export flush_kernel_icache_range_asm

flush_kernel_icache_range_asm:
	.proc
	.callinfo NO_CALLS
	.entry

	ldil L%icache_stride, %r1
	ldw R%icache_stride(%r1), %r23
	ldo -1(%r23), %r21
	ANDCM %r26, %r21, %r26

1:	CMPB<<,n %r26, %r25, 1b
	fic,m %r23(%sr4, %r26)

	sync
	bv %r0(%r2)
	nop
	.exit
	.procend

	/* align should cover use of rfi in disable_sr_hashing_asm and
	 * srdis_done.
	 */
	.align 256
	.export disable_sr_hashing_asm,code

disable_sr_hashing_asm:
	.proc
	.callinfo NO_CALLS
	.entry

	/*
	 * Switch to real mode
	 */
	/* pcxt_ssm_bug */
	rsm PSW_SM_I, %r0
	load32 PA(1f), %r1
	nop
	nop
	nop
	nop
	nop

	rsm PSW_SM_Q, %r0 /* prep to load iia queue */
	mtctl %r0, %cr17 /* Clear IIASQ tail */
	mtctl %r0, %cr17 /* Clear IIASQ head */
	mtctl %r1, %cr18 /* IIAOQ head */
	ldo 4(%r1), %r1
	mtctl %r1, %cr18 /* IIAOQ tail */
	load32 REAL_MODE_PSW, %r1
	mtctl %r1, %ipsw
	rfi
	nop

1:	cmpib,=,n SRHASH_PCXST, %r26, srdis_pcxs
	cmpib,=,n SRHASH_PCXL, %r26, srdis_pcxl
	cmpib,=,n SRHASH_PA20, %r26, srdis_pa20
	b,n srdis_done

srdis_pcxs:

	/* Disable Space Register Hashing for PCXS,PCXT,PCXT' */

	.word 0x141c1a00 /* mfdiag %dr0, %r28 */
	.word 0x141c1a00 /* must issue twice */
	depwi 0,18,1, %r28 /* Clear DHE (dcache hash enable) */
	depwi 0,20,1, %r28 /* Clear IHE (icache hash enable) */
	.word 0x141c1600 /* mtdiag %r28, %dr0 */
	.word 0x141c1600 /* must issue twice */
	b,n srdis_done

srdis_pcxl:

	/* Disable Space Register Hashing for PCXL */

	.word 0x141c0600 /* mfdiag %dr0, %r28 */
	depwi 0,28,2, %r28 /* Clear DHASH_EN & IHASH_EN */
	.word 0x141c0240 /* mtdiag %r28, %dr0 */
	b,n srdis_done

srdis_pa20:

	/* Disable Space Register Hashing for PCXU,PCXU+,PCXW,PCXW+,PCXW2 */

	.word 0x144008bc /* mfdiag %dr2, %r28 */
	depdi 0, 54,1, %r28 /* clear DIAG_SPHASH_ENAB (bit 54) */
	.word 0x145c1840 /* mtdiag %r28, %dr2 */

srdis_done:

	/* Switch back to virtual mode */
	rsm PSW_SM_I, %r0 /* prep to load iia queue */
	load32 2f, %r1
	nop
	nop
	nop
	nop
	nop

	rsm PSW_SM_Q, %r0 /* prep to load iia queue */
	mtctl %r0, %cr17 /* Clear IIASQ tail */
	mtctl %r0, %cr17 /* Clear IIASQ head */
	mtctl %r1, %cr18 /* IIAOQ head */
	ldo 4(%r1), %r1
	mtctl %r1, %cr18 /* IIAOQ tail */
	load32 KERNEL_PSW, %r1
	mtctl %r1, %ipsw
	rfi
	nop

2:	bv %r0(%r2)
	nop
	.exit
	.procend

	.end