/*
 * PARISC TLB and cache flushing support
 * Copyright (C) 2000-2001 Hewlett-Packard (John Marvin)
 * Copyright (C) 2001 Matthew Wilcox (willy at parisc-linux.org)
 * Copyright (C) 2002 Richard Hirst (rhirst with parisc-linux.org)
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2, or (at your option)
 * any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 */

/*
 * NOTE: fdc, fic, and pdc instructions that use base register modification
 * should only use index and base registers that are not shadowed,
 * so that the fast-path emulation in the non-access miss handler
 * can be used.
 */
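
/*
 * (The shadowed general registers on PA-RISC are %r1, %r8, %r9, %r16,
 * %r17, %r24 and %r25, so the flush loops below keep their base and
 * index values in other registers.)
 */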

#ifdef CONFIG_64BIT
#define ADDIB	addib,*
#define CMPB	cmpb,*
#define ANDCM	andcm,*

	.level	2.0w
#else
#define ADDIB	addib,
#define CMPB	cmpb,
#define ANDCM	andcm

	.level	2.0
#endif
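
/*
 * The ",*" completer in the wide-mode forms above makes the add/compare
 * conditions operate on the full 64-bit register values; the narrow
 * build omits it so the conditions are evaluated on 32 bits.
 */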

#include <asm/psw.h>
#include <asm/assembly.h>
#include <asm/pgtable.h>
#include <asm/cache.h>

	.text
	.align	128

	.export flush_tlb_all_local,code

flush_tlb_all_local:
	.proc
	.callinfo NO_CALLS
	.entry

	/*
	 * The pitlbe and pdtlbe instructions should only be used to
	 * flush the entire tlb. Also, there needs to be no intervening
	 * tlb operations, e.g. tlb misses, so the operation needs
	 * to happen in real mode with all interruptions disabled.
	 */
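
	/*
	 * Entering real mode: the PSW Q-bit is turned off, the
	 * interruption instruction address queues (IIASQ/IIAOQ) are
	 * loaded with the physical address of 1f, and the rfi below
	 * resumes execution there under REAL_MODE_PSW, i.e. with
	 * address translation disabled.
	 */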
	/* pcxt_ssm_bug - relied upon translation! PA 2.0 Arch. F-4 and F-5 */
	rsm PSW_SM_I, %r19 /* save I-bit state */
	load32 PA(1f), %r1
	nop
	nop
	nop
	nop
	nop

	rsm PSW_SM_Q, %r0 /* prep to load iia queue */
	mtctl %r0, %cr17 /* Clear IIASQ tail */
	mtctl %r0, %cr17 /* Clear IIASQ head */
	mtctl %r1, %cr18 /* IIAOQ head */
	ldo 4(%r1), %r1
	mtctl %r1, %cr18 /* IIAOQ tail */
	load32 REAL_MODE_PSW, %r1
	mtctl %r1, %ipsw
	rfi
	nop

1:	load32 PA(cache_info), %r1

	/* Flush Instruction Tlb */

	LDREG ITLB_SID_BASE(%r1), %r20
	LDREG ITLB_SID_STRIDE(%r1), %r21
	LDREG ITLB_SID_COUNT(%r1), %r22
	LDREG ITLB_OFF_BASE(%r1), %arg0
	LDREG ITLB_OFF_STRIDE(%r1), %arg1
	LDREG ITLB_OFF_COUNT(%r1), %arg2
	LDREG ITLB_LOOP(%r1), %arg3
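
	/*
	 * The flush is a three-level loop driven by cache_info (filled in
	 * from firmware PDC data by the C code): the outer loop walks the
	 * TLB space IDs (SID base/stride/count), the middle loop walks the
	 * offsets within each space, and the inner loop issues ITLB_LOOP
	 * pitlbe's at each offset.
	 */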
	ADDIB= -1, %arg3, fitoneloop /* Preadjust and test */
	movb,<,n %arg3, %r31, fitdone /* If loop < 0, skip */
	copy %arg0, %r28 /* Init base addr */

fitmanyloop: /* Loop if LOOP >= 2 */
	mtsp %r20, %sr1
	add %r21, %r20, %r20 /* increment space */
	copy %arg2, %r29 /* Init middle loop count */

fitmanymiddle: /* Loop if LOOP >= 2 */
	ADDIB> -1, %r31, fitmanymiddle /* Adjusted inner loop decr */
	pitlbe 0(%sr1, %r28)
	pitlbe,m %arg1(%sr1, %r28) /* Last pitlbe and addr adjust */
	ADDIB> -1, %r29, fitmanymiddle /* Middle loop decr */
	copy %arg3, %r31 /* Re-init inner loop count */

	movb,tr %arg0, %r28, fitmanyloop /* Re-init base addr */
	ADDIB<=,n -1, %r22, fitdone /* Outer loop count decr */

fitoneloop: /* Loop if LOOP = 1 */
	mtsp %r20, %sr1
	copy %arg0, %r28 /* init base addr */
	copy %arg2, %r29 /* init middle loop count */

fitonemiddle: /* Loop if LOOP = 1 */
	ADDIB> -1, %r29, fitonemiddle /* Middle loop count decr */
	pitlbe,m %arg1(%sr1, %r28) /* pitlbe for one loop */

	ADDIB> -1, %r22, fitoneloop /* Outer loop count decr */
	add %r21, %r20, %r20 /* increment space */

fitdone:

	/* Flush Data Tlb */

	LDREG DTLB_SID_BASE(%r1), %r20
	LDREG DTLB_SID_STRIDE(%r1), %r21
	LDREG DTLB_SID_COUNT(%r1), %r22
	LDREG DTLB_OFF_BASE(%r1), %arg0
	LDREG DTLB_OFF_STRIDE(%r1), %arg1
	LDREG DTLB_OFF_COUNT(%r1), %arg2
	LDREG DTLB_LOOP(%r1), %arg3

	ADDIB= -1, %arg3, fdtoneloop /* Preadjust and test */
	movb,<,n %arg3, %r31, fdtdone /* If loop < 0, skip */
	copy %arg0, %r28 /* Init base addr */

fdtmanyloop: /* Loop if LOOP >= 2 */
	mtsp %r20, %sr1
	add %r21, %r20, %r20 /* increment space */
	copy %arg2, %r29 /* Init middle loop count */

fdtmanymiddle: /* Loop if LOOP >= 2 */
	ADDIB> -1, %r31, fdtmanymiddle /* Adjusted inner loop decr */
	pdtlbe 0(%sr1, %r28)
	pdtlbe,m %arg1(%sr1, %r28) /* Last pdtlbe and addr adjust */
	ADDIB> -1, %r29, fdtmanymiddle /* Middle loop decr */
	copy %arg3, %r31 /* Re-init inner loop count */

	movb,tr %arg0, %r28, fdtmanyloop /* Re-init base addr */
	ADDIB<=,n -1, %r22, fdtdone /* Outer loop count decr */

fdtoneloop: /* Loop if LOOP = 1 */
	mtsp %r20, %sr1
	copy %arg0, %r28 /* init base addr */
	copy %arg2, %r29 /* init middle loop count */

fdtonemiddle: /* Loop if LOOP = 1 */
	ADDIB> -1, %r29, fdtonemiddle /* Middle loop count decr */
	pdtlbe,m %arg1(%sr1, %r28) /* pdtlbe for one loop */

	ADDIB> -1, %r22, fdtoneloop /* Outer loop count decr */
	add %r21, %r20, %r20 /* increment space */

fdtdone:

	/*
	 * Switch back to virtual mode
	 */
	/* pcxt_ssm_bug */
	rsm PSW_SM_I, %r0
	load32 2f, %r1
	nop
	nop
	nop
	nop
	nop

	rsm PSW_SM_Q, %r0 /* prep to load iia queue */
	mtctl %r0, %cr17 /* Clear IIASQ tail */
	mtctl %r0, %cr17 /* Clear IIASQ head */
	mtctl %r1, %cr18 /* IIAOQ head */
	ldo 4(%r1), %r1
	mtctl %r1, %cr18 /* IIAOQ tail */
	load32 KERNEL_PSW, %r1
	or %r1, %r19, %r1 /* I-bit to state on entry */
	mtctl %r1, %ipsw /* restore I-bit (entire PSW) */
	rfi
	nop

2:	bv %r0(%r2)
	nop

	.exit
	.procend

	.export flush_instruction_cache_local,code
	.import cache_info,data

flush_instruction_cache_local:
	.proc
	.callinfo NO_CALLS
	.entry

	mtsp %r0, %sr1
	load32 cache_info, %r1

	/* Flush Instruction Cache */

	LDREG ICACHE_BASE(%r1), %arg0
	LDREG ICACHE_STRIDE(%r1), %arg1
	LDREG ICACHE_COUNT(%r1), %arg2
	LDREG ICACHE_LOOP(%r1), %arg3
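
	/*
	 * Same idea as the TLB loops above: base/stride/count/loop come
	 * from the firmware-supplied cache_info, and LOOP is the number of
	 * fice's that must be issued at each stride step.
	 */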
	rsm PSW_SM_I, %r22 /* No mmgt ops during loop */
	ADDIB= -1, %arg3, fioneloop /* Preadjust and test */
	movb,<,n %arg3, %r31, fisync /* If loop < 0, do sync */

fimanyloop: /* Loop if LOOP >= 2 */
	ADDIB> -1, %r31, fimanyloop /* Adjusted inner loop decr */
	fice %r0(%sr1, %arg0)
	fice,m %arg1(%sr1, %arg0) /* Last fice and addr adjust */
	movb,tr %arg3, %r31, fimanyloop /* Re-init inner loop count */
	ADDIB<=,n -1, %arg2, fisync /* Outer loop decr */

fioneloop: /* Loop if LOOP = 1 */
	ADDIB> -1, %arg2, fioneloop /* Outer loop count decr */
	fice,m %arg1(%sr1, %arg0) /* Fice for one loop */

fisync:
	sync
	mtsm %r22 /* restore I-bit */
	bv %r0(%r2)
	nop

	.exit
	.procend

	.export flush_data_cache_local, code
	.import cache_info, data

flush_data_cache_local:
	.proc
	.callinfo NO_CALLS
	.entry

	mtsp %r0, %sr1
	load32 cache_info, %r1

	/* Flush Data Cache */

	LDREG DCACHE_BASE(%r1), %arg0
	LDREG DCACHE_STRIDE(%r1), %arg1
	LDREG DCACHE_COUNT(%r1), %arg2
	LDREG DCACHE_LOOP(%r1), %arg3

	rsm PSW_SM_I, %r22
	ADDIB= -1, %arg3, fdoneloop /* Preadjust and test */
	movb,<,n %arg3, %r31, fdsync /* If loop < 0, do sync */

fdmanyloop: /* Loop if LOOP >= 2 */
	ADDIB> -1, %r31, fdmanyloop /* Adjusted inner loop decr */
	fdce %r0(%sr1, %arg0)
	fdce,m %arg1(%sr1, %arg0) /* Last fdce and addr adjust */
	movb,tr %arg3, %r31, fdmanyloop /* Re-init inner loop count */
	ADDIB<=,n -1, %arg2, fdsync /* Outer loop decr */

fdoneloop: /* Loop if LOOP = 1 */
	ADDIB> -1, %arg2, fdoneloop /* Outer loop count decr */
	fdce,m %arg1(%sr1, %arg0) /* Fdce for one loop */

fdsync:
	syncdma
	sync
	mtsm %r22 /* restore I-bit */
	bv %r0(%r2)
	nop

	.exit
	.procend

	.export copy_user_page_asm,code
	.align 16

copy_user_page_asm:
	.proc
	.callinfo NO_CALLS
	.entry

#ifdef CONFIG_64BIT
	/* PA8x00 CPUs can consume 2 loads or 1 store per cycle.
	 * Unroll the loop by hand and arrange insn appropriately.
	 * GCC probably can do this just as well.
	 */
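
	/* Each pass of the loop below copies 128 bytes with 16 ldd/std
	 * pairs, so it runs PAGE_SIZE/128 times (ASM_PAGE_SIZE_DIV128,
	 * i.e. 32 passes for a 4kB page); the ldw's into %r0 only touch
	 * lines ahead of the ldd's to warm the cache.
	 */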
	ldd 0(%r25), %r19
	ldi ASM_PAGE_SIZE_DIV128, %r1

	ldw 64(%r25), %r0 /* prefetch 1 cacheline ahead */
	ldw 128(%r25), %r0 /* prefetch 2 */

1:	ldd 8(%r25), %r20
	ldw 192(%r25), %r0 /* prefetch 3 */
	ldw 256(%r25), %r0 /* prefetch 4 */

	ldd 16(%r25), %r21
	ldd 24(%r25), %r22
	std %r19, 0(%r26)
	std %r20, 8(%r26)

	ldd 32(%r25), %r19
	ldd 40(%r25), %r20
	std %r21, 16(%r26)
	std %r22, 24(%r26)

	ldd 48(%r25), %r21
	ldd 56(%r25), %r22
	std %r19, 32(%r26)
	std %r20, 40(%r26)

	ldd 64(%r25), %r19
	ldd 72(%r25), %r20
	std %r21, 48(%r26)
	std %r22, 56(%r26)

	ldd 80(%r25), %r21
	ldd 88(%r25), %r22
	std %r19, 64(%r26)
	std %r20, 72(%r26)

	ldd 96(%r25), %r19
	ldd 104(%r25), %r20
	std %r21, 80(%r26)
	std %r22, 88(%r26)

	ldd 112(%r25), %r21
	ldd 120(%r25), %r22
	std %r19, 96(%r26)
	std %r20, 104(%r26)

	ldo 128(%r25), %r25
	std %r21, 112(%r26)
	std %r22, 120(%r26)
	ldo 128(%r26), %r26

	/* conditional branches nullify on forward taken branch, and on
	 * non-taken backward branch. Note that .+4 is a backwards branch.
	 * The ldd should only get executed if the branch is taken.
	 */
	ADDIB>,n -1, %r1, 1b /* bundle 10 */
	ldd 0(%r25), %r19 /* start next loads */

#else

	/*
	 * This loop is optimized for PCXL/PCXL2 ldw/ldw and stw/stw
	 * bundles (very restricted rules for bundling).
	 * Note that until (if) we start saving
	 * the full 64 bit register values on interrupt, we can't
	 * use ldd/std on a 32 bit kernel.
	 */
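
	/* Each pass of this loop moves 64 bytes in groups of four ldw's
	 * and four stw's, so it runs PAGE_SIZE/64 times
	 * (ASM_PAGE_SIZE_DIV64, i.e. 64 passes for a 4kB page).
	 */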
	ldw 0(%r25), %r19
	ldi ASM_PAGE_SIZE_DIV64, %r1

1:
	ldw 4(%r25), %r20
	ldw 8(%r25), %r21
	ldw 12(%r25), %r22
	stw %r19, 0(%r26)
	stw %r20, 4(%r26)
	stw %r21, 8(%r26)
	stw %r22, 12(%r26)
	ldw 16(%r25), %r19
	ldw 20(%r25), %r20
	ldw 24(%r25), %r21
	ldw 28(%r25), %r22
	stw %r19, 16(%r26)
	stw %r20, 20(%r26)
	stw %r21, 24(%r26)
	stw %r22, 28(%r26)
	ldw 32(%r25), %r19
	ldw 36(%r25), %r20
	ldw 40(%r25), %r21
	ldw 44(%r25), %r22
	stw %r19, 32(%r26)
	stw %r20, 36(%r26)
	stw %r21, 40(%r26)
	stw %r22, 44(%r26)
	ldw 48(%r25), %r19
	ldw 52(%r25), %r20
	ldw 56(%r25), %r21
	ldw 60(%r25), %r22
	stw %r19, 48(%r26)
	stw %r20, 52(%r26)
	ldo 64(%r25), %r25
	stw %r21, 56(%r26)
	stw %r22, 60(%r26)
	ldo 64(%r26), %r26
	ADDIB>,n -1, %r1, 1b
	ldw 0(%r25), %r19
#endif

	bv %r0(%r2)
	nop

	.exit
	.procend

/*
 * NOTE: Code in clear_user_page has a hard coded dependency on the
 * maximum alias boundary being 4 Mb. We've been assured by the
 * parisc chip designers that there will not ever be a parisc
 * chip with a larger alias boundary (Never say never :-) ).
 *
 * Subtle: the dtlb miss handlers support the temp alias region by
 * "knowing" that if a dtlb miss happens within the temp alias
 * region it must have occurred while in clear_user_page. Since
 * this routine makes use of processor local translations, we
 * don't want to insert them into the kernel page table. Instead,
 * we load up some general registers (they need to be registers
 * which aren't shadowed) with the physical page numbers (preshifted
 * for tlb insertion) needed to insert the translations. When we
 * miss on the translation, the dtlb miss handler inserts the
 * translation into the tlb using these values:
 *
 * %r26 physical page (shifted for tlb insert) of "to" translation
 * %r23 physical page (shifted for tlb insert) of "from" translation
 */
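
/*
 * Concretely, the "to" alias is TMPALIAS_MAP_START with the page index
 * deposited into its low bits, and the "from" alias is the same address
 * with one extra bit set (the depdi/depwi 1 below), 4 Mb above it; the
 * dtlb miss handler can then tell from the faulting address whether to
 * build the translation from %r26 or from %r23.
 */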

#if 0

	/*
	 * We can't do this since copy_user_page is used to bring in
	 * file data that might have instructions. Since the data would
	 * then need to be flushed out so the i-fetch can see it, it
	 * makes more sense to just copy through the kernel translation
	 * and flush it.
	 *
	 * I'm still keeping this around because it may be possible to
	 * use it if more information is passed into copy_user_page().
	 * Have to do some measurements to see if it is worthwhile to
	 * lobby for such a change.
	 */

	.export copy_user_page_asm,code

copy_user_page_asm:
	.proc
	.callinfo NO_CALLS
	.entry

	ldil L%(__PAGE_OFFSET), %r1
	sub %r26, %r1, %r26
	sub %r25, %r1, %r23 /* move physical addr into non-shadowed reg */

	ldil L%(TMPALIAS_MAP_START), %r28
	/* FIXME for different page sizes != 4k */
#ifdef CONFIG_64BIT
	extrd,u %r26,56,32, %r26 /* convert phys addr to tlb insert format */
	extrd,u %r23,56,32, %r23 /* convert phys addr to tlb insert format */
	depd %r24,63,22, %r28 /* Form aliased virtual address 'to' */
	depdi 0, 63,12, %r28 /* Clear any offset bits */
	copy %r28, %r29
	depdi 1, 41,1, %r29 /* Form aliased virtual address 'from' */
#else
	extrw,u %r26, 24,25, %r26 /* convert phys addr to tlb insert format */
	extrw,u %r23, 24,25, %r23 /* convert phys addr to tlb insert format */
	depw %r24, 31,22, %r28 /* Form aliased virtual address 'to' */
	depwi 0, 31,12, %r28 /* Clear any offset bits */
	copy %r28, %r29
	depwi 1, 9,1, %r29 /* Form aliased virtual address 'from' */
#endif

	/* Purge any old translations */
	pdtlb 0(%r28)
	pdtlb 0(%r29)

	ldi 64, %r1

	/*
	 * This loop is optimized for PCXL/PCXL2 ldw/ldw and stw/stw
	 * bundles (very restricted rules for bundling). It probably
	 * does OK on PCXU and better, but we could do better with
	 * ldd/std instructions. Note that until (if) we start saving
	 * the full 64 bit register values on interrupt, we can't
	 * use ldd/std on a 32 bit kernel.
	 */

1:
	ldw 0(%r29), %r19
	ldw 4(%r29), %r20
	ldw 8(%r29), %r21
	ldw 12(%r29), %r22
	stw %r19, 0(%r28)
	stw %r20, 4(%r28)
	stw %r21, 8(%r28)
	stw %r22, 12(%r28)
	ldw 16(%r29), %r19
	ldw 20(%r29), %r20
	ldw 24(%r29), %r21
	ldw 28(%r29), %r22
	stw %r19, 16(%r28)
	stw %r20, 20(%r28)
	stw %r21, 24(%r28)
	stw %r22, 28(%r28)
	ldw 32(%r29), %r19
	ldw 36(%r29), %r20
	ldw 40(%r29), %r21
	ldw 44(%r29), %r22
	stw %r19, 32(%r28)
	stw %r20, 36(%r28)
	stw %r21, 40(%r28)
	stw %r22, 44(%r28)
	ldw 48(%r29), %r19
	ldw 52(%r29), %r20
	ldw 56(%r29), %r21
	ldw 60(%r29), %r22
	stw %r19, 48(%r28)
	stw %r20, 52(%r28)
	stw %r21, 56(%r28)
	stw %r22, 60(%r28)
	ldo 64(%r28), %r28
	ADDIB> -1, %r1, 1b
	ldo 64(%r29), %r29

	bv %r0(%r2)
	nop

	.exit
	.procend
#endif

	.export __clear_user_page_asm,code

__clear_user_page_asm:
	.proc
	.callinfo NO_CALLS
	.entry

	tophys_r1 %r26

	ldil L%(TMPALIAS_MAP_START), %r28
#ifdef CONFIG_64BIT
#if (TMPALIAS_MAP_START >= 0x80000000)
	depdi 0, 31,32, %r28 /* clear any sign extension */
	/* FIXME: page size dependent */
#endif
	extrd,u %r26, 56,32, %r26 /* convert phys addr to tlb insert format */
	depd %r25, 63,22, %r28 /* Form aliased virtual address 'to' */
	depdi 0, 63,12, %r28 /* Clear any offset bits */
#else
	extrw,u %r26, 24,25, %r26 /* convert phys addr to tlb insert format */
	depw %r25, 31,22, %r28 /* Form aliased virtual address 'to' */
	depwi 0, 31,12, %r28 /* Clear any offset bits */
#endif
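
	/*
	 * At this point %r28 holds the temp-alias virtual address for the
	 * page (colored by the low bits of the second argument in %r25)
	 * and %r26 holds the physical page number in tlb-insert format,
	 * which is what the dtlb miss handler expects when the stores
	 * below fault on %r28.
	 */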
	/* Purge any old translation */
	pdtlb 0(%r28)

#ifdef CONFIG_64BIT
	ldi ASM_PAGE_SIZE_DIV128, %r1

	/* PREFETCH (Write) has not (yet) been proven to help here */
	/* #define PREFETCHW_OP ldd 256(%0), %r0 */

1:	std %r0, 0(%r28)
	std %r0, 8(%r28)
	std %r0, 16(%r28)
	std %r0, 24(%r28)
	std %r0, 32(%r28)
	std %r0, 40(%r28)
	std %r0, 48(%r28)
	std %r0, 56(%r28)
	std %r0, 64(%r28)
	std %r0, 72(%r28)
	std %r0, 80(%r28)
	std %r0, 88(%r28)
	std %r0, 96(%r28)
	std %r0, 104(%r28)
	std %r0, 112(%r28)
	std %r0, 120(%r28)
	ADDIB> -1, %r1, 1b
	ldo 128(%r28), %r28

#else	/* ! CONFIG_64BIT */

	ldi ASM_PAGE_SIZE_DIV64, %r1

1:
	stw %r0, 0(%r28)
	stw %r0, 4(%r28)
	stw %r0, 8(%r28)
	stw %r0, 12(%r28)
	stw %r0, 16(%r28)
	stw %r0, 20(%r28)
	stw %r0, 24(%r28)
	stw %r0, 28(%r28)
	stw %r0, 32(%r28)
	stw %r0, 36(%r28)
	stw %r0, 40(%r28)
	stw %r0, 44(%r28)
	stw %r0, 48(%r28)
	stw %r0, 52(%r28)
	stw %r0, 56(%r28)
	stw %r0, 60(%r28)
	ADDIB> -1, %r1, 1b
	ldo 64(%r28), %r28

#endif	/* CONFIG_64BIT */

	bv %r0(%r2)
	nop

	.exit
	.procend

	.export flush_kernel_dcache_page_asm

flush_kernel_dcache_page_asm:
	.proc
	.callinfo NO_CALLS
	.entry

	ldil L%dcache_stride, %r1
	ldw R%dcache_stride(%r1), %r23

#ifdef CONFIG_64BIT
	depdi,z 1, 63-PAGE_SHIFT,1, %r25
#else
	depwi,z 1, 31-PAGE_SHIFT,1, %r25
#endif
	add %r26, %r25, %r25
	sub %r25, %r23, %r25
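
	/*
	 * The depdi,z/depwi,z above forms PAGE_SIZE (1 << PAGE_SHIFT) in
	 * %r25; adding the page base and backing off one cache stride
	 * gives the address of the last line, which the CMPB below
	 * compares against %r26 to terminate the loop.
	 */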
1:	fdc,m %r23(%r26)
	fdc,m %r23(%r26)
	fdc,m %r23(%r26)
	fdc,m %r23(%r26)
	fdc,m %r23(%r26)
	fdc,m %r23(%r26)
	fdc,m %r23(%r26)
	fdc,m %r23(%r26)
	fdc,m %r23(%r26)
	fdc,m %r23(%r26)
	fdc,m %r23(%r26)
	fdc,m %r23(%r26)
	fdc,m %r23(%r26)
	fdc,m %r23(%r26)
	fdc,m %r23(%r26)
	CMPB<< %r26, %r25, 1b
	fdc,m %r23(%r26)

	sync
	bv %r0(%r2)
	nop

	.exit
	.procend

	.export flush_user_dcache_page

flush_user_dcache_page:
	.proc
	.callinfo NO_CALLS
	.entry

	ldil L%dcache_stride, %r1
	ldw R%dcache_stride(%r1), %r23

#ifdef CONFIG_64BIT
	depdi,z 1,63-PAGE_SHIFT,1, %r25
#else
	depwi,z 1,31-PAGE_SHIFT,1, %r25
#endif
	add %r26, %r25, %r25
	sub %r25, %r23, %r25

1:	fdc,m %r23(%sr3, %r26)
	fdc,m %r23(%sr3, %r26)
	fdc,m %r23(%sr3, %r26)
	fdc,m %r23(%sr3, %r26)
	fdc,m %r23(%sr3, %r26)
	fdc,m %r23(%sr3, %r26)
	fdc,m %r23(%sr3, %r26)
	fdc,m %r23(%sr3, %r26)
	fdc,m %r23(%sr3, %r26)
	fdc,m %r23(%sr3, %r26)
	fdc,m %r23(%sr3, %r26)
	fdc,m %r23(%sr3, %r26)
	fdc,m %r23(%sr3, %r26)
	fdc,m %r23(%sr3, %r26)
	fdc,m %r23(%sr3, %r26)
	CMPB<< %r26, %r25, 1b
	fdc,m %r23(%sr3, %r26)

	sync
	bv %r0(%r2)
	nop

	.exit
	.procend

	.export flush_user_icache_page

flush_user_icache_page:
	.proc
	.callinfo NO_CALLS
	.entry

	ldil L%dcache_stride, %r1
	ldw R%dcache_stride(%r1), %r23

#ifdef CONFIG_64BIT
	depdi,z 1, 63-PAGE_SHIFT,1, %r25
#else
	depwi,z 1, 31-PAGE_SHIFT,1, %r25
#endif
	add %r26, %r25, %r25
	sub %r25, %r23, %r25

1:	fic,m %r23(%sr3, %r26)
	fic,m %r23(%sr3, %r26)
	fic,m %r23(%sr3, %r26)
	fic,m %r23(%sr3, %r26)
	fic,m %r23(%sr3, %r26)
	fic,m %r23(%sr3, %r26)
	fic,m %r23(%sr3, %r26)
	fic,m %r23(%sr3, %r26)
	fic,m %r23(%sr3, %r26)
	fic,m %r23(%sr3, %r26)
	fic,m %r23(%sr3, %r26)
	fic,m %r23(%sr3, %r26)
	fic,m %r23(%sr3, %r26)
	fic,m %r23(%sr3, %r26)
	fic,m %r23(%sr3, %r26)
	CMPB<< %r26, %r25, 1b
	fic,m %r23(%sr3, %r26)

	sync
	bv %r0(%r2)
	nop

	.exit
	.procend

	.export purge_kernel_dcache_page

purge_kernel_dcache_page:
	.proc
	.callinfo NO_CALLS
	.entry

	ldil L%dcache_stride, %r1
	ldw R%dcache_stride(%r1), %r23

#ifdef CONFIG_64BIT
	depdi,z 1, 63-PAGE_SHIFT,1, %r25
#else
	depwi,z 1, 31-PAGE_SHIFT,1, %r25
#endif
	add %r26, %r25, %r25
	sub %r25, %r23, %r25

1:	pdc,m %r23(%r26)
	pdc,m %r23(%r26)
	pdc,m %r23(%r26)
	pdc,m %r23(%r26)
	pdc,m %r23(%r26)
	pdc,m %r23(%r26)
	pdc,m %r23(%r26)
	pdc,m %r23(%r26)
	pdc,m %r23(%r26)
	pdc,m %r23(%r26)
	pdc,m %r23(%r26)
	pdc,m %r23(%r26)
	pdc,m %r23(%r26)
	pdc,m %r23(%r26)
	pdc,m %r23(%r26)
	CMPB<< %r26, %r25, 1b
	pdc,m %r23(%r26)

	sync
	bv %r0(%r2)
	nop

	.exit
	.procend

#if 0
	/* Currently not used, but it still is a possible alternate
	 * solution.
	 */

	.export flush_alias_page

flush_alias_page:
	.proc
	.callinfo NO_CALLS
	.entry

	tophys_r1 %r26

	ldil L%(TMPALIAS_MAP_START), %r28
#ifdef CONFIG_64BIT
	extrd,u %r26, 56,32, %r26 /* convert phys addr to tlb insert format */
	depd %r25, 63,22, %r28 /* Form aliased virtual address 'to' */
	depdi 0, 63,12, %r28 /* Clear any offset bits */
#else
	extrw,u %r26, 24,25, %r26 /* convert phys addr to tlb insert format */
	depw %r25, 31,22, %r28 /* Form aliased virtual address 'to' */
	depwi 0, 31,12, %r28 /* Clear any offset bits */
#endif

	/* Purge any old translation */
	pdtlb 0(%r28)

	ldil L%dcache_stride, %r1
	ldw R%dcache_stride(%r1), %r23

#ifdef CONFIG_64BIT
	depdi,z 1, 63-PAGE_SHIFT,1, %r29
#else
	depwi,z 1, 31-PAGE_SHIFT,1, %r29
#endif
	add %r28, %r29, %r29
	sub %r29, %r23, %r29

1:	fdc,m %r23(%r28)
	fdc,m %r23(%r28)
	fdc,m %r23(%r28)
	fdc,m %r23(%r28)
	fdc,m %r23(%r28)
	fdc,m %r23(%r28)
	fdc,m %r23(%r28)
	fdc,m %r23(%r28)
	fdc,m %r23(%r28)
	fdc,m %r23(%r28)
	fdc,m %r23(%r28)
	fdc,m %r23(%r28)
	fdc,m %r23(%r28)
	fdc,m %r23(%r28)
	fdc,m %r23(%r28)
	CMPB<< %r28, %r29, 1b
	fdc,m %r23(%r28)

	sync
	bv %r0(%r2)
	nop

	.exit
	.procend
#endif

	.export flush_user_dcache_range_asm

flush_user_dcache_range_asm:
	.proc
	.callinfo NO_CALLS
	.entry

	ldil L%dcache_stride, %r1
	ldw R%dcache_stride(%r1), %r23
	ldo -1(%r23), %r21
	ANDCM %r26, %r21, %r26
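
	/*
	 * ldo -1/ANDCM round the start address down to a cache-stride
	 * boundary: %r21 = stride-1, and ANDCM clears exactly those low
	 * bits, so a partial line at the start of the range still gets
	 * flushed.
	 */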
1:	CMPB<<,n %r26, %r25, 1b
	fdc,m %r23(%sr3, %r26)

	sync
	bv %r0(%r2)
	nop

	.exit
	.procend

	.export flush_kernel_dcache_range_asm

flush_kernel_dcache_range_asm:
	.proc
	.callinfo NO_CALLS
	.entry

	ldil L%dcache_stride, %r1
	ldw R%dcache_stride(%r1), %r23
	ldo -1(%r23), %r21
	ANDCM %r26, %r21, %r26

1:	CMPB<<,n %r26, %r25, 1b
	fdc,m %r23(%r26)

	sync
	syncdma
	bv %r0(%r2)
	nop

	.exit
	.procend

	.export flush_user_icache_range_asm

flush_user_icache_range_asm:
	.proc
	.callinfo NO_CALLS
	.entry

	ldil L%icache_stride, %r1
	ldw R%icache_stride(%r1), %r23
	ldo -1(%r23), %r21
	ANDCM %r26, %r21, %r26

1:	CMPB<<,n %r26, %r25, 1b
	fic,m %r23(%sr3, %r26)

	sync
	bv %r0(%r2)
	nop

	.exit
	.procend

	.export flush_kernel_icache_page

flush_kernel_icache_page:
	.proc
	.callinfo NO_CALLS
	.entry

	ldil L%icache_stride, %r1
	ldw R%icache_stride(%r1), %r23

#ifdef CONFIG_64BIT
	depdi,z 1, 63-PAGE_SHIFT,1, %r25
#else
	depwi,z 1, 31-PAGE_SHIFT,1, %r25
#endif
	add %r26, %r25, %r25
	sub %r25, %r23, %r25

1:	fic,m %r23(%sr4, %r26)
	fic,m %r23(%sr4, %r26)
	fic,m %r23(%sr4, %r26)
	fic,m %r23(%sr4, %r26)
	fic,m %r23(%sr4, %r26)
	fic,m %r23(%sr4, %r26)
	fic,m %r23(%sr4, %r26)
	fic,m %r23(%sr4, %r26)
	fic,m %r23(%sr4, %r26)
	fic,m %r23(%sr4, %r26)
	fic,m %r23(%sr4, %r26)
	fic,m %r23(%sr4, %r26)
	fic,m %r23(%sr4, %r26)
	fic,m %r23(%sr4, %r26)
	fic,m %r23(%sr4, %r26)
	CMPB<< %r26, %r25, 1b
	fic,m %r23(%sr4, %r26)

	sync
	bv %r0(%r2)
	nop

	.exit
	.procend

	.export flush_kernel_icache_range_asm

flush_kernel_icache_range_asm:
	.proc
	.callinfo NO_CALLS
	.entry

	ldil L%icache_stride, %r1
	ldw R%icache_stride(%r1), %r23
	ldo -1(%r23), %r21
	ANDCM %r26, %r21, %r26

1:	CMPB<<,n %r26, %r25, 1b
	fic,m %r23(%sr4, %r26)

	sync
	bv %r0(%r2)
	nop

	.exit
	.procend

	/* align should cover use of rfi in disable_sr_hashing_asm and
	 * srdis_done.
	 */
	.align 256
	.export disable_sr_hashing_asm,code

disable_sr_hashing_asm:
	.proc
	.callinfo NO_CALLS
	.entry

	/*
	 * Switch to real mode
	 */
	/* pcxt_ssm_bug */
	rsm PSW_SM_I, %r0
	load32 PA(1f), %r1
	nop
	nop
	nop
	nop
	nop

	rsm PSW_SM_Q, %r0 /* prep to load iia queue */
	mtctl %r0, %cr17 /* Clear IIASQ tail */
	mtctl %r0, %cr17 /* Clear IIASQ head */
	mtctl %r1, %cr18 /* IIAOQ head */
	ldo 4(%r1), %r1
	mtctl %r1, %cr18 /* IIAOQ tail */
	load32 REAL_MODE_PSW, %r1
	mtctl %r1, %ipsw
	rfi
	nop

1:	cmpib,=,n SRHASH_PCXST, %r26, srdis_pcxs
	cmpib,=,n SRHASH_PCXL, %r26, srdis_pcxl
	cmpib,=,n SRHASH_PA20, %r26, srdis_pa20
	b,n srdis_done

srdis_pcxs:
	/* Disable Space Register Hashing for PCXS,PCXT,PCXT' */
	.word 0x141c1a00 /* mfdiag %dr0, %r28 */
	.word 0x141c1a00 /* must issue twice */
	depwi 0,18,1, %r28 /* Clear DHE (dcache hash enable) */
	depwi 0,20,1, %r28 /* Clear IHE (icache hash enable) */
	.word 0x141c1600 /* mtdiag %r28, %dr0 */
	.word 0x141c1600 /* must issue twice */
	b,n srdis_done

srdis_pcxl:
	/* Disable Space Register Hashing for PCXL */
	.word 0x141c0600 /* mfdiag %dr0, %r28 */
	depwi 0,28,2, %r28 /* Clear DHASH_EN & IHASH_EN */
	.word 0x141c0240 /* mtdiag %r28, %dr0 */
	b,n srdis_done

srdis_pa20:
	/* Disable Space Register Hashing for PCXU,PCXU+,PCXW,PCXW+,PCXW2 */
	.word 0x144008bc /* mfdiag %dr2, %r28 */
	depdi 0, 54,1, %r28 /* clear DIAG_SPHASH_ENAB (bit 54) */
	.word 0x145c1840 /* mtdiag %r28, %dr2 */

srdis_done:
	/* Switch back to virtual mode */
	rsm PSW_SM_I, %r0 /* prep to load iia queue */
	load32 2f, %r1
	nop
	nop
	nop
	nop
	nop

	rsm PSW_SM_Q, %r0 /* prep to load iia queue */
	mtctl %r0, %cr17 /* Clear IIASQ tail */
	mtctl %r0, %cr17 /* Clear IIASQ head */
	mtctl %r1, %cr18 /* IIAOQ head */
	ldo 4(%r1), %r1
	mtctl %r1, %cr18 /* IIAOQ tail */
	load32 KERNEL_PSW, %r1
	mtctl %r1, %ipsw
	rfi
	nop

2:	bv %r0(%r2)
	nop

	.exit
	.procend

	.end