/* cpu_setup_6xx.S */
/*
 * This file contains low level CPU setup functions.
 * Copyright (C) 2003 Benjamin Herrenschmidt (benh@kernel.crashing.org)
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version
 * 2 of the License, or (at your option) any later version.
 *
 */
  11. #include <linux/config.h>
  12. #include <asm/processor.h>
  13. #include <asm/page.h>
  14. #include <asm/ppc_asm.h>
  15. #include <asm/cputable.h>
  16. #include <asm/ppc_asm.h>
  17. #include <asm/offsets.h>
  18. #include <asm/cache.h>
/* 601: no extra setup needed, just return */
_GLOBAL(__setup_cpu_601)
	blr
/* 603: only the common cache enable is required */
_GLOBAL(__setup_cpu_603)
	b	setup_common_caches	/* tail call: blr there returns to our caller */
/* 604: enable caches, then superscalar/BHT bits in HID0 */
_GLOBAL(__setup_cpu_604)
	mflr	r4			/* save LR: the bl's below clobber it */
	bl	setup_common_caches
	bl	setup_604_hid0
	mtlr	r4
	blr
/* 750: init FPRs (errata workaround), enable caches and HID0 features */
_GLOBAL(__setup_cpu_750)
	mflr	r4			/* save LR across the bl calls */
	bl	__init_fpu_registers
	bl	setup_common_caches
	bl	setup_750_7400_hid0
	mtlr	r4
	blr
/* 750CX: like 750, plus the 750CX-specific NAP/PLL check */
_GLOBAL(__setup_cpu_750cx)
	mflr	r4			/* save LR across the bl calls */
	bl	__init_fpu_registers
	bl	setup_common_caches
	bl	setup_750_7400_hid0
	bl	setup_750cx
	mtlr	r4
	blr
/* 750FX: like 750, plus the (currently empty) 750FX-specific hook */
_GLOBAL(__setup_cpu_750fx)
	mflr	r4			/* save LR across the bl calls */
	bl	__init_fpu_registers
	bl	setup_common_caches
	bl	setup_750_7400_hid0
	bl	setup_750fx
	mtlr	r4
	blr
/* 7400: apply MSSSR0 errata workarounds (rev <= 2.7) before the
 * common cache/HID0 setup
 */
_GLOBAL(__setup_cpu_7400)
	mflr	r4			/* save LR across the bl calls */
	bl	__init_fpu_registers
	bl	setup_7400_workarounds
	bl	setup_common_caches
	bl	setup_750_7400_hid0
	mtlr	r4
	blr
/* 7410: apply rev 1.0 errata workarounds, common setup, and clear
 * L2CR2 (a 7410-only SPR)
 */
_GLOBAL(__setup_cpu_7410)
	mflr	r4			/* save LR across the bl calls */
	bl	__init_fpu_registers
	bl	setup_7410_workarounds
	bl	setup_common_caches
	bl	setup_750_7400_hid0
	li	r3,0
	mtspr	SPRN_L2CR2,r3		/* clear L2CR2 */
	mtlr	r4
	blr
/* 745x family (7450/7455/7457/7447A/7448): caches + 745x HID0/L2 setup.
 * Note: no __init_fpu_registers here, unlike the 750/7400 paths.
 */
_GLOBAL(__setup_cpu_745x)
	mflr	r4			/* save LR across the bl calls */
	bl	setup_common_caches
	bl	setup_745x_specifics
	mtlr	r4
	blr
/* Enable caches for 603's, 604, 750 & 7400.
 * r11 = final HID0 (ICE|DCE set); r8 = same plus invalidate bits.
 * If the D-cache was already enabled we must NOT invalidate it
 * (it may hold dirty data), so DCI is only added when it was off.
 */
setup_common_caches:
	mfspr	r11,SPRN_HID0
	andi.	r0,r11,HID0_DCE		/* cr0: was D-cache already enabled? */
	ori	r11,r11,HID0_ICE|HID0_DCE
	ori	r8,r11,HID0_ICFI	/* ori does not touch CR, cr0 still valid */
	bne	1f			/* don't invalidate the D-cache */
	ori	r8,r8,HID0_DCI		/* unless it wasn't enabled */
1:	sync
	mtspr	SPRN_HID0,r8		/* enable and invalidate caches */
	sync
	mtspr	SPRN_HID0,r11		/* enable caches (invalidate bits cleared) */
	sync
	isync
	blr
/* 604, 604e, 604ev, ...
 * Enable superscalar execution & branch history table.
 * BTCD is pulsed (set in r8, absent from final r11) to flush the
 * branch target address cache.
 */
setup_604_hid0:
	mfspr	r11,SPRN_HID0
	ori	r11,r11,HID0_SIED|HID0_BHTE
	ori	r8,r11,HID0_BTCD	/* r8 = r11 + BTAC-disable pulse */
	sync
	mtspr	SPRN_HID0,r8	/* flush branch target address cache */
	sync			/* on 604e/604r */
	mtspr	SPRN_HID0,r11	/* final value, BTCD cleared again */
	sync
	isync
	blr
/* 7400 <= rev 2.7 and 7410 rev = 1.0 suffer from some
 * erratas we work around here.
 * Moto MPC710CE.pdf describes them, those are errata
 * #3, #4 and #5
 * Note that we assume the firmware didn't choose to
 * apply other workarounds (there are other ones documented
 * in the .pdf). It appear that Apple firmware only works
 * around #3 and with the same fix we use. We may want to
 * check if the CPU is using 60x bus mode in which case
 * the workaround for errata #4 is useless. Also, we may
 * want to explicitely clear HID0_NOPDST as this is not
 * needed once we have applied workaround #5 (though it's
 * not set by Apple's firmware at least).
 *
 * Both entry points share the MSSSR0 fixup code at label 1:
 * below, so they must stay together.
 */
setup_7400_workarounds:
	mfpvr	r3
	rlwinm	r3,r3,0,20,31		/* keep PVR revision (low 12 bits) */
	cmpwi	0,r3,0x0207
	ble	1f			/* rev <= 2.7: apply workarounds */
	blr
setup_7410_workarounds:
	mfpvr	r3
	rlwinm	r3,r3,0,20,31		/* keep PVR revision (low 12 bits) */
	cmpwi	0,r3,0x0100
	bnelr				/* only rev 1.0 is affected */
1:
	mfspr	r11,SPRN_MSSSR0
	/* Errata #3: Set L1OPQ_SIZE to 0x10 */
	rlwinm	r11,r11,0,9,6		/* clear bits 7-8 first */
	oris	r11,r11,0x0100
	/* Errata #4: Set L2MQ_SIZE to 1 (check for MPX mode first ?) */
	oris	r11,r11,0x0002
	/* Errata #5: Set DRLT_SIZE to 0x01 */
	rlwinm	r11,r11,0,5,2		/* clear bits 3-4 first */
	oris	r11,r11,0x0800
	sync
	mtspr	SPRN_MSSSR0,r11
	sync
	isync
	blr
/* 740/750/7400/7410
 * Enable Store Gathering (SGE), Address Brodcast (ABE),
 * Branch History Table (BHTE), Branch Target ICache (BTIC)
 * Dynamic Power Management (DPM), Speculative (SPD)
 * Clear Instruction cache throttling (ICTC)
 * BTIC/DPM are conditionally removed again (xori/xoris undo the
 * unconditional set above) on CPUs flagged NO_BTIC / NO_DPM.
 */
setup_750_7400_hid0:
	mfspr	r11,SPRN_HID0
	ori	r11,r11,HID0_SGE | HID0_ABE | HID0_BHTE | HID0_BTIC
	oris	r11,r11,HID0_DPM@h
BEGIN_FTR_SECTION
	xori	r11,r11,HID0_BTIC	/* undo BTIC on broken CPUs */
END_FTR_SECTION_IFSET(CPU_FTR_NO_BTIC)
BEGIN_FTR_SECTION
	xoris	r11,r11,HID0_DPM@h	/* disable dynamic power mgmt */
END_FTR_SECTION_IFSET(CPU_FTR_NO_DPM)
	li	r3,HID0_SPD
	andc	r11,r11,r3		/* clear SPD: enable speculative */
	li	r3,0
	mtspr	SPRN_ICTC,r3		/* Instruction Cache Throttling off */
	isync
	mtspr	SPRN_HID0,r11
	sync
	isync
	blr
/* 750cx specific
 * Looks like we have to disable NAP feature for some PLL settings...
 * (waiting for confirmation)
 * If the PLL config (extracted from HID1) is 7, 9 or 11, clear
 * CPU_FTR_CAN_NAP in the cpu feature word.
 * NOTE(review): r5 is presumably the cpu spec entry pointer set up
 * by the caller of __setup_cpu_750cx — confirm against identify_cpu.
 */
setup_750cx:
	mfspr	r10, SPRN_HID1
	rlwinm	r10,r10,4,28,31		/* r10 = top nibble of HID1 (PLL cfg) */
	cmpwi	cr0,r10,7
	cmpwi	cr1,r10,9
	cmpwi	cr2,r10,11
	cror	4*cr0+eq,4*cr0+eq,4*cr1+eq	/* cr0.eq = (7 || 9) */
	cror	4*cr0+eq,4*cr0+eq,4*cr2+eq	/* cr0.eq = (7 || 9 || 11) */
	bnelr				/* other PLL settings: NAP stays allowed */
	lwz	r6,CPU_SPEC_FEATURES(r5)
	li	r7,CPU_FTR_CAN_NAP
	andc	r6,r6,r7		/* clear the CAN_NAP feature bit */
	stw	r6,CPU_SPEC_FEATURES(r5)
	blr
/* 750fx specific
 * Placeholder: nothing to do at setup time yet.
 */
setup_750fx:
	blr
/* MPC 745x
 * Enable Store Gathering (SGE), Branch Folding (FOLD)
 * Branch History Table (BHTE), Branch Target ICache (BTIC)
 * Dynamic Power Management (DPM), Speculative (SPD)
 * Ensure our data cache instructions really operate.
 * Timebase has to be running or we wouldn't have made it here,
 * just ensure we don't disable it.
 * Clear Instruction cache throttling (ICTC)
 * Enable L2 HW prefetch
 * NOTE(review): r5 is presumably the cpu spec entry pointer set up
 * by the caller of __setup_cpu_745x — confirm against identify_cpu.
 */
setup_745x_specifics:
	/* We check for the presence of an L3 cache setup by
	 * the firmware. If any, we disable NAP capability as
	 * it's known to be bogus on rev 2.1 and earlier
	 */
	mfspr	r11,SPRN_L3CR
	andis.	r11,r11,L3CR_L3E@h	/* L3 enabled by firmware? */
	beq	1f
	lwz	r6,CPU_SPEC_FEATURES(r5)
	andi.	r0,r6,CPU_FTR_L3_DISABLE_NAP	/* this CPU affected? */
	beq	1f
	li	r7,CPU_FTR_CAN_NAP
	andc	r6,r6,r7		/* clear the CAN_NAP feature bit */
	stw	r6,CPU_SPEC_FEATURES(r5)
1:
	mfspr	r11,SPRN_HID0

	/* All of the bits we have to set.....
	 */
	ori	r11,r11,HID0_SGE | HID0_FOLD | HID0_BHTE
	ori	r11,r11,HID0_LRSTK | HID0_BTIC
	oris	r11,r11,HID0_DPM@h
BEGIN_FTR_SECTION
	xori	r11,r11,HID0_BTIC	/* undo BTIC on broken CPUs */
END_FTR_SECTION_IFSET(CPU_FTR_NO_BTIC)
BEGIN_FTR_SECTION
	xoris	r11,r11,HID0_DPM@h	/* disable dynamic power mgmt */
END_FTR_SECTION_IFSET(CPU_FTR_NO_DPM)

	/* All of the bits we have to clear....
	 */
	li	r3,HID0_SPD | HID0_NOPDST | HID0_NOPTI
	andc	r11,r11,r3		/* clear SPD: enable speculative */
	li	r3,0
	mtspr	SPRN_ICTC,r3		/* Instruction Cache Throttling off */
	isync
	mtspr	SPRN_HID0,r11
	sync
	isync

	/* Enable L2 HW prefetch, if L2 is enabled
	 */
	mfspr	r3,SPRN_L2CR
	andis.	r3,r3,L2CR_L2E@h
	beqlr				/* no L2: done */
	mfspr	r3,SPRN_MSSCR0
	ori	r3,r3,3			/* set L2 prefetch bits in MSSCR0 */
	sync
	mtspr	SPRN_MSSCR0,r3
	sync
	isync
	blr
/*
 * Initialize the FPU registers. This is needed to work around an errata
 * in some 750 cpus where using a not yet initialized FPU register after
 * power on reset may hang the CPU
 * Temporarily enables MSR:FP, loads all 32 FPRs from empty_zero_page,
 * then restores the original MSR.
 * NOTE(review): the base is formed as r3 + empty_zero_page, so r3 is
 * presumably an address offset (e.g. phys-virt) supplied by the caller
 * — confirm against the __setup_cpu_* call sites.
 */
_GLOBAL(__init_fpu_registers)
	mfmsr	r10			/* save current MSR */
	ori	r11,r10,MSR_FP		/* turn FP on */
	mtmsr	r11
	isync
	addis	r9,r3,empty_zero_page@ha
	addi	r9,r9,empty_zero_page@l	/* r9 = &empty_zero_page (+ r3 offset) */
	REST_32FPRS(0,r9)		/* load fr0..fr31 with zeros */
	sync
	mtmsr	r10			/* restore original MSR (FP back off) */
	isync
	blr
/* Definitions for the table use to save CPU states.
 * Byte offsets into cpu_state_storage, used by __save_cpu_setup
 * and __restore_cpu_setup below. Keep CS_SIZE >= last offset + 4.
 */
#define CS_HID0		0
#define CS_HID1		4
#define CS_HID2		8
#define CS_MSSCR0	12
#define CS_MSSSR0	16
#define CS_ICTRL	20
#define CS_LDSTCR	24
#define CS_LDSTDB	28
#define CS_SIZE		32

	.data
	.balign	L1_CACHE_LINE_SIZE	/* keep the save area cache-line aligned */
cpu_state_storage:
	.space	CS_SIZE
	.balign	L1_CACHE_LINE_SIZE,0
	.text
/* Called in normal context to backup CPU 0 state. This
 * does not include cache settings. This function is also
 * called for machine sleep. This does not include the MMU
 * setup, BATs, etc... but rather the "special" registers
 * like HID0, HID1, MSSCR0, etc...
 * Clobbers r3-r7 and CR (CR is restored on exit).
 */
_GLOBAL(__save_cpu_setup)
	/* Some CR fields are volatile, we back it up all */
	mfcr	r7

	/* Get storage ptr (virtual address: MMU is on here) */
	lis	r5,cpu_state_storage@h
	ori	r5,r5,cpu_state_storage@l

	/* Save HID0 (common to all CONFIG_6xx cpus) */
	mfspr	r3,SPRN_HID0
	stw	r3,CS_HID0(r5)

	/* Now deal with CPU type dependent registers */
	mfspr	r3,SPRN_PVR
	srwi	r3,r3,16		/* r3 = processor version (PVR high half) */
	cmplwi	cr0,r3,0x8000	/* 7450 */
	cmplwi	cr1,r3,0x000c	/* 7400 */
	cmplwi	cr2,r3,0x800c	/* 7410 */
	cmplwi	cr3,r3,0x8001	/* 7455 */
	cmplwi	cr4,r3,0x8002	/* 7457 */
	cmplwi	cr5,r3,0x8003	/* 7447A */
	cmplwi	cr6,r3,0x7000	/* 750FX */
	cmplwi	cr7,r3,0x8004	/* 7448 */

	/* cr1 is 7400 || 7410 */
	cror	4*cr1+eq,4*cr1+eq,4*cr2+eq
	/* cr0 is 74xx (any of 7450/7455/7457/7400/7410/7447A/7448) */
	cror	4*cr0+eq,4*cr0+eq,4*cr3+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr4+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr1+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr5+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr7+eq
	bne	1f			/* not a 74xx: check 750FX below */
	/* Backup 74xx specific regs */
	mfspr	r4,SPRN_MSSCR0
	stw	r4,CS_MSSCR0(r5)
	mfspr	r4,SPRN_MSSSR0
	stw	r4,CS_MSSSR0(r5)
	beq	cr1,1f			/* plain 7400/7410: no 745x-only SPRs */
	/* Backup 745x specific registers */
	mfspr	r4,SPRN_HID1
	stw	r4,CS_HID1(r5)
	mfspr	r4,SPRN_ICTRL
	stw	r4,CS_ICTRL(r5)
	mfspr	r4,SPRN_LDSTCR
	stw	r4,CS_LDSTCR(r5)
	mfspr	r4,SPRN_LDSTDB
	stw	r4,CS_LDSTDB(r5)
1:
	bne	cr6,1f			/* cr6 still valid: only 750FX continues */
	/* Backup 750FX specific registers */
	mfspr	r4,SPRN_HID1
	stw	r4,CS_HID1(r5)
	/* If rev 2.x, backup HID2 */
	mfspr	r3,SPRN_PVR
	andi.	r3,r3,0xff00		/* r3 = major revision field */
	cmpwi	cr0,r3,0x0200
	bne	1f
	mfspr	r4,SPRN_HID2
	stw	r4,CS_HID2(r5)
1:
	mtcr	r7			/* restore caller's CR */
	blr
/* Called with no MMU context (typically MSR:IR/DR off) to
 * restore CPU state as backed up by the previous
 * function. This does not include cache setting
 * Clobbers r3-r7 and CR (CR is restored on exit).
 */
_GLOBAL(__restore_cpu_setup)
	/* Some CR fields are volatile, we back it up all */
	mfcr	r7

	/* Get storage ptr as a physical address (MMU may be off).
	 * Only the high half needs the -KERNELBASE adjustment: this
	 * assumes the low 16 bits of KERNELBASE are zero, so @l is
	 * identical either way — TODO confirm for this platform.
	 */
	lis	r5,(cpu_state_storage-KERNELBASE)@h
	ori	r5,r5,cpu_state_storage@l

	/* Restore HID0 */
	lwz	r3,CS_HID0(r5)
	sync
	isync
	mtspr	SPRN_HID0,r3
	sync
	isync

	/* Now deal with CPU type dependent registers */
	mfspr	r3,SPRN_PVR
	srwi	r3,r3,16		/* r3 = processor version (PVR high half) */
	cmplwi	cr0,r3,0x8000	/* 7450 */
	cmplwi	cr1,r3,0x000c	/* 7400 */
	cmplwi	cr2,r3,0x800c	/* 7410 */
	cmplwi	cr3,r3,0x8001	/* 7455 */
	cmplwi	cr4,r3,0x8002	/* 7457 */
	cmplwi	cr5,r3,0x8003	/* 7447A */
	cmplwi	cr6,r3,0x7000	/* 750FX */
	cmplwi	cr7,r3,0x8004	/* 7448 */

	/* cr1 is 7400 || 7410 */
	cror	4*cr1+eq,4*cr1+eq,4*cr2+eq
	/* cr0 is 74xx (any of 7450/7455/7457/7400/7410/7447A/7448) */
	cror	4*cr0+eq,4*cr0+eq,4*cr3+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr4+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr1+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr5+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr7+eq
	bne	2f			/* not a 74xx: check 750FX below */
	/* Restore 74xx specific regs */
	lwz	r4,CS_MSSCR0(r5)
	sync
	mtspr	SPRN_MSSCR0,r4
	sync
	isync
	lwz	r4,CS_MSSSR0(r5)
	sync
	mtspr	SPRN_MSSSR0,r4
	sync
	isync
	bne	cr2,1f			/* cr2 (7410) still valid here */
	/* Clear 7410 L2CR2 */
	li	r4,0
	mtspr	SPRN_L2CR2,r4
1:	beq	cr1,2f			/* plain 7400/7410: no 745x-only SPRs */
	/* Restore 745x specific registers */
	lwz	r4,CS_HID1(r5)
	sync
	mtspr	SPRN_HID1,r4
	isync
	sync
	lwz	r4,CS_ICTRL(r5)
	sync
	mtspr	SPRN_ICTRL,r4
	isync
	sync
	lwz	r4,CS_LDSTCR(r5)
	sync
	mtspr	SPRN_LDSTCR,r4
	isync
	sync
	lwz	r4,CS_LDSTDB(r5)
	sync
	mtspr	SPRN_LDSTDB,r4
	isync
	sync
2:	bne	cr6,1f			/* cr6 still valid: only 750FX continues */
	/* Restore 750FX specific registers
	 * that is restore HID2 on rev 2.x and PLL config & switch
	 * to PLL 0 on all
	 */
	/* If rev 2.x, restore HID2 with low voltage bit cleared */
	mfspr	r3,SPRN_PVR
	andi.	r3,r3,0xff00		/* r3 = major revision field */
	cmpwi	cr0,r3,0x0200
	bne	4f
	lwz	r4,CS_HID2(r5)
	rlwinm	r4,r4,0,19,17		/* clear bit 18 (low voltage mode) */
	mtspr	SPRN_HID2,r4
	sync
4:
	lwz	r4,CS_HID1(r5)
	rlwinm	r5,r4,0,16,14		/* r5 = HID1 with bit 15 cleared (PLL0);
					 * note: r5 no longer the storage ptr */
	mtspr	SPRN_HID1,r5		/* switch to PLL 0 */
	/* Wait for PLL to stabilize */
	mftbl	r5
3:	mftbl	r6
	sub	r6,r6,r5		/* timebase ticks elapsed */
	cmplwi	cr0,r6,10000
	ble	3b			/* spin ~10000 TB ticks */
	/* Setup final PLL */
	mtspr	SPRN_HID1,r4		/* restore the saved PLL selection */
1:
	mtcr	r7			/* restore caller's CR */
	blr