/* cpu_setup_6xx.S */
/*
 * This file contains low level CPU setup functions.
 * Copyright (C) 2003 Benjamin Herrenschmidt (benh@kernel.crashing.org)
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version
 * 2 of the License, or (at your option) any later version.
 *
 */
#include <asm/processor.h>
#include <asm/page.h>
#include <asm/cputable.h>
#include <asm/ppc_asm.h>
#include <asm/asm-offsets.h>
#include <asm/cache.h>
/* Per-CPU setup for 603/603e: init FPU regs (when an FPU is usable)
 * and enable the L1 caches. LR is kept in r4 across the helper calls
 * because each `bl` below clobbers it.
 */
_GLOBAL(__setup_cpu_603)
	mflr	r4
BEGIN_FTR_SECTION
	bl	__init_fpu_registers	/* patched out when CPU_FTR_FPU_UNAVAILABLE is set */
END_FTR_SECTION_IFCLR(CPU_FTR_FPU_UNAVAILABLE)
	bl	setup_common_caches
	mtlr	r4
	blr
/* Per-CPU setup for 604/604e: enable L1 caches and the 604-specific
 * HID0 performance bits. LR saved in r4 across the helper calls.
 */
_GLOBAL(__setup_cpu_604)
	mflr	r4
	bl	setup_common_caches
	bl	setup_604_hid0
	mtlr	r4
	blr
/* Per-CPU setup for 740/750: FPU register init (errata workaround),
 * L1 caches, and the common 750/7400 HID0 configuration.
 */
_GLOBAL(__setup_cpu_750)
	mflr	r4			/* helper calls clobber LR */
	bl	__init_fpu_registers
	bl	setup_common_caches
	bl	setup_750_7400_hid0
	mtlr	r4
	blr
/* Per-CPU setup for 750CX: common 750 setup plus the 750CX-specific
 * NAP/PLL fixup (see setup_750cx below).
 */
_GLOBAL(__setup_cpu_750cx)
	mflr	r4			/* helper calls clobber LR */
	bl	__init_fpu_registers
	bl	setup_common_caches
	bl	setup_750_7400_hid0
	bl	setup_750cx
	mtlr	r4
	blr
/* Per-CPU setup for 750FX: common 750 setup plus the (currently empty)
 * 750FX-specific hook.
 */
_GLOBAL(__setup_cpu_750fx)
	mflr	r4			/* helper calls clobber LR */
	bl	__init_fpu_registers
	bl	setup_common_caches
	bl	setup_750_7400_hid0
	bl	setup_750fx
	mtlr	r4
	blr
/* Per-CPU setup for 7400: FPU init, MSSSR0 errata workarounds
 * (applied only on affected revisions), caches, and common HID0 setup.
 */
_GLOBAL(__setup_cpu_7400)
	mflr	r4			/* helper calls clobber LR */
	bl	__init_fpu_registers
	bl	setup_7400_workarounds
	bl	setup_common_caches
	bl	setup_750_7400_hid0
	mtlr	r4
	blr
/* Per-CPU setup for 7410: like 7400 but with the 7410 rev-specific
 * errata check, and L2CR2 cleared (7410-only SPR).
 */
_GLOBAL(__setup_cpu_7410)
	mflr	r4			/* helper calls clobber LR */
	bl	__init_fpu_registers
	bl	setup_7410_workarounds
	bl	setup_common_caches
	bl	setup_750_7400_hid0
	li	r3,0
	mtspr	SPRN_L2CR2,r3		/* clear 7410 L2CR2 */
	mtlr	r4
	blr
/* Per-CPU setup for the 745x family: L1 caches plus all the
 * 745x-specific HID0/L3/L2-prefetch configuration.
 */
_GLOBAL(__setup_cpu_745x)
	mflr	r4			/* helper calls clobber LR */
	bl	setup_common_caches
	bl	setup_745x_specifics
	mtlr	r4
	blr
/* Enable caches for 603's, 604, 750 & 7400
 * Turns on the L1 I- and D-caches via HID0. The I-cache is always
 * flash-invalidated; the D-cache is invalidated only if it was not
 * already enabled (invalidating a live D-cache would lose dirty data).
 * Clobbers: r0, r8, r11.
 */
setup_common_caches:
	mfspr	r11,SPRN_HID0
	andi.	r0,r11,HID0_DCE		/* test: was the D-cache already on? */
	ori	r11,r11,HID0_ICE|HID0_DCE
	ori	r8,r11,HID0_ICFI	/* r8 = enables + I-cache invalidate */
	bne	1f			/* don't invalidate the D-cache */
	ori	r8,r8,HID0_DCI		/* unless it wasn't enabled */
1:	sync
	mtspr	SPRN_HID0,r8		/* enable and invalidate caches */
	sync
	mtspr	SPRN_HID0,r11		/* enable caches (invalidate bits off) */
	sync
	isync
	blr
/* 604, 604e, 604ev, ...
 * Enable superscalar execution & branch history table
 * The first HID0 write also has BTCD set to flush the branch target
 * address cache; the second write leaves only SIED|BHTE added.
 * Clobbers: r8, r11.
 */
setup_604_hid0:
	mfspr	r11,SPRN_HID0
	ori	r11,r11,HID0_SIED|HID0_BHTE
	ori	r8,r11,HID0_BTCD
	sync
	mtspr	SPRN_HID0,r8		/* flush branch target address cache */
	sync				/* on 604e/604r */
	mtspr	SPRN_HID0,r11
	sync
	isync
	blr
/* 7400 <= rev 2.7 and 7410 rev = 1.0 suffer from some
 * erratas we work around here.
 * Moto MPC710CE.pdf describes them, those are errata
 * #3, #4 and #5
 * Note that we assume the firmware didn't choose to
 * apply other workarounds (there are other ones documented
 * in the .pdf). It appear that Apple firmware only works
 * around #3 and with the same fix we use. We may want to
 * check if the CPU is using 60x bus mode in which case
 * the workaround for errata #4 is useless. Also, we may
 * want to explicitly clear HID0_NOPDST as this is not
 * needed once we have applied workaround #5 (though it's
 * not set by Apple's firmware at least).
 *
 * Both entry points check the PVR revision (low 16 bits) and fall
 * through to the shared MSSSR0 fixup at label 1 only on affected
 * revisions. Clobbers: r3, r11.
 */
setup_7400_workarounds:
	mfpvr	r3
	rlwinm	r3,r3,0,20,31		/* r3 = PVR revision field */
	cmpwi	0,r3,0x0207
	ble	1f			/* rev <= 2.7: apply workarounds */
	blr
setup_7410_workarounds:
	mfpvr	r3
	rlwinm	r3,r3,0,20,31		/* r3 = PVR revision field */
	cmpwi	0,r3,0x0100
	bnelr				/* only rev 1.0 is affected */
1:
	mfspr	r11,SPRN_MSSSR0
	/* Errata #3: Set L1OPQ_SIZE to 0x10 */
	rlwinm	r11,r11,0,9,6		/* clear the field (bits 7-8) first */
	oris	r11,r11,0x0100
	/* Errata #4: Set L2MQ_SIZE to 1 (check for MPX mode first ?) */
	oris	r11,r11,0x0002
	/* Errata #5: Set DRLT_SIZE to 0x01 */
	rlwinm	r11,r11,0,5,2		/* clear the field (bits 3-4) first */
	oris	r11,r11,0x0800
	sync
	mtspr	SPRN_MSSSR0,r11
	sync
	isync
	blr
/* 740/750/7400/7410
 * Enable Store Gathering (SGE), Address Brodcast (ABE),
 * Branch History Table (BHTE), Branch Target ICache (BTIC)
 * Dynamic Power Management (DPM), Speculative (SPD)
 * Clear Instruction cache throttling (ICTC)
 * BTIC and DPM are patched back off (via xor) on CPUs whose feature
 * bits mark them broken. Clobbers: r3, r11.
 */
setup_750_7400_hid0:
	mfspr	r11,SPRN_HID0
	ori	r11,r11,HID0_SGE | HID0_ABE | HID0_BHTE | HID0_BTIC
	oris	r11,r11,HID0_DPM@h
BEGIN_FTR_SECTION
	xori	r11,r11,HID0_BTIC	/* undo BTIC where it's broken */
END_FTR_SECTION_IFSET(CPU_FTR_NO_BTIC)
BEGIN_FTR_SECTION
	xoris	r11,r11,HID0_DPM@h	/* disable dynamic power mgmt */
END_FTR_SECTION_IFSET(CPU_FTR_NO_DPM)
	li	r3,HID0_SPD
	andc	r11,r11,r3		/* clear SPD: enable speculative */
	li	r3,0
	mtspr	SPRN_ICTC,r3		/* Instruction Cache Throttling off */
	isync
	mtspr	SPRN_HID0,r11
	sync
	isync
	blr
/* 750cx specific
 * Looks like we have to disable NAP feature for some PLL settings...
 * (waiting for confirmation)
 * Extracts the 4-bit PLL config from the top of HID1; if it is 7, 9
 * or 11, clears CPU_FTR_CAN_NAP in the cpu_spec features word.
 * r5 is presumed to point at this CPU's cpu_spec entry (set by the
 * caller of __setup_cpu_750cx) -- TODO confirm against caller.
 * Clobbers: r6, r7, r10, cr0-cr2.
 */
setup_750cx:
	mfspr	r10, SPRN_HID1
	rlwinm	r10,r10,4,28,31		/* r10 = HID1 top nibble (PLL cfg) */
	cmpwi	cr0,r10,7
	cmpwi	cr1,r10,9
	cmpwi	cr2,r10,11
	cror	4*cr0+eq,4*cr0+eq,4*cr1+eq	/* cr0.eq = (7 || 9) */
	cror	4*cr0+eq,4*cr0+eq,4*cr2+eq	/* cr0.eq = (7 || 9 || 11) */
	bnelr				/* PLL setting not affected: done */
	lwz	r6,CPU_SPEC_FEATURES(r5)
	li	r7,CPU_FTR_CAN_NAP
	andc	r6,r6,r7		/* clear CAN_NAP */
	stw	r6,CPU_SPEC_FEATURES(r5)
	blr
/* 750fx specific
 * Currently nothing to do beyond the common 750 setup; kept as a
 * hook called from __setup_cpu_750fx.
 */
setup_750fx:
	blr
/* MPC 745x
 * Enable Store Gathering (SGE), Branch Folding (FOLD)
 * Branch History Table (BHTE), Branch Target ICache (BTIC)
 * Dynamic Power Management (DPM), Speculative (SPD)
 * Ensure our data cache instructions really operate.
 * Timebase has to be running or we wouldn't have made it here,
 * just ensure we don't disable it.
 * Clear Instruction cache throttling (ICTC)
 * Enable L2 HW prefetch
 * r5 is presumed to point at this CPU's cpu_spec entry (set by the
 * caller of __setup_cpu_745x) -- TODO confirm against caller.
 * Clobbers: r0, r3, r6, r7, r11.
 */
setup_745x_specifics:
	/* We check for the presence of an L3 cache setup by
	 * the firmware. If any, we disable NAP capability as
	 * it's known to be bogus on rev 2.1 and earlier
	 */
BEGIN_FTR_SECTION
	mfspr	r11,SPRN_L3CR
	andis.	r11,r11,L3CR_L3E@h	/* L3 enabled by firmware? */
	beq	1f			/* no L3: keep NAP */
END_FTR_SECTION_IFSET(CPU_FTR_L3CR)
	lwz	r6,CPU_SPEC_FEATURES(r5)
	andi.	r0,r6,CPU_FTR_L3_DISABLE_NAP
	beq	1f			/* this rev doesn't need the fixup */
	li	r7,CPU_FTR_CAN_NAP
	andc	r6,r6,r7		/* clear CAN_NAP */
	stw	r6,CPU_SPEC_FEATURES(r5)
1:
	mfspr	r11,SPRN_HID0
	/* All of the bits we have to set.....
	 */
	ori	r11,r11,HID0_SGE | HID0_FOLD | HID0_BHTE
	ori	r11,r11,HID0_LRSTK | HID0_BTIC
	oris	r11,r11,HID0_DPM@h
BEGIN_FTR_SECTION
	xori	r11,r11,HID0_BTIC	/* undo BTIC where it's broken */
END_FTR_SECTION_IFSET(CPU_FTR_NO_BTIC)
BEGIN_FTR_SECTION
	xoris	r11,r11,HID0_DPM@h	/* disable dynamic power mgmt */
END_FTR_SECTION_IFSET(CPU_FTR_NO_DPM)
	/* All of the bits we have to clear....
	 */
	li	r3,HID0_SPD | HID0_NOPDST | HID0_NOPTI
	andc	r11,r11,r3		/* clear SPD: enable speculative */
	li	r3,0
	mtspr	SPRN_ICTC,r3		/* Instruction Cache Throttling off */
	isync
	mtspr	SPRN_HID0,r11
	sync
	isync
	/* Enable L2 HW prefetch, if L2 is enabled
	 */
	mfspr	r3,SPRN_L2CR
	andis.	r3,r3,L2CR_L2E@h
	beqlr				/* L2 off: nothing to prefetch into */
	mfspr	r3,SPRN_MSSCR0
	ori	r3,r3,3			/* low MSSCR0 bits = L2 prefetch enable -- TODO confirm vs 7450 UM */
	sync
	mtspr	SPRN_MSSCR0,r3
	sync
	isync
	blr
/*
 * Initialize the FPU registers. This is needed to work around an errata
 * in some 750 cpus where using a not yet initialized FPU register after
 * power on reset may hang the CPU
 *
 * Temporarily sets MSR:FP, loads all 32 FPRs from empty_zero_page,
 * then restores the original MSR. r3 appears to carry an address
 * offset added to empty_zero_page (0 when running at the link
 * address) -- TODO confirm against callers.
 * Clobbers: r9, r10, r11, f0-f31.
 */
_GLOBAL(__init_fpu_registers)
	mfmsr	r10
	ori	r11,r10,MSR_FP		/* enable FP so FPR loads are legal */
	mtmsr	r11
	isync
	addis	r9,r3,empty_zero_page@ha
	addi	r9,r9,empty_zero_page@l
	REST_32FPRS(0,r9)		/* load f0-f31 with zeroes */
	sync
	mtmsr	r10			/* restore original MSR (FP state) */
	isync
	blr
/* Definitions for the table use to save CPU states
 * Byte offsets into cpu_state_storage, one word per saved SPR.
 * Keep CS_SIZE in sync when adding entries.
 */
#define CS_HID0		0
#define CS_HID1		4
#define CS_HID2		8
#define CS_MSSCR0	12
#define CS_MSSSR0	16
#define CS_ICTRL	20
#define CS_LDSTCR	24
#define CS_LDSTDB	28
#define CS_SIZE		32

	.data
	.balign	L1_CACHE_BYTES		/* keep the save area cache-line aligned */
cpu_state_storage:
	.space	CS_SIZE
	.balign	L1_CACHE_BYTES,0
	.text
/* Called in normal context to backup CPU 0 state. This
 * does not include cache settings. This function is also
 * called for machine sleep. This does not include the MMU
 * setup, BATs, etc... but rather the "special" registers
 * like HID0, HID1, MSSCR0, etc...
 *
 * Identifies the CPU from PVR[0:15] and saves the per-family SPRs
 * into cpu_state_storage (see CS_* offsets). Runs with the MMU on,
 * hence the plain virtual lis/ori address. Clobbers: r3-r5, r7, cr0-cr7.
 */
_GLOBAL(__save_cpu_setup)
	/* Some CR fields are volatile, we back it up all */
	mfcr	r7
	/* Get storage ptr */
	lis	r5,cpu_state_storage@h
	ori	r5,r5,cpu_state_storage@l
	/* Save HID0 (common to all CONFIG_6xx cpus) */
	mfspr	r3,SPRN_HID0
	stw	r3,CS_HID0(r5)
	/* Now deal with CPU type dependent registers */
	mfspr	r3,SPRN_PVR
	srwi	r3,r3,16		/* r3 = PVR version (upper halfword) */
	cmplwi	cr0,r3,0x8000	/* 7450 */
	cmplwi	cr1,r3,0x000c	/* 7400 */
	cmplwi	cr2,r3,0x800c	/* 7410 */
	cmplwi	cr3,r3,0x8001	/* 7455 */
	cmplwi	cr4,r3,0x8002	/* 7457 */
	cmplwi	cr5,r3,0x8003	/* 7447A */
	cmplwi	cr6,r3,0x7000	/* 750FX */
	cmplwi	cr7,r3,0x8004	/* 7448 */
	/* cr1 is 7400 || 7410 */
	cror	4*cr1+eq,4*cr1+eq,4*cr2+eq
	/* cr0 is 74xx */
	cror	4*cr0+eq,4*cr0+eq,4*cr3+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr4+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr1+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr5+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr7+eq
	bne	1f			/* not a 74xx: check 750FX below */
	/* Backup 74xx specific regs */
	mfspr	r4,SPRN_MSSCR0
	stw	r4,CS_MSSCR0(r5)
	mfspr	r4,SPRN_MSSSR0
	stw	r4,CS_MSSSR0(r5)
	beq	cr1,1f			/* 7400/7410 lack the 745x SPRs */
	/* Backup 745x specific registers */
	mfspr	r4,SPRN_HID1
	stw	r4,CS_HID1(r5)
	mfspr	r4,SPRN_ICTRL
	stw	r4,CS_ICTRL(r5)
	mfspr	r4,SPRN_LDSTCR
	stw	r4,CS_LDSTCR(r5)
	mfspr	r4,SPRN_LDSTDB
	stw	r4,CS_LDSTDB(r5)
1:
	bne	cr6,1f			/* not a 750FX: done */
	/* Backup 750FX specific registers */
	mfspr	r4,SPRN_HID1
	stw	r4,CS_HID1(r5)
	/* If rev 2.x, backup HID2 */
	mfspr	r3,SPRN_PVR
	andi.	r3,r3,0xff00		/* r3 = major revision byte */
	cmpwi	cr0,r3,0x0200
	bne	1f
	mfspr	r4,SPRN_HID2
	stw	r4,CS_HID2(r5)
1:
	mtcr	r7			/* restore caller's CR fields */
	blr
/* Called with no MMU context (typically MSR:IR/DR off) to
 * restore CPU state as backed up by the previous
 * function. This does not include cache setting
 *
 * Mirrors __save_cpu_setup: same PVR decode, restoring each saved SPR
 * with the sync/isync sequences the 74xx/750FX parts require. Because
 * translation may be off, the storage pointer is formed from the
 * physical address (KERNELBASE subtracted from the high half; the @l
 * half is unchanged, which assumes KERNELBASE's low 16 bits are zero
 * -- TODO confirm). Clobbers: r3-r7, cr0-cr7.
 */
_GLOBAL(__restore_cpu_setup)
	/* Some CR fields are volatile, we back it up all */
	mfcr	r7
	/* Get storage ptr */
	lis	r5,(cpu_state_storage-KERNELBASE)@h
	ori	r5,r5,cpu_state_storage@l
	/* Restore HID0 */
	lwz	r3,CS_HID0(r5)
	sync
	isync
	mtspr	SPRN_HID0,r3
	sync
	isync
	/* Now deal with CPU type dependent registers */
	mfspr	r3,SPRN_PVR
	srwi	r3,r3,16		/* r3 = PVR version (upper halfword) */
	cmplwi	cr0,r3,0x8000	/* 7450 */
	cmplwi	cr1,r3,0x000c	/* 7400 */
	cmplwi	cr2,r3,0x800c	/* 7410 */
	cmplwi	cr3,r3,0x8001	/* 7455 */
	cmplwi	cr4,r3,0x8002	/* 7457 */
	cmplwi	cr5,r3,0x8003	/* 7447A */
	cmplwi	cr6,r3,0x7000	/* 750FX */
	cmplwi	cr7,r3,0x8004	/* 7448 */
	/* cr1 is 7400 || 7410 */
	cror	4*cr1+eq,4*cr1+eq,4*cr2+eq
	/* cr0 is 74xx */
	cror	4*cr0+eq,4*cr0+eq,4*cr3+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr4+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr1+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr5+eq
	cror	4*cr0+eq,4*cr0+eq,4*cr7+eq
	bne	2f			/* not a 74xx: check 750FX below */
	/* Restore 74xx specific regs */
	lwz	r4,CS_MSSCR0(r5)
	sync
	mtspr	SPRN_MSSCR0,r4
	sync
	isync
	lwz	r4,CS_MSSSR0(r5)
	sync
	mtspr	SPRN_MSSSR0,r4
	sync
	isync
	bne	cr2,1f			/* only the 7410 has L2CR2 */
	/* Clear 7410 L2CR2 */
	li	r4,0
	mtspr	SPRN_L2CR2,r4
1:	beq	cr1,2f			/* 7400/7410 lack the 745x SPRs */
	/* Restore 745x specific registers */
	lwz	r4,CS_HID1(r5)
	sync
	mtspr	SPRN_HID1,r4
	isync
	sync
	lwz	r4,CS_ICTRL(r5)
	sync
	mtspr	SPRN_ICTRL,r4
	isync
	sync
	lwz	r4,CS_LDSTCR(r5)
	sync
	mtspr	SPRN_LDSTCR,r4
	isync
	sync
	lwz	r4,CS_LDSTDB(r5)
	sync
	mtspr	SPRN_LDSTDB,r4
	isync
	sync
2:	bne	cr6,1f			/* not a 750FX: done */
	/* Restore 750FX specific registers
	 * that is restore HID2 on rev 2.x and PLL config & switch
	 * to PLL 0 on all
	 */
	/* If rev 2.x, restore HID2 with low voltage bit cleared */
	mfspr	r3,SPRN_PVR
	andi.	r3,r3,0xff00		/* r3 = major revision byte */
	cmpwi	cr0,r3,0x0200
	bne	4f
	lwz	r4,CS_HID2(r5)
	rlwinm	r4,r4,0,19,17		/* clear bit 18 (low voltage bit) */
	mtspr	SPRN_HID2,r4
	sync
4:
	lwz	r4,CS_HID1(r5)
	rlwinm	r5,r4,0,16,14		/* clear bit 15: select PLL 0 first */
	mtspr	SPRN_HID1,r5
	/* Wait for PLL to stabilize */
	mftbl	r5
3:	mftbl	r6
	sub	r6,r6,r5		/* r6 = timebase ticks elapsed */
	cmplwi	cr0,r6,10000
	ble	3b			/* spin for > 10000 TB ticks */
	/* Setup final PLL */
	mtspr	SPRN_HID1,r4		/* restore the saved PLL config */
1:
	mtcr	r7			/* restore caller's CR fields */
	blr