/*
 * Copyright 2004, 2007 Freescale Semiconductor.
 * Srikanth Srinivasan <srikanth.srinivaan@freescale.com>
 *
 * See file CREDITS for list of people who contributed to this
 * project.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 2 of
 * the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston,
 * MA 02111-1307 USA
 */

/* U-Boot - Startup Code for 86xx PowerPC based Embedded Boards
 *
 * The processor starts at 0xfff00100 and the code is executed
 * from flash. The code is linked to run at a different address
 * in memory, but that is fine as long as we don't jump around
 * before relocating. board_init lies at a quite high address
 * and once the CPU has jumped there, everything is OK.
 */
#include <config.h>
#include <mpc86xx.h>
#include <version.h>

#include <ppc_asm.tmpl>
#include <ppc_defs.h>

#include <asm/cache.h>
#include <asm/mmu.h>

#ifndef CONFIG_IDENT_STRING
#define CONFIG_IDENT_STRING ""
#endif

/*
 * Need MSR_DR | MSR_IR enabled to access I/O (printf) in exceptions
 */

/*
 * Set up GOT: Global Offset Table
 *
 * Use r14 to access the GOT
 */
	START_GOT
	GOT_ENTRY(_GOT2_TABLE_)
	GOT_ENTRY(_FIXUP_TABLE_)

	GOT_ENTRY(_start)
	GOT_ENTRY(_start_of_vectors)
	GOT_ENTRY(_end_of_vectors)
	GOT_ENTRY(transfer_to_handler)

	GOT_ENTRY(__init_end)
	GOT_ENTRY(_end)
	GOT_ENTRY(__bss_start)
	END_GOT

/*
 * r3 - 1st arg to board_init(): IMMP pointer
 * r4 - 2nd arg to board_init(): boot flag
 */
	.text
	.long 0x27051956	/* U-Boot Magic Number */
	.globl version_string
version_string:
	.ascii U_BOOT_VERSION
	.ascii " (", __DATE__, " - ", __TIME__, ")"
	.ascii CONFIG_IDENT_STRING, "\0"

	. = EXC_OFF_SYS_RESET
	.globl _start
_start:
	li r21, BOOTFLAG_COLD	/* Normal Power-On: Boot from FLASH */
	b boot_cold
	sync

	. = EXC_OFF_SYS_RESET + 0x10
	.globl _start_warm
_start_warm:
	li r21, BOOTFLAG_WARM	/* Software reboot */
	b boot_warm
	sync

/* the boot code is located below the exception table */

	.globl _start_of_vectors
_start_of_vectors:

/* Machine check */
	STD_EXCEPTION(0x200, MachineCheck, MachineCheckException)

/* Data Storage exception. */
	STD_EXCEPTION(0x300, DataStorage, UnknownException)

/* Instruction Storage exception. */
	STD_EXCEPTION(0x400, InstStorage, UnknownException)

/* External Interrupt exception. */
	STD_EXCEPTION(0x500, ExtInterrupt, external_interrupt)

/* Alignment exception. */
	. = 0x600
Alignment:
	EXCEPTION_PROLOG(SRR0, SRR1)
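	/* Save the faulting address (DAR) and the fault cause (DSISR)
	 * into the exception frame so the C-level handler can report them.
	 */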
	mfspr r4,DAR
	stw r4,_DAR(r21)
	mfspr r5,DSISR
	stw r5,_DSISR(r21)
	addi r3,r1,STACK_FRAME_OVERHEAD
	li r20,MSR_KERNEL
	rlwimi r20,r23,0,16,16	/* copy EE bit from saved MSR */
	lwz r6,GOT(transfer_to_handler)
	mtlr r6
	blrl
.L_Alignment:
	.long AlignmentException - _start + EXC_OFF_SYS_RESET
	.long int_return - _start + EXC_OFF_SYS_RESET

/* Program check exception */
	. = 0x700
ProgramCheck:
	EXCEPTION_PROLOG(SRR0, SRR1)
	addi r3,r1,STACK_FRAME_OVERHEAD
	li r20,MSR_KERNEL
	rlwimi r20,r23,0,16,16	/* copy EE bit from saved MSR */
	lwz r6,GOT(transfer_to_handler)
	mtlr r6
	blrl
.L_ProgramCheck:
	.long ProgramCheckException - _start + EXC_OFF_SYS_RESET
	.long int_return - _start + EXC_OFF_SYS_RESET

	STD_EXCEPTION(0x800, FPUnavailable, UnknownException)

	/* I guess we could implement decrementer, and may have
	 * to someday for timekeeping.
	 */
	STD_EXCEPTION(0x900, Decrementer, timer_interrupt)
	STD_EXCEPTION(0xa00, Trap_0a, UnknownException)
	STD_EXCEPTION(0xb00, Trap_0b, UnknownException)
	STD_EXCEPTION(0xc00, SystemCall, UnknownException)
	STD_EXCEPTION(0xd00, SingleStep, UnknownException)
	STD_EXCEPTION(0xe00, Trap_0e, UnknownException)
	STD_EXCEPTION(0xf00, Trap_0f, UnknownException)
	STD_EXCEPTION(0x1000, SoftEmu, SoftEmuException)
	STD_EXCEPTION(0x1100, InstructionTLBMiss, UnknownException)
	STD_EXCEPTION(0x1200, DataTLBMiss, UnknownException)
	STD_EXCEPTION(0x1300, InstructionTLBError, UnknownException)
	STD_EXCEPTION(0x1400, DataTLBError, UnknownException)
	STD_EXCEPTION(0x1500, Reserved5, UnknownException)
	STD_EXCEPTION(0x1600, Reserved6, UnknownException)
	STD_EXCEPTION(0x1700, Reserved7, UnknownException)
	STD_EXCEPTION(0x1800, Reserved8, UnknownException)
	STD_EXCEPTION(0x1900, Reserved9, UnknownException)
	STD_EXCEPTION(0x1a00, ReservedA, UnknownException)
	STD_EXCEPTION(0x1b00, ReservedB, UnknownException)
	STD_EXCEPTION(0x1c00, DataBreakpoint, UnknownException)
	STD_EXCEPTION(0x1d00, InstructionBreakpoint, UnknownException)
	STD_EXCEPTION(0x1e00, PeripheralBreakpoint, UnknownException)
	STD_EXCEPTION(0x1f00, DevPortBreakpoint, UnknownException)

	.globl _end_of_vectors
_end_of_vectors:

	. = 0x2000

boot_cold:
boot_warm:
	/* If this is a multi-core system we need to check which CPU
	 * this is; if it is not CPU 0, send it to the Linux reset
	 * vector. */
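	/* The ID bit in MSSCR0 (mask 0x0020) identifies the core we are
	 * running on; the code below shifts it down to 0 or 1 and mirrors
	 * it into PIR so the CPU can be identified later.
	 */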
#if (CONFIG_NUM_CPUS > 1)
	mfspr r0, MSSCR0
	andi. r0, r0, 0x0020
	rlwinm r0,r0,27,31,31
	mtspr PIR, r0
	beq 1f
	bl secondary_cpu_setup
#endif

1:
#ifdef CFG_RAMBOOT
	/* disable everything */
	li r0, 0
	mtspr HID0, r0
	sync
	mtmsr 0
#endif

	bl invalidate_bats
	sync

#ifdef CFG_L2
	/* init the L2 cache */
	lis r3, L2_INIT@h
	ori r3, r3, L2_INIT@l
	mtspr l2cr, r3
	/* invalidate the L2 cache */
	bl l2cache_invalidate
	sync
#endif

	/*
	 * Calculate absolute address in FLASH and jump there
	 *------------------------------------------------------*/
	lis r3, CFG_MONITOR_BASE@h
	ori r3, r3, CFG_MONITOR_BASE@l
	addi r3, r3, in_flash - _start + EXC_OFF_SYS_RESET
	mtlr r3
	blr

in_flash:
	/* let the C-code set up the rest */
	/* */
	/* Be careful to keep code relocatable ! */
	/*------------------------------------------------------*/
	/* perform low-level init */

	/* enable extended addressing */
	bl enable_ext_addr

	/* setup the bats */
	bl setup_bats
	sync

#if (CFG_CCSRBAR_DEFAULT != CFG_CCSRBAR)
	/* setup ccsrbar */
	bl setup_ccsrbar
#endif

	/* setup the law entries */
	bl law_entry
	sync

	/*
	 * Cache must be enabled here for stack-in-cache trick.
	 * This means we need to enable the BATS.
	 * Cache should be turned on after BATs, since by default
	 * everything is write-through.
	 */

	/* enable address translation */
	bl enable_addr_trans
	sync

	/* enable and invalidate the data cache */
	/* bl l1dcache_enable */
	bl dcache_enable
	sync

#if 1
	bl icache_enable
#endif

#ifdef CFG_INIT_RAM_LOCK
	bl lock_ram_in_cache
	sync
#endif

	/* set up the stack pointer in our newly created
	 * cache-ram (r1) */
	lis r1, (CFG_INIT_RAM_ADDR + CFG_GBL_DATA_OFFSET)@h
	ori r1, r1, (CFG_INIT_RAM_ADDR + CFG_GBL_DATA_OFFSET)@l

	li r0, 0		/* Make room for stack frame header and */
	stwu r0, -4(r1)		/* clear final stack frame so that */
	stwu r0, -4(r1)		/* stack backtraces terminate cleanly */

	GET_GOT			/* initialize GOT access */

	/* run low-level CPU init code (from Flash) */
	bl cpu_init_f
	sync

#ifdef RUN_DIAG
	/* Load PX_AUX register address in r4 */
	lis r4, 0xf810
	ori r4, r4, 0x6
	/* Load contents of PX_AUX in r3 bits 24 to 31 */
	lbz r3, 0(r4)

	/* Mask and obtain the bit in r3 */
	rlwinm. r3, r3, 0, 24, 24
	/* If not zero, jump and continue with u-boot */
	bne diag_done

	/* Load back contents of PX_AUX in r3 bits 24 to 31 */
	lbz r3, 0(r4)
	/* Set the MSB of the register value */
	ori r3, r3, 0x80
	/* Write value in r3 back to PX_AUX */
	stb r3, 0(r4)

	/* Get the address to jump to in r3 */
	lis r3, CFG_DIAG_ADDR@h
	ori r3, r3, CFG_DIAG_ADDR@l

	/* Load the LR with the branch address */
	mtlr r3

	/* Branch to diagnostic */
	blr

diag_done:
#endif

	/* bl l2cache_enable */
	mr r3, r21
	/* r3: BOOTFLAG */
	/* run 1st part of board init code (from Flash) */
	bl board_init_f
	sync

	/* NOTREACHED */

	.globl invalidate_bats
invalidate_bats:
	li r0, 0
	/* invalidate BATs */
	mtspr IBAT0U, r0
	mtspr IBAT1U, r0
	mtspr IBAT2U, r0
	mtspr IBAT3U, r0
	mtspr IBAT4U, r0
	mtspr IBAT5U, r0
	mtspr IBAT6U, r0
	mtspr IBAT7U, r0
	isync
	mtspr DBAT0U, r0
	mtspr DBAT1U, r0
	mtspr DBAT2U, r0
	mtspr DBAT3U, r0
	mtspr DBAT4U, r0
	mtspr DBAT5U, r0
	mtspr DBAT6U, r0
	mtspr DBAT7U, r0
	isync
	sync
	blr

/* setup_bats - set them up to some initial state */
	.globl setup_bats
setup_bats:
	addis r0, r0, 0x0000

	/* IBAT 0 */
	addis r4, r0, CFG_IBAT0L@h
	ori r4, r4, CFG_IBAT0L@l
	addis r3, r0, CFG_IBAT0U@h
	ori r3, r3, CFG_IBAT0U@l
	mtspr IBAT0L, r4
	mtspr IBAT0U, r3
	isync

	/* DBAT 0 */
	addis r4, r0, CFG_DBAT0L@h
	ori r4, r4, CFG_DBAT0L@l
	addis r3, r0, CFG_DBAT0U@h
	ori r3, r3, CFG_DBAT0U@l
	mtspr DBAT0L, r4
	mtspr DBAT0U, r3
	isync

	/* IBAT 1 */
	addis r4, r0, CFG_IBAT1L@h
	ori r4, r4, CFG_IBAT1L@l
	addis r3, r0, CFG_IBAT1U@h
	ori r3, r3, CFG_IBAT1U@l
	mtspr IBAT1L, r4
	mtspr IBAT1U, r3
	isync

	/* DBAT 1 */
	addis r4, r0, CFG_DBAT1L@h
	ori r4, r4, CFG_DBAT1L@l
	addis r3, r0, CFG_DBAT1U@h
	ori r3, r3, CFG_DBAT1U@l
	mtspr DBAT1L, r4
	mtspr DBAT1U, r3
	isync

	/* IBAT 2 */
	addis r4, r0, CFG_IBAT2L@h
	ori r4, r4, CFG_IBAT2L@l
	addis r3, r0, CFG_IBAT2U@h
	ori r3, r3, CFG_IBAT2U@l
	mtspr IBAT2L, r4
	mtspr IBAT2U, r3
	isync

	/* DBAT 2 */
	addis r4, r0, CFG_DBAT2L@h
	ori r4, r4, CFG_DBAT2L@l
	addis r3, r0, CFG_DBAT2U@h
	ori r3, r3, CFG_DBAT2U@l
	mtspr DBAT2L, r4
	mtspr DBAT2U, r3
	isync

	/* IBAT 3 */
	addis r4, r0, CFG_IBAT3L@h
	ori r4, r4, CFG_IBAT3L@l
	addis r3, r0, CFG_IBAT3U@h
	ori r3, r3, CFG_IBAT3U@l
	mtspr IBAT3L, r4
	mtspr IBAT3U, r3
	isync

	/* DBAT 3 */
	addis r4, r0, CFG_DBAT3L@h
	ori r4, r4, CFG_DBAT3L@l
	addis r3, r0, CFG_DBAT3U@h
	ori r3, r3, CFG_DBAT3U@l
	mtspr DBAT3L, r4
	mtspr DBAT3U, r3
	isync

	/* IBAT 4 */
	addis r4, r0, CFG_IBAT4L@h
	ori r4, r4, CFG_IBAT4L@l
	addis r3, r0, CFG_IBAT4U@h
	ori r3, r3, CFG_IBAT4U@l
	mtspr IBAT4L, r4
	mtspr IBAT4U, r3
	isync

	/* DBAT 4 */
	addis r4, r0, CFG_DBAT4L@h
	ori r4, r4, CFG_DBAT4L@l
	addis r3, r0, CFG_DBAT4U@h
	ori r3, r3, CFG_DBAT4U@l
	mtspr DBAT4L, r4
	mtspr DBAT4U, r3
	isync

	/* IBAT 5 */
	addis r4, r0, CFG_IBAT5L@h
	ori r4, r4, CFG_IBAT5L@l
	addis r3, r0, CFG_IBAT5U@h
	ori r3, r3, CFG_IBAT5U@l
	mtspr IBAT5L, r4
	mtspr IBAT5U, r3
	isync

	/* DBAT 5 */
	addis r4, r0, CFG_DBAT5L@h
	ori r4, r4, CFG_DBAT5L@l
	addis r3, r0, CFG_DBAT5U@h
	ori r3, r3, CFG_DBAT5U@l
	mtspr DBAT5L, r4
	mtspr DBAT5U, r3
	isync

	/* IBAT 6 */
	addis r4, r0, CFG_IBAT6L@h
	ori r4, r4, CFG_IBAT6L@l
	addis r3, r0, CFG_IBAT6U@h
	ori r3, r3, CFG_IBAT6U@l
	mtspr IBAT6L, r4
	mtspr IBAT6U, r3
	isync

	/* DBAT 6 */
	addis r4, r0, CFG_DBAT6L@h
	ori r4, r4, CFG_DBAT6L@l
	addis r3, r0, CFG_DBAT6U@h
	ori r3, r3, CFG_DBAT6U@l
	mtspr DBAT6L, r4
	mtspr DBAT6U, r3
	isync

	/* IBAT 7 */
	addis r4, r0, CFG_IBAT7L@h
	ori r4, r4, CFG_IBAT7L@l
	addis r3, r0, CFG_IBAT7U@h
	ori r3, r3, CFG_IBAT7U@l
	mtspr IBAT7L, r4
	mtspr IBAT7U, r3
	isync

	/* DBAT 7 */
	addis r4, r0, CFG_DBAT7L@h
	ori r4, r4, CFG_DBAT7L@l
	addis r3, r0, CFG_DBAT7U@h
	ori r3, r3, CFG_DBAT7U@l
	mtspr DBAT7L, r4
	mtspr DBAT7U, r3
	isync
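	/* Flush the TLBs: step an effective address through every 4 KB
	 * page up to 0x00040000, issuing tlbie for each one so that all
	 * TLB congruence classes are invalidated.
	 */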
1:
	addis r3, 0, 0x0000
	addis r5, 0, 0x4	/* upper bound of 0x00040000 for 7400/750 */
	isync
tlblp:
	tlbie r3
	sync
	addi r3, r3, 0x1000
	cmp 0, 0, r3, r5
	blt tlblp
	blr

	.globl enable_addr_trans
enable_addr_trans:
	/* enable address translation */
	mfmsr r5
	ori r5, r5, (MSR_IR | MSR_DR)
	mtmsr r5
	isync
	blr

	.globl disable_addr_trans
disable_addr_trans:
	/* disable address translation */
	mflr r4
	mfmsr r3
	andi. r0, r3, (MSR_IR | MSR_DR)
	beqlr
	andc r3, r3, r0
	mtspr SRR0, r4
	mtspr SRR1, r3
	rfi

/*
 * This code finishes saving the registers to the exception frame
 * and jumps to the appropriate handler for the exception.
 * Register r21 is pointer into trap frame, r1 has new stack pointer.
 */
	.globl transfer_to_handler
transfer_to_handler:
	stw r22,_NIP(r21)
	lis r22,MSR_POW@h
	andc r23,r23,r22
	stw r23,_MSR(r21)
	SAVE_GPR(7, r21)
	SAVE_4GPRS(8, r21)
	SAVE_8GPRS(12, r21)
	SAVE_8GPRS(24, r21)
	mflr r23
	andi. r24,r23,0x3f00	/* get vector offset */
	stw r24,TRAP(r21)
	li r22,0
	stw r22,RESULT(r21)
	mtspr SPRG2,r22		/* r1 is now kernel sp */
	lwz r24,0(r23)		/* virtual address of handler */
	lwz r23,4(r23)		/* where to go when done */
	mtspr SRR0,r24
	mtspr SRR1,r20
	mtlr r23
	SYNC
	rfi			/* jump to handler, enable MMU */

int_return:
	mfmsr r28		/* Disable interrupts */
	li r4,0
	ori r4,r4,MSR_EE
	andc r28,r28,r4
	SYNC			/* Some chip revs need this... */
	mtmsr r28
	SYNC
	lwz r2,_CTR(r1)
	lwz r0,_LINK(r1)
	mtctr r2
	mtlr r0
	lwz r2,_XER(r1)
	lwz r0,_CCR(r1)
	mtspr XER,r2
	mtcrf 0xFF,r0
	REST_10GPRS(3, r1)
	REST_10GPRS(13, r1)
	REST_8GPRS(23, r1)
	REST_GPR(31, r1)
	lwz r2,_NIP(r1)		/* Restore environment */
	lwz r0,_MSR(r1)
	mtspr SRR0,r2
	mtspr SRR1,r0
	lwz r0,GPR0(r1)
	lwz r2,GPR2(r1)
	lwz r1,GPR1(r1)
	SYNC
	rfi
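/*
 * dc_read - placeholder on 86xx; it simply returns without reading
 * anything.
 */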
	.globl dc_read
dc_read:
	blr
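/*
 * get_pvr/get_svr - return the Processor Version Register and the
 * System Version Register, respectively, in r3.
 */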
	.globl get_pvr
get_pvr:
	mfspr r3, PVR
	blr

	.globl get_svr
get_svr:
	mfspr r3, SVR
	blr

/*
 * Function: in8
 * Description: Input 8 bits
 */
	.globl in8
in8:
	lbz r3,0x0000(r3)
	blr

/*
 * Function: out8
 * Description: Output 8 bits
 */
	.globl out8
out8:
	stb r4,0x0000(r3)
	blr

/*
 * Function: out16
 * Description: Output 16 bits
 */
	.globl out16
out16:
	sth r4,0x0000(r3)
	blr

/*
 * Function: out16r
 * Description: Byte reverse and output 16 bits
 */
	.globl out16r
out16r:
	sthbrx r4,r0,r3
	blr

/*
 * Function: out32
 * Description: Output 32 bits
 */
	.globl out32
out32:
	stw r4,0x0000(r3)
	blr

/*
 * Function: out32r
 * Description: Byte reverse and output 32 bits
 */
	.globl out32r
out32r:
	stwbrx r4,r0,r3
	blr

/*
 * Function: in16
 * Description: Input 16 bits
 */
	.globl in16
in16:
	lhz r3,0x0000(r3)
	blr

/*
 * Function: in16r
 * Description: Input 16 bits and byte reverse
 */
	.globl in16r
in16r:
	lhbrx r3,r0,r3
	blr

/*
 * Function: in32
 * Description: Input 32 bits
 */
	.globl in32
in32:
	lwz r3,0x0000(r3)
	blr

/*
 * Function: in32r
 * Description: Input 32 bits and byte reverse
 */
	.globl in32r
in32r:
	lwbrx r3,r0,r3
	blr

/*
 * Function: ppcDcbf
 * Description: Data Cache block flush
 * Input: r3 = effective address
 * Output: none.
 */
	.globl ppcDcbf
ppcDcbf:
	dcbf r0,r3
	blr

/*
 * Function: ppcDcbi
 * Description: Data Cache block Invalidate
 * Input: r3 = effective address
 * Output: none.
 */
	.globl ppcDcbi
ppcDcbi:
	dcbi r0,r3
	blr

/*
 * Function: ppcDcbz
 * Description: Data Cache block zero.
 * Input: r3 = effective address
 * Output: none.
 */
	.globl ppcDcbz
ppcDcbz:
	dcbz r0,r3
	blr

/*
 * Function: ppcSync
 * Description: Processor Synchronize
 * Input: none.
 * Output: none.
 */
	.globl ppcSync
ppcSync:
	sync
	blr

/*
 * void relocate_code (addr_sp, gd, addr_moni)
 *
 * This "function" does not return, instead it continues in RAM
 * after relocating the monitor code.
 *
 * r3 = dest
 * r4 = src
 * r5 = length in bytes
 * r6 = cachelinesize
 */
	.globl relocate_code
relocate_code:
	mr r1, r3		/* Set new stack pointer */
	mr r9, r4		/* Save copy of Global Data pointer */
	mr r29, r9		/* Save for DECLARE_GLOBAL_DATA_PTR */
	mr r10, r5		/* Save copy of Destination Address */

	mr r3, r5		/* Destination Address */
	lis r4, CFG_MONITOR_BASE@h	/* Source Address */
	ori r4, r4, CFG_MONITOR_BASE@l
	lwz r5, GOT(__init_end)
	sub r5, r5, r4
	li r6, CFG_CACHELINE_SIZE	/* Cache Line Size */

	/*
	 * Fix GOT pointer:
	 *
	 * New GOT-PTR = (old GOT-PTR - CFG_MONITOR_BASE) + Destination Address
	 *
	 * Offset:
	 */
	sub r15, r10, r4

	/* First our own GOT */
	add r14, r14, r15
	/* then the one used by the C code */
	add r30, r30, r15

	/*
	 * Now relocate code
	 */
#ifdef CONFIG_ECC
	bl board_relocate_rom
	sync
	mr r3, r10		/* Destination Address */
	lis r4, CFG_MONITOR_BASE@h	/* Source Address */
	ori r4, r4, CFG_MONITOR_BASE@l
	lwz r5, GOT(__init_end)
	sub r5, r5, r4
	li r6, CFG_CACHELINE_SIZE	/* Cache Line Size */
#else
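	/*
	 * Copy the image a word at a time. When the destination (r3) is
	 * below the source (r4), copy forwards; otherwise copy backwards
	 * so an overlapping copy cannot clobber data not yet moved.
	 */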
	cmplw cr1,r3,r4
	addi r0,r5,3
	srwi. r0,r0,2
	beq cr1,4f		/* In place copy is not necessary */
	beq 7f			/* Protect against 0 count */
	mtctr r0
	bge cr1,2f

	la r8,-4(r4)
	la r7,-4(r3)
1:	lwzu r0,4(r8)
	stwu r0,4(r7)
	bdnz 1b
	b 4f

2:	slwi r0,r0,2
	add r8,r4,r0
	add r7,r3,r0
3:	lwzu r0,-4(r8)
	stwu r0,-4(r7)
	bdnz 3b
#endif

/*
 * Now flush the cache: note that we must start from a cache aligned
 * address. Otherwise we might miss one cache line.
 */
4:	cmpwi r6,0
	add r5,r3,r5
	beq 7f			/* Always flush prefetch queue in any case */
	subi r0,r6,1
	andc r3,r3,r0
	mr r4,r3
5:	dcbst 0,r4
	add r4,r4,r6
	cmplw r4,r5
	blt 5b
	sync			/* Wait for all dcbst to complete on bus */
	mr r4,r3
6:	icbi 0,r4
	add r4,r4,r6
	cmplw r4,r5
	blt 6b
7:	sync			/* Wait for all icbi to complete on bus */
	isync

/*
 * We are done. Do not return, instead branch to second part of board
 * initialization, now running from RAM.
 */
	addi r0, r10, in_ram - _start + EXC_OFF_SYS_RESET
	mtlr r0
	blr

in_ram:
#ifdef CONFIG_ECC
	bl board_init_ecc
#endif
	/*
	 * Relocation Function, r14 points to got2+0x8000
	 *
	 * Adjust got2 pointers, no need to check for 0, this code
	 * already puts a few entries in the table.
	 */
	li r0,__got2_entries@sectoff@l
	la r3,GOT(_GOT2_TABLE_)
	lwz r11,GOT(_GOT2_TABLE_)
	mtctr r0
	sub r11,r3,r11
	addi r3,r3,-4
1:	lwzu r0,4(r3)
	add r0,r0,r11
	stw r0,0(r3)
	bdnz 1b

	/*
	 * Now adjust the fixups and the pointers to the fixups
	 * in case we need to move ourselves again.
	 */
2:	li r0,__fixup_entries@sectoff@l
	lwz r3,GOT(_FIXUP_TABLE_)
	cmpwi r0,0
	mtctr r0
	addi r3,r3,-4
	beq 4f
3:	lwzu r4,4(r3)
	lwzux r0,r4,r11
	add r0,r0,r11
	stw r10,0(r3)
	stw r0,0(r4)
	bdnz 3b
4:
/* clear_bss: */
	/*
	 * Now clear BSS segment
	 */
	lwz r3,GOT(__bss_start)
	lwz r4,GOT(_end)

	cmplw 0, r3, r4
	beq 6f

	li r0, 0
5:
	stw r0, 0(r3)
	addi r3, r3, 4
	cmplw 0, r3, r4
	bne 5b

6:
	mr r3, r9		/* Init Data pointer */
	mr r4, r10		/* Destination Address */
	bl board_init_r

	/* not reached - end relocate_code */
/*-----------------------------------------------------------------------*/

/*
 * Copy exception vector code to low memory
 *
 * r3: dest_addr
 * r7: source address, r8: end address, r9: target address
 */
	.globl trap_init
trap_init:
	lwz r7, GOT(_start)
	lwz r8, GOT(_end_of_vectors)

	li r9, 0x100		/* reset vector always at 0x100 */

	cmplw 0, r7, r8
	bgelr			/* return if r7>=r8 - just in case */

	mflr r4			/* save link register */
1:
	lwz r0, 0(r7)
	stw r0, 0(r9)
	addi r7, r7, 4
	addi r9, r9, 4
	cmplw 0, r7, r8
	bne 1b

	/*
	 * relocate `hdlr' and `int_return' entries
	 */
	li r7, .L_MachineCheck - _start + EXC_OFF_SYS_RESET
	li r8, Alignment - _start + EXC_OFF_SYS_RESET
2:
	bl trap_reloc
	addi r7, r7, 0x100	/* next exception vector */
	cmplw 0, r7, r8
	blt 2b

	li r7, .L_Alignment - _start + EXC_OFF_SYS_RESET
	bl trap_reloc

	li r7, .L_ProgramCheck - _start + EXC_OFF_SYS_RESET
	bl trap_reloc

	li r7, .L_FPUnavailable - _start + EXC_OFF_SYS_RESET
	li r8, SystemCall - _start + EXC_OFF_SYS_RESET
3:
	bl trap_reloc
	addi r7, r7, 0x100	/* next exception vector */
	cmplw 0, r7, r8
	blt 3b

	li r7, .L_SingleStep - _start + EXC_OFF_SYS_RESET
	li r8, _end_of_vectors - _start + EXC_OFF_SYS_RESET
4:
	bl trap_reloc
	addi r7, r7, 0x100	/* next exception vector */
	cmplw 0, r7, r8
	blt 4b

	/* enable exceptions from RAM vectors */
	mfmsr r7
	li r8,MSR_IP
	andc r7,r7,r8
	ori r7,r7,MSR_ME	/* Enable Machine Check */
	mtmsr r7

	mtlr r4			/* restore link register */
	blr

/*
 * Function: relocate entries for one exception vector
 */
trap_reloc:
	lwz r0, 0(r7)		/* hdlr ... */
	add r0, r0, r3		/* ... += dest_addr */
	stw r0, 0(r7)

	lwz r0, 4(r7)		/* int_return ... */
	add r0, r0, r3		/* ... += dest_addr */
	stw r0, 4(r7)

	sync
	isync
	blr

	.globl enable_ext_addr
enable_ext_addr:
	mfspr r0, HID0
	lis r0, (HID0_HIGH_BAT_EN | HID0_XBSEN | HID0_XAEN)@h
	ori r0, r0, (HID0_HIGH_BAT_EN | HID0_XBSEN | HID0_XAEN)@l
	mtspr HID0, r0
	sync
	isync
	blr

#if (CFG_CCSRBAR_DEFAULT != CFG_CCSRBAR)
	.globl setup_ccsrbar
setup_ccsrbar:
	/* Special sequence needed to update CCSRBAR itself */
	lis r4, CFG_CCSRBAR_DEFAULT@h
	ori r4, r4, CFG_CCSRBAR_DEFAULT@l

	lis r5, CFG_CCSRBAR@h
	ori r5, r5, CFG_CCSRBAR@l
	srwi r6,r5,12
	stw r6, 0(r4)
	isync
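	/* Perform a load from an address that is not affected by the
	 * CCSRBAR move (the top of the boot flash), then read CCSRBAR
	 * back through its new location, so the update has taken effect
	 * before any further CCSR accesses.
	 */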
	lis r5, 0xffff
	ori r5,r5,0xf000
	lwz r5, 0(r5)
	isync

	lis r3, CFG_CCSRBAR@h
	lwz r5, CFG_CCSRBAR@l(r3)
	isync
	blr
#endif

#ifdef CFG_INIT_RAM_LOCK
lock_ram_in_cache:
	/* Allocate Initial RAM in data cache.
	 */
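	/* dcbz establishes (and zeroes) each 32-byte line of the init-RAM
	 * range in the data cache without reading from backing store.
	 */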
	lis r3, (CFG_INIT_RAM_ADDR & ~31)@h
	ori r3, r3, (CFG_INIT_RAM_ADDR & ~31)@l
	li r2, ((CFG_INIT_RAM_END & ~31) + \
		(CFG_INIT_RAM_ADDR & 31) + 31) / 32
	mtctr r2
1:
	dcbz r0, r3
	addi r3, r3, 32
	bdnz 1b
#if 1
	/* Lock the data cache */
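	/* Setting HID0 bit 0x1000 (DLOCK on the e600 core) keeps the
	 * just-allocated lines from being evicted.
	 */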
	mfspr r0, HID0
	ori r0, r0, 0x1000
	sync
	mtspr HID0, r0
	sync
	blr
#endif
#if 0
	/* Lock the first way of the data cache */
	mfspr r0, LDSTCR
	ori r0, r0, 0x0080
#if defined(CONFIG_ALTIVEC)
	dssall
#endif
	sync
	mtspr LDSTCR, r0
	sync
	isync
	blr
#endif

	.globl unlock_ram_in_cache
unlock_ram_in_cache:
	/* invalidate the INIT_RAM section */
	lis r3, (CFG_INIT_RAM_ADDR & ~31)@h
	ori r3, r3, (CFG_INIT_RAM_ADDR & ~31)@l
	li r2, ((CFG_INIT_RAM_END & ~31) + \
		(CFG_INIT_RAM_ADDR & 31) + 31) / 32
	mtctr r2
1:	icbi r0, r3
	addi r3, r3, 32
	bdnz 1b
	sync			/* Wait for all icbi to complete on bus */
	isync
#if 1
	/* Unlock the data cache and invalidate it */
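	/* Clear HID0 bit 0x1000 (DLOCK) to unlock the cache, then set
	 * bit 0x0400 (DCFI) to flash-invalidate its contents.
	 */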
	mfspr r0, HID0
	li r3,0x1000
	andc r0,r0,r3
	li r3,0x0400
	or r0,r0,r3
	sync
	mtspr HID0, r0
	sync
	blr
#endif
#if 0
	/* Unlock the first way of the data cache */
	mfspr r0, LDSTCR
	li r3,0x0080
	andc r0,r0,r3
#ifdef CONFIG_ALTIVEC
	dssall
#endif
	sync
	mtspr LDSTCR, r0
	sync
	isync
	li r3,0x0400
	or r0,r0,r3
	sync
	mtspr HID0, r0
	sync
	blr
#endif
#endif

/* If this is a multi-cpu system then we need to handle the
 * 2nd cpu. The assumption is that the 2nd cpu is being
 * held in boot holdoff mode until the 1st cpu unlocks it
 * from Linux. We'll do some basic cpu init and then pass
 * it to the Linux Reset Vector.
 * Sri: Much of this initialization is not required. Linux
 * rewrites the bats, and the sprs and also enables the L1 cache.
 */
#if (CONFIG_NUM_CPUS > 1)
	.globl secondary_cpu_setup
secondary_cpu_setup:
	/* Do only core setup on all cores except cpu0 */
	bl invalidate_bats
	sync
	bl enable_ext_addr

#ifdef CFG_L2
	/* init the L2 cache */
	addis r3, r0, L2_INIT@h
	ori r3, r3, L2_INIT@l
	sync
	mtspr l2cr, r3
#ifdef CONFIG_ALTIVEC
	dssall
#endif
	/* invalidate the L2 cache */
	bl l2cache_invalidate
	sync
#endif

	/* enable and invalidate the data cache */
	bl dcache_enable
	sync

	/* enable and invalidate the instruction cache */
	bl icache_enable
	sync

	/* TBEN in HID0 */
	mfspr r4, HID0
	oris r4, r4, 0x0400
	mtspr HID0, r4
	sync
	isync

	/* MCP|SYNCBE|ABE in HID1 */
	mfspr r4, HID1
	oris r4, r4, 0x8000
	ori r4, r4, 0x0C00
	mtspr HID1, r4
	sync
	isync

	lis r3, CONFIG_LINUX_RESET_VEC@h
	ori r3, r3, CONFIG_LINUX_RESET_VEC@l
	mtlr r3
	blr

	/* Never Returns, Running in Linux Now */
#endif