/*
 * Copyright 2004, 2007, 2011 Freescale Semiconductor.
 * Srikanth Srinivasan <srikanth.srinivaan@freescale.com>
 *
 * See file CREDITS for list of people who contributed to this
 * project.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 2 of
 * the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston,
 * MA 02111-1307 USA
 */
/* U-Boot - Startup Code for 86xx PowerPC based Embedded Boards
 *
 * The processor starts at 0xfff00100 and the code is executed
 * from flash. The code is linked to run at another address in
 * memory, but that is fine as long as we don't jump around before
 * relocating. board_init lies at a quite high address and once the
 * cpu has jumped there, everything is ok.
 */
#include <asm-offsets.h>
#include <config.h>
#include <mpc86xx.h>
#include <version.h>

#include <ppc_asm.tmpl>
#include <ppc_defs.h>

#include <asm/cache.h>
#include <asm/mmu.h>
#include <asm/u-boot.h>
/*
 * Need MSR_DR | MSR_IR enabled to access I/O (printf) in exceptions
 */

/*
 * Set up GOT: Global Offset Table
 *
 * Use r12 to access the GOT
 */
	START_GOT
	GOT_ENTRY(_GOT2_TABLE_)
	GOT_ENTRY(_FIXUP_TABLE_)

	GOT_ENTRY(_start)
	GOT_ENTRY(_start_of_vectors)
	GOT_ENTRY(_end_of_vectors)
	GOT_ENTRY(transfer_to_handler)

	GOT_ENTRY(__init_end)
	GOT_ENTRY(__bss_end__)
	GOT_ENTRY(__bss_start)
	END_GOT
/*
 * r3 - 1st arg to board_init(): IMMR pointer
 * r4 - 2nd arg to board_init(): boot flag
 */
	.text
	.long	0x27051956		/* U-Boot Magic Number		*/
	.globl	version_string
version_string:
	.ascii	U_BOOT_VERSION_STRING, "\0"

	. = EXC_OFF_SYS_RESET
	.globl	_start
_start:
	b	boot_cold
	/* the boot code is located below the exception table */

	.globl	_start_of_vectors
_start_of_vectors:

/* Machine check */
	STD_EXCEPTION(0x200, MachineCheck, MachineCheckException)

/* Data Storage exception. */
	STD_EXCEPTION(0x300, DataStorage, UnknownException)

/* Instruction Storage exception. */
	STD_EXCEPTION(0x400, InstStorage, UnknownException)

/* External Interrupt exception. */
	STD_EXCEPTION(0x500, ExtInterrupt, external_interrupt)

/* Alignment exception. */
	. = 0x600
Alignment:
	EXCEPTION_PROLOG(SRR0, SRR1)
	mfspr	r4,DAR
	stw	r4,_DAR(r21)
	mfspr	r5,DSISR
	stw	r5,_DSISR(r21)
	addi	r3,r1,STACK_FRAME_OVERHEAD
	EXC_XFER_TEMPLATE(Alignment, AlignmentException, MSR_KERNEL, COPY_EE)

/* Program check exception */
	. = 0x700
ProgramCheck:
	EXCEPTION_PROLOG(SRR0, SRR1)
	addi	r3,r1,STACK_FRAME_OVERHEAD
	EXC_XFER_TEMPLATE(ProgramCheck, ProgramCheckException,
		MSR_KERNEL, COPY_EE)

	STD_EXCEPTION(0x800, FPUnavailable, UnknownException)

	/* I guess we could implement decrementer, and may have
	 * to someday for timekeeping.
	 */
	STD_EXCEPTION(0x900, Decrementer, timer_interrupt)
	STD_EXCEPTION(0xa00, Trap_0a, UnknownException)
	STD_EXCEPTION(0xb00, Trap_0b, UnknownException)
	STD_EXCEPTION(0xc00, SystemCall, UnknownException)
	STD_EXCEPTION(0xd00, SingleStep, UnknownException)
	STD_EXCEPTION(0xe00, Trap_0e, UnknownException)
	STD_EXCEPTION(0xf00, Trap_0f, UnknownException)
	STD_EXCEPTION(0x1000, SoftEmu, SoftEmuException)
	STD_EXCEPTION(0x1100, InstructionTLBMiss, UnknownException)
	STD_EXCEPTION(0x1200, DataTLBMiss, UnknownException)
	STD_EXCEPTION(0x1300, InstructionTLBError, UnknownException)
	STD_EXCEPTION(0x1400, DataTLBError, UnknownException)
	STD_EXCEPTION(0x1500, Reserved5, UnknownException)
	STD_EXCEPTION(0x1600, Reserved6, UnknownException)
	STD_EXCEPTION(0x1700, Reserved7, UnknownException)
	STD_EXCEPTION(0x1800, Reserved8, UnknownException)
	STD_EXCEPTION(0x1900, Reserved9, UnknownException)
	STD_EXCEPTION(0x1a00, ReservedA, UnknownException)
	STD_EXCEPTION(0x1b00, ReservedB, UnknownException)
	STD_EXCEPTION(0x1c00, DataBreakpoint, UnknownException)
	STD_EXCEPTION(0x1d00, InstructionBreakpoint, UnknownException)
	STD_EXCEPTION(0x1e00, PeripheralBreakpoint, UnknownException)
	STD_EXCEPTION(0x1f00, DevPortBreakpoint, UnknownException)

	.globl	_end_of_vectors
_end_of_vectors:

	. = 0x2000
boot_cold:
	/*
	 * NOTE: Only Cpu 0 will ever come here.  Other cores go to an
	 * address specified by the BPTR
	 */
1:
#ifdef CONFIG_SYS_RAMBOOT
	/* disable everything */
	li	r0, 0
	mtspr	HID0, r0
	sync
	mtmsr	0
#endif

	/* Invalidate BATs */
	bl	invalidate_bats
	sync
	/* Invalidate all of TLB before MMU turn on */
	bl	clear_tlbs
	sync

#ifdef CONFIG_SYS_L2
	/* init the L2 cache */
	lis	r3, L2_INIT@h
	ori	r3, r3, L2_INIT@l
	mtspr	l2cr, r3
	/* invalidate the L2 cache */
	bl	l2cache_invalidate
	sync
#endif

	/*
	 * Calculate absolute address in FLASH and jump there
	 *------------------------------------------------------*/
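	/*
	 * _start is placed at offset EXC_OFF_SYS_RESET within the image,
	 * so the flash address of in_flash is
	 * CONFIG_SYS_MONITOR_BASE_EARLY + EXC_OFF_SYS_RESET + (in_flash - _start).
	 */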
	lis	r3, CONFIG_SYS_MONITOR_BASE_EARLY@h
	ori	r3, r3, CONFIG_SYS_MONITOR_BASE_EARLY@l
	addi	r3, r3, in_flash - _start + EXC_OFF_SYS_RESET
	mtlr	r3
	blr

in_flash:
	/* let the C-code set up the rest */
	/*					*/
	/* Be careful to keep code relocatable !	*/
	/*------------------------------------------------------*/
	/* perform low-level init */

	/* enable extended addressing */
	bl	enable_ext_addr

	/* setup the bats */
	bl	early_bats

	/*
	 * Cache must be enabled here for stack-in-cache trick.
	 * This means we need to enable the BATS.
	 * Cache should be turned on after BATs, since by default
	 * everything is write-through.
	 */

	/* enable address translation */
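	/*
	 * Changes to MSR[IR]/MSR[DR] take effect through an rfi:
	 * SRR0 holds the address to continue at, SRR1 the new MSR value.
	 */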
	mfmsr	r5
	ori	r5, r5, (MSR_IR | MSR_DR)
	lis	r3, addr_trans_enabled@h
	ori	r3, r3, addr_trans_enabled@l
	mtspr	SPRN_SRR0, r3
	mtspr	SPRN_SRR1, r5
	rfi

addr_trans_enabled:
	/* enable and invalidate the data cache */
/*	bl	l1dcache_enable	*/
	bl	dcache_enable
	sync

#if 1
	bl	icache_enable
#endif

#ifdef CONFIG_SYS_INIT_RAM_LOCK
	bl	lock_ram_in_cache
	sync
#endif

#if (CONFIG_SYS_CCSRBAR_DEFAULT != CONFIG_SYS_CCSRBAR)
	bl	setup_ccsrbar
#endif

	/* set up the stack pointer in our newly created
	 * cache-ram (r1) */
	lis	r1, (CONFIG_SYS_INIT_RAM_ADDR + CONFIG_SYS_GBL_DATA_OFFSET)@h
	ori	r1, r1, (CONFIG_SYS_INIT_RAM_ADDR + CONFIG_SYS_GBL_DATA_OFFSET)@l

	li	r0, 0		/* Make room for stack frame header and	*/
	stwu	r0, -4(r1)	/* clear final stack frame so that	*/
	stwu	r0, -4(r1)	/* stack backtraces terminate cleanly	*/

	GET_GOT			/* initialize GOT access		*/

	/* run low-level CPU init code (from Flash) */
	bl	cpu_init_f
	sync

#ifdef RUN_DIAG
	/* Load PX_AUX register address in r4 */
	lis	r4, PIXIS_BASE@h
	ori	r4, r4, 0x6
	/* Load contents of PX_AUX in r3 bits 24 to 31 */
	lbz	r3, 0(r4)

	/* Mask and obtain the bit in r3 */
	rlwinm.	r3, r3, 0, 24, 24

	/* If not zero, jump and continue with u-boot */
	bne	diag_done

	/* Load back contents of PX_AUX in r3 bits 24 to 31 */
	lbz	r3, 0(r4)

	/* Set the MSB of the register value */
	ori	r3, r3, 0x80

	/* Write value in r3 back to PX_AUX */
	stb	r3, 0(r4)

	/* Get the address to jump to in r3 */
	lis	r3, CONFIG_SYS_DIAG_ADDR@h
	ori	r3, r3, CONFIG_SYS_DIAG_ADDR@l

	/* Load the LR with the branch address */
	mtlr	r3

	/* Branch to diagnostic */
	blr

diag_done:
#endif

/*	bl	l2cache_enable	*/

	/* run 1st part of board init code (from Flash) */
	bl	board_init_f
	sync

	/* NOTREACHED - board_init_f() does not return */

	.globl	invalidate_bats
invalidate_bats:
	li	r0, 0
	/* invalidate BATs */
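	/*
	 * Writing 0 to the upper BAT registers clears their Vs/Vp valid
	 * bits, which disables the corresponding BAT translations.
	 */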
	mtspr	IBAT0U, r0
	mtspr	IBAT1U, r0
	mtspr	IBAT2U, r0
	mtspr	IBAT3U, r0
	mtspr	IBAT4U, r0
	mtspr	IBAT5U, r0
	mtspr	IBAT6U, r0
	mtspr	IBAT7U, r0
	isync
	mtspr	DBAT0U, r0
	mtspr	DBAT1U, r0
	mtspr	DBAT2U, r0
	mtspr	DBAT3U, r0
	mtspr	DBAT4U, r0
	mtspr	DBAT5U, r0
	mtspr	DBAT6U, r0
	mtspr	DBAT7U, r0
	isync
	sync
	blr

#define CONFIG_BAT_PAIR(n) \
	lis	r4, CONFIG_SYS_IBAT##n##L@h;		\
	ori	r4, r4, CONFIG_SYS_IBAT##n##L@l;	\
	lis	r3, CONFIG_SYS_IBAT##n##U@h;		\
	ori	r3, r3, CONFIG_SYS_IBAT##n##U@l;	\
	mtspr	IBAT##n##L, r4;				\
	mtspr	IBAT##n##U, r3;				\
	lis	r4, CONFIG_SYS_DBAT##n##L@h;		\
	ori	r4, r4, CONFIG_SYS_DBAT##n##L@l;	\
	lis	r3, CONFIG_SYS_DBAT##n##U@h;		\
	ori	r3, r3, CONFIG_SYS_DBAT##n##U@l;	\
	mtspr	DBAT##n##L, r4;				\
	mtspr	DBAT##n##U, r3;

/*
 * setup_bats:
 *
 * Set up the final BAT registers now that setup is done.
 *
 * Assumes that:
 *	1) Address translation is enabled upon entry
 *	2) The boot rom is still accessible via 1:1 translation
 */
	.globl	setup_bats
setup_bats:
	mflr	r5
	sync

	/*
	 * When we disable address translation, we will get 1:1 (VA==PA)
	 * translation. The only place we know for sure is safe for that is
	 * the bootrom where we originally started out. Pop back into there.
	 */
	lis	r4, CONFIG_SYS_MONITOR_BASE_EARLY@h
	ori	r4, r4, CONFIG_SYS_MONITOR_BASE_EARLY@l
	addi	r4, r4, trans_disabled - _start + EXC_OFF_SYS_RESET

	/* disable address translation */
	mfmsr	r3
	rlwinm	r3, r3, 0, 28, 25
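	/* the 28..25 mask wraps around and clears bits 26-27, i.e. MSR[IR] and MSR[DR] */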
	mtspr	SRR0, r4
	mtspr	SRR1, r3
	rfi

trans_disabled:
#if defined(CONFIG_SYS_DBAT0U) && defined(CONFIG_SYS_DBAT0L) \
	&& defined(CONFIG_SYS_IBAT0U) && defined(CONFIG_SYS_IBAT0L)
	CONFIG_BAT_PAIR(0)
#endif
	CONFIG_BAT_PAIR(1)
	CONFIG_BAT_PAIR(2)
	CONFIG_BAT_PAIR(3)
	CONFIG_BAT_PAIR(4)
	CONFIG_BAT_PAIR(5)
	CONFIG_BAT_PAIR(6)
	CONFIG_BAT_PAIR(7)

	sync
	isync

	/* Turn translation back on and return */
	mfmsr	r3
	ori	r3, r3, (MSR_IR | MSR_DR)
	mtspr	SPRN_SRR0, r5
	mtspr	SPRN_SRR1, r3
	rfi

/*
 * early_bats:
 *
 * Set up bats needed early on - this is usually the BAT for the
 * stack-in-cache, the Flash, and CCSR space
 */
	.globl	early_bats
early_bats:
	/* IBAT 3 */
	lis	r4, CONFIG_SYS_IBAT3L@h
	ori	r4, r4, CONFIG_SYS_IBAT3L@l
	lis	r3, CONFIG_SYS_IBAT3U@h
	ori	r3, r3, CONFIG_SYS_IBAT3U@l
	mtspr	IBAT3L, r4
	mtspr	IBAT3U, r3
	isync

	/* DBAT 3 */
	lis	r4, CONFIG_SYS_DBAT3L@h
	ori	r4, r4, CONFIG_SYS_DBAT3L@l
	lis	r3, CONFIG_SYS_DBAT3U@h
	ori	r3, r3, CONFIG_SYS_DBAT3U@l
	mtspr	DBAT3L, r4
	mtspr	DBAT3U, r3
	isync

	/* IBAT 5 */
	lis	r4, CONFIG_SYS_IBAT5L@h
	ori	r4, r4, CONFIG_SYS_IBAT5L@l
	lis	r3, CONFIG_SYS_IBAT5U@h
	ori	r3, r3, CONFIG_SYS_IBAT5U@l
	mtspr	IBAT5L, r4
	mtspr	IBAT5U, r3
	isync

	/* DBAT 5 */
	lis	r4, CONFIG_SYS_DBAT5L@h
	ori	r4, r4, CONFIG_SYS_DBAT5L@l
	lis	r3, CONFIG_SYS_DBAT5U@h
	ori	r3, r3, CONFIG_SYS_DBAT5U@l
	mtspr	DBAT5L, r4
	mtspr	DBAT5U, r3
	isync

	/* IBAT 6 */
	lis	r4, CONFIG_SYS_IBAT6L_EARLY@h
	ori	r4, r4, CONFIG_SYS_IBAT6L_EARLY@l
	lis	r3, CONFIG_SYS_IBAT6U_EARLY@h
	ori	r3, r3, CONFIG_SYS_IBAT6U_EARLY@l
	mtspr	IBAT6L, r4
	mtspr	IBAT6U, r3
	isync

	/* DBAT 6 */
	lis	r4, CONFIG_SYS_DBAT6L_EARLY@h
	ori	r4, r4, CONFIG_SYS_DBAT6L_EARLY@l
	lis	r3, CONFIG_SYS_DBAT6U_EARLY@h
	ori	r3, r3, CONFIG_SYS_DBAT6U_EARLY@l
	mtspr	DBAT6L, r4
	mtspr	DBAT6U, r3
	isync

#if (CONFIG_SYS_CCSRBAR_DEFAULT != CONFIG_SYS_CCSRBAR)
	/* IBAT 7 */
	lis	r4, CONFIG_SYS_CCSR_DEFAULT_IBATL@h
	ori	r4, r4, CONFIG_SYS_CCSR_DEFAULT_IBATL@l
	lis	r3, CONFIG_SYS_CCSR_DEFAULT_IBATU@h
	ori	r3, r3, CONFIG_SYS_CCSR_DEFAULT_IBATU@l
	mtspr	IBAT7L, r4
	mtspr	IBAT7U, r3
	isync

	/* DBAT 7 */
	lis	r4, CONFIG_SYS_CCSR_DEFAULT_DBATL@h
	ori	r4, r4, CONFIG_SYS_CCSR_DEFAULT_DBATL@l
	lis	r3, CONFIG_SYS_CCSR_DEFAULT_DBATU@h
	ori	r3, r3, CONFIG_SYS_CCSR_DEFAULT_DBATU@l
	mtspr	DBAT7L, r4
	mtspr	DBAT7U, r3
	isync
#endif
	blr

	.globl	clear_tlbs
clear_tlbs:
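	/*
	 * Issue tlbie for effective addresses 0 .. 0x40000 in 4 KB steps
	 * (64 iterations); stepping through the EA index bits this way
	 * invalidates every TLB congruence class.
	 */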
	addis	r3, 0, 0x0000
	addis	r5, 0, 0x4

	isync

tlblp:
	tlbie	r3
	sync
	addi	r3, r3, 0x1000
	cmp	0, 0, r3, r5
	blt	tlblp

	blr

	.globl	disable_addr_trans
disable_addr_trans:
	/* disable address translation */
	mflr	r4
	mfmsr	r3
	andi.	r0, r3, (MSR_IR | MSR_DR)
	beqlr
	andc	r3, r3, r0
	mtspr	SRR0, r4
	mtspr	SRR1, r3
	rfi

/*
 * This code finishes saving the registers to the exception frame
 * and jumps to the appropriate handler for the exception.
 * Register r21 is pointer into trap frame, r1 has new stack pointer.
 */
	.globl	transfer_to_handler
transfer_to_handler:
	stw	r22, _NIP(r21)
	lis	r22, MSR_POW@h
	andc	r23, r23, r22
	stw	r23, _MSR(r21)
	SAVE_GPR(7, r21)
	SAVE_4GPRS(8, r21)
	SAVE_8GPRS(12, r21)
	SAVE_8GPRS(24, r21)
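	/*
	 * The bl in the exception prolog (EXC_XFER_TEMPLATE) leaves LR
	 * pointing at a two-word table: the handler address followed by
	 * the address to continue at once the handler returns.
	 */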
	mflr	r23
	andi.	r24, r23, 0x3f00	/* get vector offset */
	stw	r24, TRAP(r21)
	li	r22, 0
	stw	r22, RESULT(r21)
	mtspr	SPRG2, r22		/* r1 is now kernel sp */
	lwz	r24, 0(r23)		/* virtual address of handler */
	lwz	r23, 4(r23)		/* where to go when done */
	mtspr	SRR0, r24
	mtspr	SRR1, r20
	mtlr	r23
	SYNC
	rfi				/* jump to handler, enable MMU */

int_return:
	mfmsr	r28		/* Disable interrupts */
	li	r4, 0
	ori	r4, r4, MSR_EE
	andc	r28, r28, r4
	SYNC			/* Some chip revs need this... */
	mtmsr	r28
	SYNC
	lwz	r2, _CTR(r1)
	lwz	r0, _LINK(r1)
	mtctr	r2
	mtlr	r0
	lwz	r2, _XER(r1)
	lwz	r0, _CCR(r1)
	mtspr	XER, r2
	mtcrf	0xFF, r0
	REST_10GPRS(3, r1)
	REST_10GPRS(13, r1)
	REST_8GPRS(23, r1)
	REST_GPR(31, r1)
	lwz	r2, _NIP(r1)	/* Restore environment */
	lwz	r0, _MSR(r1)
	mtspr	SRR0, r2
	mtspr	SRR1, r0
	lwz	r0, GPR0(r1)
	lwz	r2, GPR2(r1)
	lwz	r1, GPR1(r1)
	SYNC
	rfi

	.globl	dc_read
dc_read:
	blr

	.globl	get_pvr
get_pvr:
	mfspr	r3, PVR
	blr

	.globl	get_svr
get_svr:
	mfspr	r3, SVR
	blr

/*
 * Function:	in8
 * Description:	Input 8 bits
 */
	.globl	in8
in8:
	lbz	r3, 0x0000(r3)
	blr

/*
 * Function:	out8
 * Description:	Output 8 bits
 */
	.globl	out8
out8:
	stb	r4, 0x0000(r3)
	blr

/*
 * Function:	out16
 * Description:	Output 16 bits
 */
	.globl	out16
out16:
	sth	r4, 0x0000(r3)
	blr

/*
 * Function:	out16r
 * Description:	Byte reverse and output 16 bits
 */
	.globl	out16r
out16r:
	sthbrx	r4, r0, r3
	blr

/*
 * Function:	out32
 * Description:	Output 32 bits
 */
	.globl	out32
out32:
	stw	r4, 0x0000(r3)
	blr

/*
 * Function:	out32r
 * Description:	Byte reverse and output 32 bits
 */
	.globl	out32r
out32r:
	stwbrx	r4, r0, r3
	blr

/*
 * Function:	in16
 * Description:	Input 16 bits
 */
	.globl	in16
in16:
	lhz	r3, 0x0000(r3)
	blr

/*
 * Function:	in16r
 * Description:	Input 16 bits and byte reverse
 */
	.globl	in16r
in16r:
	lhbrx	r3, r0, r3
	blr

/*
 * Function:	in32
 * Description:	Input 32 bits
 */
	.globl	in32
in32:
	lwz	r3, 0x0000(r3)
	blr

/*
 * Function:	in32r
 * Description:	Input 32 bits and byte reverse
 */
	.globl	in32r
in32r:
	lwbrx	r3, r0, r3
	blr

/*
 * void relocate_code (addr_sp, gd, addr_moni)
 *
 * This "function" does not return, instead it continues in RAM
 * after relocating the monitor code.
 *
 * r3 = dest
 * r4 = src
 * r5 = length in bytes
 * r6 = cachelinesize
 */
	.globl	relocate_code
relocate_code:
	mr	r1, r3		/* Set new stack pointer		*/
	mr	r9, r4		/* Save copy of Global Data pointer	*/
	mr	r10, r5		/* Save copy of Destination Address	*/

	GET_GOT

	mr	r3, r5				/* Destination Address	*/
	lis	r4, CONFIG_SYS_MONITOR_BASE@h	/* Source Address	*/
	ori	r4, r4, CONFIG_SYS_MONITOR_BASE@l
	lwz	r5, GOT(__init_end)
	sub	r5, r5, r4
	li	r6, CONFIG_SYS_CACHELINE_SIZE	/* Cache Line Size	*/

	/*
	 * Fix GOT pointer:
	 *
	 * New GOT-PTR = (old GOT-PTR - CONFIG_SYS_MONITOR_BASE) + Destination Address
	 *
	 * Offset:
	 */
	sub	r15, r10, r4

	/* First our own GOT */
	add	r12, r12, r15
	/* then the one used by the C code */
	add	r30, r30, r15

	/*
	 * Now relocate code
	 */
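	/*
	 * Copy forward when the destination lies below the source,
	 * otherwise copy backward from the end, so that overlapping
	 * source/destination ranges are handled correctly.
	 */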
	cmplw	cr1, r3, r4
	addi	r0, r5, 3
	srwi.	r0, r0, 2
	beq	cr1, 4f		/* In place copy is not necessary	*/
	beq	7f		/* Protect against 0 count		*/
	mtctr	r0
	bge	cr1, 2f

	la	r8, -4(r4)
	la	r7, -4(r3)
1:	lwzu	r0, 4(r8)
	stwu	r0, 4(r7)
	bdnz	1b
	b	4f

2:	slwi	r0, r0, 2
	add	r8, r4, r0
	add	r7, r3, r0
3:	lwzu	r0, -4(r8)
	stwu	r0, -4(r7)
	bdnz	3b

/*
 * Now flush the cache: note that we must start from a cache aligned
 * address. Otherwise we might miss one cache line.
 */
4:	cmpwi	r6, 0
	add	r5, r3, r5
	beq	7f		/* Always flush prefetch queue in any case */
	subi	r0, r6, 1
	andc	r3, r3, r0
	mr	r4, r3
5:	dcbst	0, r4
	add	r4, r4, r6
	cmplw	r4, r5
	blt	5b
	sync			/* Wait for all dcbst to complete on bus */
	mr	r4, r3
6:	icbi	0, r4
	add	r4, r4, r6
	cmplw	r4, r5
	blt	6b
7:	sync			/* Wait for all icbi to complete on bus	*/
	isync

/*
 * We are done. Do not return, instead branch to second part of board
 * initialization, now running from RAM.
 */
	addi	r0, r10, in_ram - _start + EXC_OFF_SYS_RESET
	mtlr	r0
	blr

in_ram:
	/*
	 * Relocation Function, r12 points to got2+0x8000
	 *
	 * Adjust got2 pointers, no need to check for 0, this code
	 * already puts a few entries in the table.
	 */
	li	r0, __got2_entries@sectoff@l
	la	r3, GOT(_GOT2_TABLE_)
	lwz	r11, GOT(_GOT2_TABLE_)
	mtctr	r0
	sub	r11, r3, r11
	addi	r3, r3, -4
1:	lwzu	r0, 4(r3)
	cmpwi	r0, 0
	beq-	2f
	add	r0, r0, r11
	stw	r0, 0(r3)
2:	bdnz	1b

	/*
	 * Now adjust the fixups and the pointers to the fixups
	 * in case we need to move ourselves again.
	 */
	li	r0, __fixup_entries@sectoff@l
	lwz	r3, GOT(_FIXUP_TABLE_)
	cmpwi	r0, 0
	mtctr	r0
	addi	r3, r3, -4
	beq	4f
3:	lwzu	r4, 4(r3)
	lwzux	r0, r4, r11
	cmpwi	r0, 0
	add	r0, r0, r11
	stw	r4, 0(r3)
	beq-	5f
	stw	r0, 0(r4)
5:	bdnz	3b
4:
/* clear_bss: */
	/*
	 * Now clear BSS segment
	 */
	lwz	r3, GOT(__bss_start)
	lwz	r4, GOT(__bss_end__)

	cmplw	0, r3, r4
	beq	6f

	li	r0, 0
5:
	stw	r0, 0(r3)
	addi	r3, r3, 4
	cmplw	0, r3, r4
	bne	5b
6:
	mr	r3, r9		/* Init Data pointer		*/
	mr	r4, r10		/* Destination Address		*/
	bl	board_init_r

	/* not reached - end relocate_code */
/*-----------------------------------------------------------------------*/

/*
 * Copy exception vector code to low memory
 *
 * r3: dest_addr
 * r7: source address, r8: end address, r9: target address
 */
	.globl	trap_init
trap_init:
	mflr	r4		/* save link register		*/
	GET_GOT
	lwz	r7, GOT(_start)
	lwz	r8, GOT(_end_of_vectors)

	li	r9, 0x100	/* reset vector always at 0x100	*/

	cmplw	0, r7, r8
	bgelr			/* return if r7>=r8 - just in case */
1:
	lwz	r0, 0(r7)
	stw	r0, 0(r9)
	addi	r7, r7, 4
	addi	r9, r9, 4
	cmplw	0, r7, r8
	bne	1b

	/*
	 * relocate `hdlr' and `int_return' entries
	 */
	li	r7, .L_MachineCheck - _start + EXC_OFF_SYS_RESET
	li	r8, Alignment - _start + EXC_OFF_SYS_RESET
2:
	bl	trap_reloc
	addi	r7, r7, 0x100		/* next exception vector	*/
	cmplw	0, r7, r8
	blt	2b

	li	r7, .L_Alignment - _start + EXC_OFF_SYS_RESET
	bl	trap_reloc

	li	r7, .L_ProgramCheck - _start + EXC_OFF_SYS_RESET
	bl	trap_reloc

	li	r7, .L_FPUnavailable - _start + EXC_OFF_SYS_RESET
	li	r8, SystemCall - _start + EXC_OFF_SYS_RESET
3:
	bl	trap_reloc
	addi	r7, r7, 0x100		/* next exception vector	*/
	cmplw	0, r7, r8
	blt	3b

	li	r7, .L_SingleStep - _start + EXC_OFF_SYS_RESET
	li	r8, _end_of_vectors - _start + EXC_OFF_SYS_RESET
4:
	bl	trap_reloc
	addi	r7, r7, 0x100		/* next exception vector	*/
	cmplw	0, r7, r8
	blt	4b
	/* enable exceptions from RAM vectors */
	mfmsr	r7
	li	r8, MSR_IP
	andc	r7, r7, r8
	ori	r7, r7, MSR_ME		/* Enable Machine Check */
	mtmsr	r7

	mtlr	r4			/* restore link register	*/
	blr

	.globl	enable_ext_addr
enable_ext_addr:
	mfspr	r0, HID0
	lis	r0, (HID0_HIGH_BAT_EN | HID0_XBSEN | HID0_XAEN)@h
	ori	r0, r0, (HID0_HIGH_BAT_EN | HID0_XBSEN | HID0_XAEN)@l
	mtspr	HID0, r0
	sync
	isync
	blr

#if (CONFIG_SYS_CCSRBAR_DEFAULT != CONFIG_SYS_CCSRBAR)
	.globl	setup_ccsrbar
setup_ccsrbar:
	/* Special sequence needed to update CCSRBAR itself */
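	/*
	 * Write the new CCSRBAR value through the old (default) mapping,
	 * force the store to complete with a read from flash (still mapped
	 * 1:1), then read CCSRBAR back through its new address so the
	 * window has moved before we continue.
	 */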
	lis	r4, CONFIG_SYS_CCSRBAR_DEFAULT@h
	ori	r4, r4, CONFIG_SYS_CCSRBAR_DEFAULT@l

	lis	r5, CONFIG_SYS_CCSRBAR_PHYS_LOW@h
	ori	r5, r5, CONFIG_SYS_CCSRBAR_PHYS_LOW@l
	srwi	r5, r5, 12
	li	r6, CONFIG_SYS_CCSRBAR_PHYS_HIGH@l
	rlwimi	r5, r6, 20, 8, 11
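	/* merge the 4 high-order bits of the 36-bit physical address into bits 8-11 */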
	stw	r5, 0(r4)		/* Store physical value of CCSR */
	isync

	lis	r5, CONFIG_SYS_TEXT_BASE@h
	ori	r5, r5, CONFIG_SYS_TEXT_BASE@l
	lwz	r5, 0(r5)
	isync

	/* Use VA of CCSR to do read */
	lis	r3, CONFIG_SYS_CCSRBAR@h
	lwz	r5, CONFIG_SYS_CCSRBAR@l(r3)
	isync

	blr
#endif

#ifdef CONFIG_SYS_INIT_RAM_LOCK
lock_ram_in_cache:
	/* Allocate Initial RAM in data cache.
	 */
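	/*
	 * dcbz establishes and zeroes a cache line without reading memory,
	 * so the loop below creates the init-RAM region entirely in the
	 * L1 data cache.
	 */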
	lis	r3, (CONFIG_SYS_INIT_RAM_ADDR & ~31)@h
	ori	r3, r3, (CONFIG_SYS_INIT_RAM_ADDR & ~31)@l
	li	r4, ((CONFIG_SYS_INIT_RAM_SIZE & ~31) + \
		     (CONFIG_SYS_INIT_RAM_ADDR & 31) + 31) / 32
	mtctr	r4
1:
	dcbz	r0, r3
	addi	r3, r3, 32
	bdnz	1b
#if 1
	/* Lock the data cache */
	mfspr	r0, HID0
	ori	r0, r0, 0x1000
	sync
	mtspr	HID0, r0
	sync
	blr
#endif
#if 0
	/* Lock the first way of the data cache */
	mfspr	r0, LDSTCR
	ori	r0, r0, 0x0080
#if defined(CONFIG_ALTIVEC)
	dssall
#endif
	sync
	mtspr	LDSTCR, r0
	sync
	isync
	blr
#endif

	.globl	unlock_ram_in_cache
unlock_ram_in_cache:
	/* invalidate the INIT_RAM section */
	lis	r3, (CONFIG_SYS_INIT_RAM_ADDR & ~31)@h
	ori	r3, r3, (CONFIG_SYS_INIT_RAM_ADDR & ~31)@l
	li	r4, ((CONFIG_SYS_INIT_RAM_SIZE & ~31) + \
		     (CONFIG_SYS_INIT_RAM_ADDR & 31) + 31) / 32
	mtctr	r4
1:	icbi	r0, r3
	addi	r3, r3, 32
	bdnz	1b
	sync			/* Wait for all icbi to complete on bus	*/
	isync
#if 1
	/* Unlock the data cache and invalidate it */
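	/* clear HID0[DLOCK] (0x1000) and set HID0[DCFI] (0x0400) to flash-invalidate */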
	mfspr	r0, HID0
	li	r3, 0x1000
	andc	r0, r0, r3
	li	r3, 0x0400
	or	r0, r0, r3
	sync
	mtspr	HID0, r0
	sync
	blr
#endif
#if 0
	/* Unlock the first way of the data cache */
	mfspr	r0, LDSTCR
	li	r3, 0x0080
	andc	r0, r0, r3
#ifdef CONFIG_ALTIVEC
	dssall
#endif
	sync
	mtspr	LDSTCR, r0
	sync
	isync
	li	r3, 0x0400
	or	r0, r0, r3
	sync
	mtspr	HID0, r0
	sync
	blr
#endif
#endif