/*
 * Copyright (C) 1998 Dan Malek <dmalek@jlc.net>
 * Copyright (C) 1999 Magnus Damm <kieraypc01.p.y.kie.era.ericsson.se>
 * Copyright (C) 2000 - 2003 Wolfgang Denk <wd@denx.de>
 *
 * See file CREDITS for list of people who contributed to this
 * project.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 2 of
 * the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston,
 * MA 02111-1307 USA
 */

/*
 * U-Boot - Startup Code for MPC5xxx CPUs
 */

#include <config.h>
#include <mpc5xxx.h>
#include <version.h>

#define CONFIG_MPC5XXX 1	/* needed for Linux kernel header files */
#define _LINUX_CONFIG_H 1	/* avoid reading Linux autoconf.h file */

#include <ppc_asm.tmpl>
#include <ppc_defs.h>

#include <asm/cache.h>
#include <asm/mmu.h>

#ifndef CONFIG_IDENT_STRING
#define CONFIG_IDENT_STRING ""
#endif

/* We don't want the MMU yet. */
#undef MSR_KERNEL
/* Floating Point enable, Machine Check and Recoverable Interr. */
#ifdef DEBUG
#define MSR_KERNEL (MSR_FP|MSR_RI)
#else
#define MSR_KERNEL (MSR_FP|MSR_ME|MSR_RI)
#endif
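/*
 * With DEBUG, MSR_ME is intentionally left clear so a machine check
 * results in a checkstop and halts the CPU for the debugger (see the
 * note at vector 0x1300 below).
 */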

/*
 * Set up GOT: Global Offset Table
 *
 * Use r14 to access the GOT
 */
	START_GOT
	GOT_ENTRY(_GOT2_TABLE_)
	GOT_ENTRY(_FIXUP_TABLE_)

	GOT_ENTRY(_start)
	GOT_ENTRY(_start_of_vectors)
	GOT_ENTRY(_end_of_vectors)
	GOT_ENTRY(transfer_to_handler)

	GOT_ENTRY(__init_end)
	GOT_ENTRY(_end)
	GOT_ENTRY(__bss_start)
	END_GOT
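/*
 * GET_GOT (used in the startup code below) loads r14 with the run-time
 * address of this table (got2+0x8000), so GOT(sym) references work
 * position-independently while U-Boot still executes from flash.
 */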

/*
 * Version string
 */
	.data
	.globl version_string
version_string:
	.ascii U_BOOT_VERSION
	.ascii " (", __DATE__, " - ", __TIME__, ")"
	.ascii CONFIG_IDENT_STRING, "\0"

/*
 * Exception vectors
 */
	.text
	. = EXC_OFF_SYS_RESET
	.globl _start
_start:
	li r21, BOOTFLAG_COLD /* Normal Power-On */
	nop
	b boot_cold

	. = EXC_OFF_SYS_RESET + 0x10

	.globl _start_warm
_start_warm:
	li r21, BOOTFLAG_WARM /* Software reboot */
	b boot_warm

boot_cold:
boot_warm:
	mfmsr r5 /* save msr contents */

#if defined(CFG_DEFAULT_MBAR)
	lis r3, CFG_MBAR@h
	ori r3, r3, CFG_MBAR@l
#if defined(CONFIG_MPC5200)
	rlwinm r3, r3, 16, 16, 31
#endif
#if defined(CONFIG_MGT5100)
	rlwinm r3, r3, 17, 15, 31
#endif
	lis r4, CFG_DEFAULT_MBAR@h
	stw r3, 0(r4)
#endif /* CFG_DEFAULT_MBAR */
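	/*
	 * The block above moves the internal register space: CFG_MBAR,
	 * shifted into the format each chip expects (MPC5200 vs MGT5100),
	 * is stored to the MBAR register while it still responds at its
	 * reset-default base address CFG_DEFAULT_MBAR.
	 */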

	/* Initialise the MPC5xxx processor core */
	/*--------------------------------------------------------------*/
	bl init_5xxx_core

	/* initialize some things that are hard to access from C */
	/*--------------------------------------------------------------*/

	/* set up stack in on-chip SRAM */
	lis r3, CFG_INIT_RAM_ADDR@h
	ori r3, r3, CFG_INIT_RAM_ADDR@l
	ori r1, r3, CFG_INIT_SP_OFFSET
	li r0, 0	/* Make room for stack frame header and */
	stwu r0, -4(r1)	/* clear final stack frame so that */
	stwu r0, -4(r1)	/* stack backtraces terminate cleanly */

	/* let the C-code set up the rest */
	/* */
	/* Be careful to keep code relocatable ! */
	/*--------------------------------------------------------------*/
	GET_GOT	/* initialize GOT access */

	/* r3: IMMR */
	bl cpu_init_f	/* run low-level CPU init code (in Flash) */

	mr r3, r21
	/* r3: BOOTFLAG */
	bl board_init_f	/* run 1st part of board init code (in Flash) */
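	/*
	 * board_init_f() runs from flash on the SRAM stack set up above;
	 * it is expected not to return here: once RAM is ready it calls
	 * relocate_code() below and execution continues at board_init_r()
	 * in RAM.
	 */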

/*
 * Vector Table
 */
	.globl _start_of_vectors
_start_of_vectors:

/* Machine check */
	STD_EXCEPTION(0x200, MachineCheck, MachineCheckException)

/* Data Storage exception. */
	STD_EXCEPTION(0x300, DataStorage, UnknownException)

/* Instruction Storage exception. */
	STD_EXCEPTION(0x400, InstStorage, UnknownException)

/* External Interrupt exception. */
	STD_EXCEPTION(0x500, ExtInterrupt, external_interrupt)

/* Alignment exception. */
	. = 0x600
Alignment:
	EXCEPTION_PROLOG
	mfspr r4,DAR
	stw r4,_DAR(r21)
	mfspr r5,DSISR
	stw r5,_DSISR(r21)
	addi r3,r1,STACK_FRAME_OVERHEAD
	li r20,MSR_KERNEL
	rlwimi r20,r23,0,16,16 /* copy EE bit from saved MSR */
	rlwimi r20,r23,0,25,25 /* copy IP bit from saved MSR */
	lwz r6,GOT(transfer_to_handler)
	mtlr r6
	blrl
.L_Alignment:
	.long AlignmentException - _start + EXC_OFF_SYS_RESET
	.long int_return - _start + EXC_OFF_SYS_RESET

/* Program check exception */
	. = 0x700
ProgramCheck:
	EXCEPTION_PROLOG
	addi r3,r1,STACK_FRAME_OVERHEAD
	li r20,MSR_KERNEL
	rlwimi r20,r23,0,16,16 /* copy EE bit from saved MSR */
	rlwimi r20,r23,0,25,25 /* copy IP bit from saved MSR */
	lwz r6,GOT(transfer_to_handler)
	mtlr r6
	blrl
.L_ProgramCheck:
	.long ProgramCheckException - _start + EXC_OFF_SYS_RESET
	.long int_return - _start + EXC_OFF_SYS_RESET

	STD_EXCEPTION(0x800, FPUnavailable, UnknownException)

/* I guess we could implement decrementer, and may have
 * to someday for timekeeping.
 */
	STD_EXCEPTION(0x900, Decrementer, timer_interrupt)

	STD_EXCEPTION(0xa00, Trap_0a, UnknownException)
	STD_EXCEPTION(0xb00, Trap_0b, UnknownException)

	. = 0xc00
/*
 * r0 - SYSCALL number
 * r3-... arguments
 */
SystemCall:
	addis r11,r0,0	/* get functions table addr */
	ori r11,r11,0	/* Note: this code is patched in trap_init */
	addis r12,r0,0	/* get number of functions */
	ori r12,r12,0

	cmplw 0, r0, r12
	bge 1f

	rlwinm r0,r0,2,0,31	/* fn_addr = fn_tbl[r0] */
	add r11,r11,r0
	lwz r11,0(r11)

	li r20,0xd00-4	/* Get stack pointer */
	lwz r12,0(r20)
	subi r12,r12,12	/* Adjust stack pointer */
	li r0,0xc00+_end_back-SystemCall
	cmplw 0, r0, r12	/* Check stack overflow */
	bgt 1f
	stw r12,0(r20)

	mflr r0
	stw r0,0(r12)
	mfspr r0,SRR0
	stw r0,4(r12)
	mfspr r0,SRR1
	stw r0,8(r12)
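	/*
	 * LR, SRR0 and SRR1 have been saved on a small private stack whose
	 * pointer lives in the word at 0xd00-4, so they survive the rfi
	 * into the called function; _back below restores them and returns.
	 */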
	li r12,0xc00+_back-SystemCall
	mtlr r12
	mtspr SRR0,r11

1:	SYNC
	rfi

_back:
	mfmsr r11	/* Disable interrupts */
	li r12,0
	ori r12,r12,MSR_EE
	andc r11,r11,r12
	SYNC	/* Some chip revs need this... */
	mtmsr r11
	SYNC

	li r12,0xd00-4	/* restore regs */
	lwz r12,0(r12)

	lwz r11,0(r12)
	mtlr r11
	lwz r11,4(r12)
	mtspr SRR0,r11
	lwz r11,8(r12)
	mtspr SRR1,r11

	addi r12,r12,12	/* Adjust stack pointer */
	li r20,0xd00-4
	stw r12,0(r20)

	SYNC
	rfi

_end_back:

	STD_EXCEPTION(0xd00, SingleStep, UnknownException)

	STD_EXCEPTION(0xe00, Trap_0e, UnknownException)
	STD_EXCEPTION(0xf00, Trap_0f, UnknownException)

	STD_EXCEPTION(0x1000, InstructionTLBMiss, UnknownException)
	STD_EXCEPTION(0x1100, DataLoadTLBMiss, UnknownException)
	STD_EXCEPTION(0x1200, DataStoreTLBMiss, UnknownException)
#ifdef DEBUG
	. = 0x1300
	/*
	 * This exception occurs when the program counter matches the
	 * Instruction Address Breakpoint Register (IABR).
	 *
	 * I want the cpu to halt if this occurs so I can hunt around
	 * with the debugger and look at things.
	 *
	 * When DEBUG is defined, both machine check enable (in the MSR)
	 * and checkstop reset enable (in the reset mode register) are
	 * turned off and so a checkstop condition will result in the cpu
	 * halting.
	 *
	 * I force the cpu into a checkstop condition by putting an illegal
	 * instruction here (at least this is the theory).
	 *
	 * Well - that didn't work, so just do an infinite loop!
	 */
1:	b 1b
#else
	STD_EXCEPTION(0x1300, InstructionBreakpoint, DebugException)
#endif

	STD_EXCEPTION(0x1400, SMI, UnknownException)

	STD_EXCEPTION(0x1500, Trap_15, UnknownException)
	STD_EXCEPTION(0x1600, Trap_16, UnknownException)
	STD_EXCEPTION(0x1700, Trap_17, UnknownException)
	STD_EXCEPTION(0x1800, Trap_18, UnknownException)
	STD_EXCEPTION(0x1900, Trap_19, UnknownException)
	STD_EXCEPTION(0x1a00, Trap_1a, UnknownException)
	STD_EXCEPTION(0x1b00, Trap_1b, UnknownException)
	STD_EXCEPTION(0x1c00, Trap_1c, UnknownException)
	STD_EXCEPTION(0x1d00, Trap_1d, UnknownException)
	STD_EXCEPTION(0x1e00, Trap_1e, UnknownException)
	STD_EXCEPTION(0x1f00, Trap_1f, UnknownException)
	STD_EXCEPTION(0x2000, Trap_20, UnknownException)
	STD_EXCEPTION(0x2100, Trap_21, UnknownException)
	STD_EXCEPTION(0x2200, Trap_22, UnknownException)
	STD_EXCEPTION(0x2300, Trap_23, UnknownException)
	STD_EXCEPTION(0x2400, Trap_24, UnknownException)
	STD_EXCEPTION(0x2500, Trap_25, UnknownException)
	STD_EXCEPTION(0x2600, Trap_26, UnknownException)
	STD_EXCEPTION(0x2700, Trap_27, UnknownException)
	STD_EXCEPTION(0x2800, Trap_28, UnknownException)
	STD_EXCEPTION(0x2900, Trap_29, UnknownException)
	STD_EXCEPTION(0x2a00, Trap_2a, UnknownException)
	STD_EXCEPTION(0x2b00, Trap_2b, UnknownException)
	STD_EXCEPTION(0x2c00, Trap_2c, UnknownException)
	STD_EXCEPTION(0x2d00, Trap_2d, UnknownException)
	STD_EXCEPTION(0x2e00, Trap_2e, UnknownException)
	STD_EXCEPTION(0x2f00, Trap_2f, UnknownException)

	.globl _end_of_vectors
_end_of_vectors:

	. = 0x3000

/*
 * This code finishes saving the registers to the exception frame
 * and jumps to the appropriate handler for the exception.
 * Register r21 is a pointer into the trap frame, r1 holds the new
 * stack pointer.
 */
	.globl transfer_to_handler
transfer_to_handler:
	stw r22,_NIP(r21)
	lis r22,MSR_POW@h
	andc r23,r23,r22
	stw r23,_MSR(r21)
	SAVE_GPR(7, r21)
	SAVE_4GPRS(8, r21)
	SAVE_8GPRS(12, r21)
	SAVE_8GPRS(24, r21)
	mflr r23
	andi. r24,r23,0x3f00	/* get vector offset */
	stw r24,TRAP(r21)
	li r22,0
	stw r22,RESULT(r21)
	lwz r24,0(r23)	/* virtual address of handler */
	lwz r23,4(r23)	/* where to go when done */
	mtspr SRR0,r24
	mtspr SRR1,r20
	mtlr r23
	SYNC
	rfi	/* jump to handler, enable MMU */
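	/*
	 * The handler address and return address loaded from 0(r23)/4(r23)
	 * above are the two .long words that follow the blrl in each vector
	 * stub (e.g. .L_Alignment); trap_init/trap_reloc below rewrite them
	 * once the vectors have been copied to low memory.
	 */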
int_return:
	mfmsr r28	/* Disable interrupts */
	li r4,0
	ori r4,r4,MSR_EE
	andc r28,r28,r4
	SYNC	/* Some chip revs need this... */
	mtmsr r28
	SYNC
	lwz r2,_CTR(r1)
	lwz r0,_LINK(r1)
	mtctr r2
	mtlr r0
	lwz r2,_XER(r1)
	lwz r0,_CCR(r1)
	mtspr XER,r2
	mtcrf 0xFF,r0
	REST_10GPRS(3, r1)
	REST_10GPRS(13, r1)
	REST_8GPRS(23, r1)
	REST_GPR(31, r1)
	lwz r2,_NIP(r1)	/* Restore environment */
	lwz r0,_MSR(r1)
	mtspr SRR0,r2
	mtspr SRR1,r0
	lwz r0,GPR0(r1)
	lwz r2,GPR2(r1)
	lwz r1,GPR1(r1)
	SYNC
	rfi

/*
 * This code initialises the MPC5xxx processor core
 * (conforms to PowerPC 603e spec)
 * Note: expects original MSR contents to be in r5.
 */
	.globl init_5xxx_core
init_5xxx_core:
	/* Initialize machine status; enable machine check interrupt */
	/*--------------------------------------------------------------*/
	li r3, MSR_KERNEL	/* Set ME and RI flags */
	rlwimi r3, r5, 0, 25, 25	/* preserve IP bit set by HRCW */
#ifdef DEBUG
	rlwimi r3, r5, 0, 21, 22	/* debugger might set SE & BE bits */
#endif
	SYNC	/* Some chip revs need this... */
	mtmsr r3
	SYNC
	mtspr SRR1, r3	/* Make SRR1 match MSR */

	/* Initialize the Hardware Implementation-dependent Registers */
	/* HID0 also contains cache control */
	/*--------------------------------------------------------------*/
	lis r3, CFG_HID0_INIT@h
	ori r3, r3, CFG_HID0_INIT@l
	SYNC
	mtspr HID0, r3

	lis r3, CFG_HID0_FINAL@h
	ori r3, r3, CFG_HID0_FINAL@l
	SYNC
	mtspr HID0, r3

	/* clear all BAT's */
	/*--------------------------------------------------------------*/
	li r0, 0
	mtspr DBAT0U, r0
	mtspr DBAT0L, r0
	mtspr DBAT1U, r0
	mtspr DBAT1L, r0
	mtspr DBAT2U, r0
	mtspr DBAT2L, r0
	mtspr DBAT3U, r0
	mtspr DBAT3L, r0
	mtspr IBAT0U, r0
	mtspr IBAT0L, r0
	mtspr IBAT1U, r0
	mtspr IBAT1L, r0
	mtspr IBAT2U, r0
	mtspr IBAT2L, r0
	mtspr IBAT3U, r0
	mtspr IBAT3L, r0
	SYNC

	/* invalidate all tlb's */
	/* */
	/* From the 603e User Manual: "The 603e provides the ability to */
	/* invalidate a TLB entry. The TLB Invalidate Entry (tlbie) */
	/* instruction invalidates the TLB entry indexed by the EA, and */
	/* operates on both the instruction and data TLBs simultaneously */
	/* invalidating four TLB entries (both sets in each TLB). The */
	/* index corresponds to bits 15-19 of the EA. To invalidate all */
	/* entries within both TLBs, 32 tlbie instructions should be */
	/* issued, incrementing this field by one each time." */
	/* */
	/* "Note that the tlbia instruction is not implemented on the */
	/* 603e." */
	/* */
	/* bits 15-19 correspond to addresses 0x00000000 to 0x0001F000 */
	/* incrementing by 0x1000 each time. The code below is sort of */
	/* based on code in "flush_tlbs" from arch/ppc/kernel/head.S */
	/* */
	/*--------------------------------------------------------------*/
	li r3, 32
	mtctr r3
	li r3, 0
1:	tlbie r3
	addi r3, r3, 0x1000
	bdnz 1b
	SYNC

	/* Done! */
	/*--------------------------------------------------------------*/
	blr

/* Cache functions.
 *
 * Note: requires that all cache bits in
 * HID0 are in the low half word.
 */
	.globl icache_enable
icache_enable:
	mfspr r3, HID0
	ori r3, r3, HID0_ICE
	lis r4, 0
	ori r4, r4, HID0_ILOCK
	andc r3, r3, r4
	ori r4, r3, HID0_ICFI
	isync
	mtspr HID0, r4	/* sets enable and invalidate, clears lock */
	isync
	mtspr HID0, r3	/* clears invalidate */
	blr

	.globl icache_disable
icache_disable:
	mfspr r3, HID0
	lis r4, 0
	ori r4, r4, HID0_ICE|HID0_ILOCK
	andc r3, r3, r4
	ori r4, r3, HID0_ICFI
	isync
	mtspr HID0, r4	/* sets invalidate, clears enable and lock */
	isync
	mtspr HID0, r3	/* clears invalidate */
	blr

	.globl icache_status
icache_status:
	mfspr r3, HID0
	rlwinm r3, r3, HID0_ICE_BITPOS + 1, 31, 31
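	/* ICE bit rotated into the lsb: r3 = 1 if the I-cache is enabled */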
	blr

	.globl dcache_enable
dcache_enable:
	mfspr r3, HID0
	ori r3, r3, HID0_DCE
	lis r4, 0
	ori r4, r4, HID0_DLOCK
	andc r3, r3, r4
	ori r4, r3, HID0_DCI
	sync
	mtspr HID0, r4	/* sets enable and invalidate, clears lock */
	sync
	mtspr HID0, r3	/* clears invalidate */
	blr

	.globl dcache_disable
dcache_disable:
	mfspr r3, HID0
	lis r4, 0
	ori r4, r4, HID0_DCE|HID0_DLOCK
	andc r3, r3, r4
	ori r4, r3, HID0_DCI
	sync
	mtspr HID0, r4	/* sets invalidate, clears enable and lock */
	sync
	mtspr HID0, r3	/* clears invalidate */
	blr

	.globl dcache_status
dcache_status:
	mfspr r3, HID0
	rlwinm r3, r3, HID0_DCE_BITPOS + 1, 31, 31
	blr

	.globl get_pvr
get_pvr:
	mfspr r3, PVR
	blr

/*------------------------------------------------------------------------------*/

/*
 * void relocate_code (addr_sp, gd, addr_moni)
 *
 * This "function" does not return, instead it continues in RAM
 * after relocating the monitor code.
 *
 * r3 = dest
 * r4 = src
 * r5 = length in bytes
 * r6 = cachelinesize
 */
	.globl relocate_code
relocate_code:
	mr r1, r3	/* Set new stack pointer */
	mr r9, r4	/* Save copy of Global Data pointer */
	mr r10, r5	/* Save copy of Destination Address */

	mr r3, r5	/* Destination Address */
	lis r4, CFG_MONITOR_BASE@h	/* Source Address */
	ori r4, r4, CFG_MONITOR_BASE@l
	lwz r5, GOT(__init_end)
	sub r5, r5, r4
	li r6, CFG_CACHELINE_SIZE	/* Cache Line Size */
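	/*
	 * At this point r3 = destination, r4 = link-time source address
	 * (CFG_MONITOR_BASE), r5 = number of bytes to copy (up to
	 * __init_end) and r6 = cache line size; these are the registers
	 * listed in the comment above, not the incoming C arguments.
	 */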
	/*
	 * Fix GOT pointer:
	 *
	 * New GOT-PTR = (old GOT-PTR - CFG_MONITOR_BASE) + Destination Address
	 *
	 * Offset:
	 */
	sub r15, r10, r4

	/* First our own GOT */
	add r14, r14, r15
	/* then the one used by the C code */
	add r30, r30, r15

	/*
	 * Now relocate code
	 */
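	/*
	 * cr1 records whether the destination is below or above the source:
	 * copy forwards when dest < src and backwards (from the end) when
	 * dest > src, so an overlapping copy cannot overwrite itself; when
	 * the addresses are equal the copy is skipped entirely.
	 */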
	cmplw cr1,r3,r4
	addi r0,r5,3
	srwi. r0,r0,2
	beq cr1,4f	/* In place copy is not necessary */
	beq 7f	/* Protect against 0 count */
	mtctr r0
	bge cr1,2f

	la r8,-4(r4)
	la r7,-4(r3)
1:	lwzu r0,4(r8)
	stwu r0,4(r7)
	bdnz 1b
	b 4f

2:	slwi r0,r0,2
	add r8,r4,r0
	add r7,r3,r0
3:	lwzu r0,-4(r8)
	stwu r0,-4(r7)
	bdnz 3b

/*
 * Now flush the cache: note that we must start from a cache aligned
 * address. Otherwise we might miss one cache line.
 */
4:	cmpwi r6,0
	add r5,r3,r5
	beq 7f	/* Always flush prefetch queue in any case */
	subi r0,r6,1
	andc r3,r3,r0
	mfspr r7,HID0	/* don't do dcbst if dcache is disabled */
	rlwinm r7,r7,HID0_DCE_BITPOS+1,31,31
	cmpwi r7,0
	beq 9f
	mr r4,r3
5:	dcbst 0,r4
	add r4,r4,r6
	cmplw r4,r5
	blt 5b
	sync	/* Wait for all dcbst to complete on bus */
9:	mfspr r7,HID0	/* don't do icbi if icache is disabled */
	rlwinm r7,r7,HID0_ICE_BITPOS+1,31,31
	cmpwi r7,0
	beq 7f
	mr r4,r3
6:	icbi 0,r4
	add r4,r4,r6
	cmplw r4,r5
	blt 6b
7:	sync	/* Wait for all icbi to complete on bus */
	isync

/*
 * We are done. Do not return, instead branch to second part of board
 * initialization, now running from RAM.
 */
	addi r0, r10, in_ram - _start + EXC_OFF_SYS_RESET
	mtlr r0
	blr

in_ram:
	/*
	 * Relocation Function, r14 points to got2+0x8000
	 *
	 * Adjust got2 pointers, no need to check for 0, this code
	 * already puts a few entries in the table.
	 */
	li r0,__got2_entries@sectoff@l
	la r3,GOT(_GOT2_TABLE_)
	lwz r11,GOT(_GOT2_TABLE_)
	mtctr r0
	sub r11,r3,r11
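	/* r11 now holds the relocation offset (run-time minus link-time
	 * address); each got2 entry below gets this offset added */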
	addi r3,r3,-4
1:	lwzu r0,4(r3)
	add r0,r0,r11
	stw r0,0(r3)
	bdnz 1b

/*
 * Now adjust the fixups and the pointers to the fixups
 * in case we need to move ourselves again.
 */
2:	li r0,__fixup_entries@sectoff@l
	lwz r3,GOT(_FIXUP_TABLE_)
	cmpwi r0,0
	mtctr r0
	addi r3,r3,-4
	beq 4f
3:	lwzu r4,4(r3)
	lwzux r0,r4,r11
	add r0,r0,r11
	stw r10,0(r3)
	stw r0,0(r4)
	bdnz 3b
4:

clear_bss:
	/*
	 * Now clear BSS segment
	 */
	lwz r3,GOT(__bss_start)
	lwz r4,GOT(_end)

	cmplw 0, r3, r4
	beq 6f

	li r0, 0
5:
	stw r0, 0(r3)
	addi r3, r3, 4
	cmplw 0, r3, r4
	bne 5b

6:
	mr r3, r9	/* Global Data pointer */
	mr r4, r10	/* Destination Address */
	bl board_init_r

/*
 * Copy exception vector code to low memory
 *
 * r3: dest_addr
 * r7: source address, r8: end address, r9: target address
 */
	.globl trap_init
trap_init:
	lwz r7, GOT(_start)
	lwz r8, GOT(_end_of_vectors)

	li r9, 0x100	/* reset vector always at 0x100 */

	cmplw 0, r7, r8
	bgelr	/* return if r7>=r8 - just in case */

	mflr r4	/* save link register */
1:
	lwz r0, 0(r7)
	stw r0, 0(r9)
	addi r7, r7, 4
	addi r9, r9, 4
	cmplw 0, r7, r8
	bne 1b

/*
 * relocate `hdlr' and `int_return' entries
 */
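	/*
	 * Each vector stub ends with two address words (handler and
	 * int_return, cf. .L_Alignment above) stored as offsets into the
	 * image; trap_reloc adds dest_addr (r3) so they become absolute
	 * addresses in the relocated monitor.
	 */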
	li r7, .L_MachineCheck - _start + EXC_OFF_SYS_RESET
	li r8, Alignment - _start + EXC_OFF_SYS_RESET
2:
	bl trap_reloc
	addi r7, r7, 0x100	/* next exception vector */
	cmplw 0, r7, r8
	blt 2b

	li r7, .L_Alignment - _start + EXC_OFF_SYS_RESET
	bl trap_reloc

	li r7, .L_ProgramCheck - _start + EXC_OFF_SYS_RESET
	bl trap_reloc

	li r7, .L_FPUnavailable - _start + EXC_OFF_SYS_RESET
	li r8, SystemCall - _start + EXC_OFF_SYS_RESET
3:
	bl trap_reloc
	addi r7, r7, 0x100	/* next exception vector */
	cmplw 0, r7, r8
	blt 3b

	li r7, .L_SingleStep - _start + EXC_OFF_SYS_RESET
	li r8, _end_of_vectors - _start + EXC_OFF_SYS_RESET
4:
	bl trap_reloc
	addi r7, r7, 0x100	/* next exception vector */
	cmplw 0, r7, r8
	blt 4b

	mfmsr r3	/* now that the vectors have */
	lis r7, MSR_IP@h	/* relocated into low memory */
	ori r7, r7, MSR_IP@l	/* MSR[IP] can be turned off */
	andc r3, r3, r7	/* (if it was on) */
	SYNC	/* Some chip revs need this... */
	mtmsr r3
	SYNC

	mtlr r4	/* restore link register */
	blr

/*
 * Function: relocate entries for one exception vector
 */
trap_reloc:
	lwz r0, 0(r7)	/* hdlr ... */
	add r0, r0, r3	/* ... += dest_addr */
	stw r0, 0(r7)

	lwz r0, 4(r7)	/* int_return ... */
	add r0, r0, r3	/* ... += dest_addr */
	stw r0, 4(r7)

	blr