/*
 * Copyright (C) 1998  Dan Malek <dmalek@jlc.net>
 * Copyright (C) 1999  Magnus Damm <kieraypc01.p.y.kie.era.ericsson.se>
 * Copyright (C) 2000 - 2003 Wolfgang Denk <wd@denx.de>
 *
 * See file CREDITS for list of people who contributed to this
 * project.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 2 of
 * the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston,
 * MA 02111-1307 USA
 */

/*
 * U-Boot - Startup Code for MPC8220 CPUs
 */
#include <config.h>
#include <mpc8220.h>
#include <version.h>

#define _LINUX_CONFIG_H 1	/* avoid reading Linux autoconf.h file */

#include <ppc_asm.tmpl>
#include <ppc_defs.h>

#include <asm/cache.h>
#include <asm/mmu.h>

#ifndef CONFIG_IDENT_STRING
#define CONFIG_IDENT_STRING ""
#endif

/*
 * We don't want the MMU yet.
 */
#undef	MSR_KERNEL
/* Floating Point enable, Machine Check and Recoverable Interr. */
#ifdef DEBUG
#define MSR_KERNEL (MSR_FP|MSR_RI)
#else
#define MSR_KERNEL (MSR_FP|MSR_ME|MSR_RI)
#endif
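
/*
 * Note: in DEBUG builds MSR_ME (machine check enable) is left out, so a
 * machine check results in a checkstop and the CPU halts where it can be
 * examined with a hardware debugger; see the DEBUG handling of the 0x1300
 * vector below.
 */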

/*
 * Set up GOT: Global Offset Table
 *
 * Use r14 to access the GOT
 */
	START_GOT
	GOT_ENTRY(_GOT2_TABLE_)
	GOT_ENTRY(_FIXUP_TABLE_)

	GOT_ENTRY(_start)
	GOT_ENTRY(_start_of_vectors)
	GOT_ENTRY(_end_of_vectors)
	GOT_ENTRY(transfer_to_handler)

	GOT_ENTRY(__init_end)
	GOT_ENTRY(_end)
	GOT_ENTRY(__bss_start)
	END_GOT

/*
 * Version string
 */
	.data
	.globl	version_string
version_string:
	.ascii U_BOOT_VERSION
	.ascii " (", __DATE__, " - ", __TIME__, ")"
	.ascii CONFIG_IDENT_STRING, "\0"

/*
 * Exception vectors
 */
	.text
	. = EXC_OFF_SYS_RESET
	.globl	_start
_start:
	li	r21, BOOTFLAG_COLD	/* Normal Power-On		*/
	nop
	b	boot_cold

	. = EXC_OFF_SYS_RESET + 0x10
	.globl	_start_warm
_start_warm:
	li	r21, BOOTFLAG_WARM	/* Software reboot		*/
	b	boot_warm

boot_cold:
boot_warm:
	mfmsr	r5			/* save msr contents		*/

	/* Move the default MBAR base address from 0x80000000 to 0xf0000000 */
#if defined(CFG_DEFAULT_MBAR) && !defined(CFG_RAMBOOT)
	lis	r3, CFG_MBAR@h
	ori	r3, r3, CFG_MBAR@l

	/* MBAR is mirrored into the MBAR SPR */
	mtspr	MBAR, r3
	lis	r4, CFG_DEFAULT_MBAR@h
	stw	r3, 0(r4)
#endif /* CFG_DEFAULT_MBAR */

	/* Initialise the MPC8220 processor core			*/
	/*--------------------------------------------------------------*/
	bl	init_8220_core

	/* initialize some things that are hard to access from C	*/
	/*--------------------------------------------------------------*/
	/* set up stack in on-chip SRAM */
	lis	r3, CFG_INIT_RAM_ADDR@h
	ori	r3, r3, CFG_INIT_RAM_ADDR@l
	ori	r1, r3, CFG_INIT_SP_OFFSET
	li	r0, 0			/* Make room for stack frame header and */
	stwu	r0, -4(r1)		/* clear final stack frame so that	*/
	stwu	r0, -4(r1)		/* stack backtraces terminate cleanly	*/

	/* let the C-code set up the rest				*/
	/*								*/
	/* Be careful to keep code relocatable !			*/
	/*--------------------------------------------------------------*/
	GET_GOT				/* initialize GOT access	*/

	/* r3: IMMR */
	bl	cpu_init_f		/* run low-level CPU init code (in Flash) */

	mr	r3, r21
	/* r3: BOOTFLAG */
	bl	board_init_f		/* run 1st part of board init code (in Flash) */

/*
 * Vector Table
 */
	.globl	_start_of_vectors
_start_of_vectors:

/* Machine check */
	STD_EXCEPTION(0x200, MachineCheck, MachineCheckException)

/* Data Storage exception. */
	STD_EXCEPTION(0x300, DataStorage, UnknownException)

/* Instruction Storage exception. */
	STD_EXCEPTION(0x400, InstStorage, UnknownException)

/* External Interrupt exception. */
	STD_EXCEPTION(0x500, ExtInterrupt, external_interrupt)

/* Alignment exception. */
	. = 0x600
Alignment:
	EXCEPTION_PROLOG
	mfspr	r4,DAR
	stw	r4,_DAR(r21)
	mfspr	r5,DSISR
	stw	r5,_DSISR(r21)
	addi	r3,r1,STACK_FRAME_OVERHEAD
	li	r20,MSR_KERNEL
	rlwimi	r20,r23,0,16,16		/* copy EE bit from saved MSR */
	rlwimi	r20,r23,0,25,25		/* copy IP bit from saved MSR */
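	/*
	 * PowerPC numbers bits from the MSB: MSR bit 16 is EE (0x8000) and
	 * bit 25 is IP (0x40), so the two rlwimi instructions merge just
	 * those two bits of the interrupted MSR (held in r23 by the
	 * exception prologue) into the MSR_KERNEL value for the handler.
	 */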
	lwz	r6,GOT(transfer_to_handler)
	mtlr	r6
	blrl
.L_Alignment:
	.long	AlignmentException - _start + EXC_OFF_SYS_RESET
	.long	int_return - _start + EXC_OFF_SYS_RESET

/* Program check exception */
	. = 0x700
ProgramCheck:
	EXCEPTION_PROLOG
	addi	r3,r1,STACK_FRAME_OVERHEAD
	li	r20,MSR_KERNEL
	rlwimi	r20,r23,0,16,16		/* copy EE bit from saved MSR */
	rlwimi	r20,r23,0,25,25		/* copy IP bit from saved MSR */
	lwz	r6,GOT(transfer_to_handler)
	mtlr	r6
	blrl
.L_ProgramCheck:
	.long	ProgramCheckException - _start + EXC_OFF_SYS_RESET
	.long	int_return - _start + EXC_OFF_SYS_RESET

	STD_EXCEPTION(0x800, FPUnavailable, UnknownException)

	/* I guess we could implement decrementer, and may have
	 * to someday for timekeeping.
	 */
	STD_EXCEPTION(0x900, Decrementer, timer_interrupt)

	STD_EXCEPTION(0xa00, Trap_0a, UnknownException)
	STD_EXCEPTION(0xb00, Trap_0b, UnknownException)
	STD_EXCEPTION(0xc00, SystemCall, UnknownException)
	STD_EXCEPTION(0xd00, SingleStep, UnknownException)

	STD_EXCEPTION(0xe00, Trap_0e, UnknownException)
	STD_EXCEPTION(0xf00, Trap_0f, UnknownException)

	STD_EXCEPTION(0x1000, InstructionTLBMiss, UnknownException)
	STD_EXCEPTION(0x1100, DataLoadTLBMiss, UnknownException)
	STD_EXCEPTION(0x1200, DataStoreTLBMiss, UnknownException)
#ifdef DEBUG
	. = 0x1300
	/*
	 * This exception occurs when the program counter matches the
	 * Instruction Address Breakpoint Register (IABR).
	 *
	 * I want the cpu to halt if this occurs so I can hunt around
	 * with the debugger and look at things.
	 *
	 * When DEBUG is defined, both machine check enable (in the MSR)
	 * and checkstop reset enable (in the reset mode register) are
	 * turned off and so a checkstop condition will result in the cpu
	 * halting.
	 *
	 * I force the cpu into a checkstop condition by putting an illegal
	 * instruction here (at least this is the theory).
	 *
	 * Well - that didn't work, so just do an infinite loop!
	 */
1:	b	1b
#else
	STD_EXCEPTION(0x1300, InstructionBreakpoint, DebugException)
#endif

	STD_EXCEPTION(0x1400, SMI, UnknownException)

	STD_EXCEPTION(0x1500, Trap_15, UnknownException)
	STD_EXCEPTION(0x1600, Trap_16, UnknownException)
	STD_EXCEPTION(0x1700, Trap_17, UnknownException)
	STD_EXCEPTION(0x1800, Trap_18, UnknownException)
	STD_EXCEPTION(0x1900, Trap_19, UnknownException)
	STD_EXCEPTION(0x1a00, Trap_1a, UnknownException)
	STD_EXCEPTION(0x1b00, Trap_1b, UnknownException)
	STD_EXCEPTION(0x1c00, Trap_1c, UnknownException)
	STD_EXCEPTION(0x1d00, Trap_1d, UnknownException)
	STD_EXCEPTION(0x1e00, Trap_1e, UnknownException)
	STD_EXCEPTION(0x1f00, Trap_1f, UnknownException)
	STD_EXCEPTION(0x2000, Trap_20, UnknownException)
	STD_EXCEPTION(0x2100, Trap_21, UnknownException)
	STD_EXCEPTION(0x2200, Trap_22, UnknownException)
	STD_EXCEPTION(0x2300, Trap_23, UnknownException)
	STD_EXCEPTION(0x2400, Trap_24, UnknownException)
	STD_EXCEPTION(0x2500, Trap_25, UnknownException)
	STD_EXCEPTION(0x2600, Trap_26, UnknownException)
	STD_EXCEPTION(0x2700, Trap_27, UnknownException)
	STD_EXCEPTION(0x2800, Trap_28, UnknownException)
	STD_EXCEPTION(0x2900, Trap_29, UnknownException)
	STD_EXCEPTION(0x2a00, Trap_2a, UnknownException)
	STD_EXCEPTION(0x2b00, Trap_2b, UnknownException)
	STD_EXCEPTION(0x2c00, Trap_2c, UnknownException)
	STD_EXCEPTION(0x2d00, Trap_2d, UnknownException)
	STD_EXCEPTION(0x2e00, Trap_2e, UnknownException)
	STD_EXCEPTION(0x2f00, Trap_2f, UnknownException)

	.globl	_end_of_vectors
_end_of_vectors:

	. = 0x3000

/*
 * This code finishes saving the registers to the exception frame
 * and jumps to the appropriate handler for the exception.
 * Register r21 is a pointer into the trap frame, r1 has the new stack pointer.
 */
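/*
 * Each vector stub (including those generated by STD_EXCEPTION) reaches
 * this code with a "blrl", so on entry LR points at the two .long words
 * stored right after the blrl (e.g. .L_Alignment): the handler address
 * and the int_return address.  The mflr/lwz sequence below picks them up.
 */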
	.globl	transfer_to_handler
transfer_to_handler:
	stw	r22,_NIP(r21)
	lis	r22,MSR_POW@h
	andc	r23,r23,r22
	stw	r23,_MSR(r21)
	SAVE_GPR(7, r21)
	SAVE_4GPRS(8, r21)
	SAVE_8GPRS(12, r21)
	SAVE_8GPRS(24, r21)
	mflr	r23
	andi.	r24,r23,0x3f00		/* get vector offset */
	stw	r24,TRAP(r21)
	li	r22,0
	stw	r22,RESULT(r21)
	lwz	r24,0(r23)		/* virtual address of handler */
	lwz	r23,4(r23)		/* where to go when done */
	mtspr	SRR0,r24
	mtspr	SRR1,r20
	mtlr	r23
	SYNC
	rfi				/* jump to handler, enable MMU */

int_return:
	mfmsr	r28			/* Disable interrupts */
	li	r4,0
	ori	r4,r4,MSR_EE
	andc	r28,r28,r4
	SYNC				/* Some chip revs need this... */
	mtmsr	r28
	SYNC
	lwz	r2,_CTR(r1)
	lwz	r0,_LINK(r1)
	mtctr	r2
	mtlr	r0
	lwz	r2,_XER(r1)
	lwz	r0,_CCR(r1)
	mtspr	XER,r2
	mtcrf	0xFF,r0
	REST_10GPRS(3, r1)
	REST_10GPRS(13, r1)
	REST_8GPRS(23, r1)
	REST_GPR(31, r1)
	lwz	r2,_NIP(r1)		/* Restore environment */
	lwz	r0,_MSR(r1)
	mtspr	SRR0,r2
	mtspr	SRR1,r0
	lwz	r0,GPR0(r1)
	lwz	r2,GPR2(r1)
	lwz	r1,GPR1(r1)
	SYNC
	rfi

/*
 * This code initialises the MPC8220 processor core
 * (conforms to PowerPC 603e spec)
 * Note: expects original MSR contents to be in r5.
 */
	.globl	init_8220_core
init_8220_core:

	/* Initialize machine status; enable machine check interrupt	*/
	/*--------------------------------------------------------------*/
	li	r3, MSR_KERNEL		/* Set ME and RI flags		*/
	rlwimi	r3, r5, 0, 25, 25	/* preserve IP bit set by HRCW	*/
#ifdef DEBUG
	rlwimi	r3, r5, 0, 21, 22	/* debugger might set SE & BE bits */
#endif
	SYNC				/* Some chip revs need this...	*/
	mtmsr	r3
	SYNC
	mtspr	SRR1, r3		/* Make SRR1 match MSR		*/

	/* Initialize the Hardware Implementation-dependent Registers	*/
	/* HID0 also contains cache control				*/
	/*--------------------------------------------------------------*/
	lis	r3, CFG_HID0_INIT@h
	ori	r3, r3, CFG_HID0_INIT@l
	SYNC
	mtspr	HID0, r3

	lis	r3, CFG_HID0_FINAL@h
	ori	r3, r3, CFG_HID0_FINAL@l
	SYNC
	mtspr	HID0, r3

	/* Enable Extra BATs */
	mfspr	r3, 1011		/* HID2 */
	lis	r4, 0x0004
	ori	r4, r4, 0x0000
	or	r4, r4, r3
	mtspr	1011, r4
	sync
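	/*
	 * The bit set above (0x00040000) is the high-BAT-enable (HBE)
	 * control in HID2 on this core family, which makes the additional
	 * pairs IBAT4-7/DBAT4-7 usable; that is why all eight BAT pairs
	 * are cleared below, not just the first four.
	 */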
	/* clear all BAT's						*/
	/*--------------------------------------------------------------*/
	li	r0, 0
	mtspr	DBAT0U, r0
	mtspr	DBAT0L, r0
	mtspr	DBAT1U, r0
	mtspr	DBAT1L, r0
	mtspr	DBAT2U, r0
	mtspr	DBAT2L, r0
	mtspr	DBAT3U, r0
	mtspr	DBAT3L, r0
	mtspr	DBAT4U, r0
	mtspr	DBAT4L, r0
	mtspr	DBAT5U, r0
	mtspr	DBAT5L, r0
	mtspr	DBAT6U, r0
	mtspr	DBAT6L, r0
	mtspr	DBAT7U, r0
	mtspr	DBAT7L, r0
	mtspr	IBAT0U, r0
	mtspr	IBAT0L, r0
	mtspr	IBAT1U, r0
	mtspr	IBAT1L, r0
	mtspr	IBAT2U, r0
	mtspr	IBAT2L, r0
	mtspr	IBAT3U, r0
	mtspr	IBAT3L, r0
	mtspr	IBAT4U, r0
	mtspr	IBAT4L, r0
	mtspr	IBAT5U, r0
	mtspr	IBAT5L, r0
	mtspr	IBAT6U, r0
	mtspr	IBAT6L, r0
	mtspr	IBAT7U, r0
	mtspr	IBAT7L, r0
	SYNC

	/* invalidate all tlb's						*/
	/*								*/
	/* From the 603e User Manual: "The 603e provides the ability to	*/
	/* invalidate a TLB entry. The TLB Invalidate Entry (tlbie)	*/
	/* instruction invalidates the TLB entry indexed by the EA, and	*/
	/* operates on both the instruction and data TLBs simultaneously*/
	/* invalidating four TLB entries (both sets in each TLB). The	*/
	/* index corresponds to bits 15-19 of the EA. To invalidate all	*/
	/* entries within both TLBs, 32 tlbie instructions should be	*/
	/* issued, incrementing this field by one each time."		*/
	/*								*/
	/* "Note that the tlbia instruction is not implemented on the	*/
	/* 603e."							*/
	/*								*/
	/* bits 15-19 correspond to addresses 0x00000000 to 0x0001F000	*/
	/* incrementing by 0x1000 each time. The code below is sort of	*/
	/* based on code in "flush_tlbs" from arch/ppc/kernel/head.S	*/
	/*								*/
	/*--------------------------------------------------------------*/
	li	r3, 32
	mtctr	r3
	li	r3, 0
1:	tlbie	r3
	addi	r3, r3, 0x1000
	bdnz	1b
	SYNC

	/* Done!							*/
	/*--------------------------------------------------------------*/
	blr

/* Cache functions.
 *
 * Note: requires that all cache bits in
 * HID0 are in the low half word.
 */
	.globl	icache_enable
icache_enable:
	lis	r4, 0
	ori	r4, r4, CFG_HID0_INIT	/* set ICE & ICFI bit	*/
	rlwinm	r3, r4, 0, 21, 19	/* clear the ICFI bit	*/
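	/*
	 * rlwinm rD,rS,0,MB,ME with MB > ME builds a wrap-around mask that
	 * keeps everything except bits ME+1..MB-1, so 0,21,19 clears only
	 * bit 20 (ICFI); the same idiom is used below to clear ICE, DCFI
	 * and DCE.
	 */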
	/*
	 * The setting of the instruction cache enable (ICE) bit must be
	 * preceded by an isync instruction to prevent the cache from being
	 * enabled or disabled while an instruction access is in progress.
	 */
	isync
	mtspr	HID0, r4		/* Enable Instr Cache & Inval cache */
	mtspr	HID0, r3		/* using 2 consec instructions	*/
	isync
	blr

	.globl	icache_disable
icache_disable:
	mfspr	r3, HID0
	rlwinm	r3, r3, 0, 17, 15	/* clear the ICE bit */
	mtspr	HID0, r3
	isync
	blr

	.globl	icache_status
icache_status:
	mfspr	r3, HID0
	rlwinm	r3, r3, HID0_ICE_BITPOS + 1, 31, 31
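	/*
	 * The rotate by HID0_ICE_BITPOS + 1 moves the ICE bit into the
	 * least significant position and the 31,31 mask isolates it, so
	 * r3 returns 0 or 1; dcache_status below does the same for DCE.
	 */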
	blr

	.globl	dcache_enable
dcache_enable:
	lis	r4, 0
	ori	r4, r4, HID0_DCE|HID0_DCFI	/* set DCE & DCFI bit	*/
	rlwinm	r3, r4, 0, 22, 20		/* clear the DCFI bit	*/

	/* Enable address translation in MSR bit */
	mfmsr	r5
	ori	r5, r5, 0x

	/*
	 * The setting of the data cache enable (DCE) bit must be
	 * preceded by an isync instruction to prevent the cache from being
	 * enabled or disabled while an instruction access is in progress.
	 */
	isync
	mtspr	HID0, r4		/* Enable Data Cache & Inval cache */
	mtspr	HID0, r3		/* using 2 consec instructions	*/
	isync
	blr

	.globl	dcache_disable
dcache_disable:
	mfspr	r3, HID0
	rlwinm	r3, r3, 0, 18, 16	/* clear the DCE bit */
	mtspr	HID0, r3
	isync
	blr

	.globl	dcache_status
dcache_status:
	mfspr	r3, HID0
	rlwinm	r3, r3, HID0_DCE_BITPOS + 1, 31, 31
	blr

	.globl	get_pvr
get_pvr:
	mfspr	r3, PVR
	blr

/*------------------------------------------------------------------------------*/

/*
 * void relocate_code (addr_sp, gd, addr_moni)
 *
 * This "function" does not return, instead it continues in RAM
 * after relocating the monitor code.
 *
 * On entry r3 = addr_sp (new stack pointer), r4 = gd (global data
 * pointer) and r5 = addr_moni (destination address); the preamble
 * below re-arranges these into the copy parameters:
 *
 *	r3 = dest
 *	r4 = src
 *	r5 = length in bytes
 *	r6 = cachelinesize
 */
	.globl	relocate_code
relocate_code:
	mr	r1,  r3			/* Set new stack pointer	*/
	mr	r9,  r4			/* Save copy of Global Data pointer */
	mr	r10, r5			/* Save copy of Destination Address */

	mr	r3,  r5			/* Destination Address		*/
	lis	r4, CFG_MONITOR_BASE@h	/* Source      Address		*/
	ori	r4, r4, CFG_MONITOR_BASE@l
	lwz	r5, GOT(__init_end)
	sub	r5, r5, r4
	li	r6, CFG_CACHELINE_SIZE	/* Cache Line Size		*/

	/*
	 * Fix GOT pointer:
	 *
	 * New GOT-PTR = (old GOT-PTR - CFG_MONITOR_BASE) + Destination Address
	 *
	 * Offset:
	 */
	sub	r15, r10, r4

	/* First our own GOT */
	add	r14, r14, r15
	/* then the one used by the C code */
	add	r30, r30, r15

	/*
	 * Now relocate code
	 */
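	/*
	 * The image is copied one word at a time; the cmplw below compares
	 * the destination (r3) with the source (r4) to pick the copy
	 * direction: if the destination is lower, copy forward (label 1),
	 * otherwise copy backward from the end (labels 2/3) so that an
	 * overlapping copy never overwrites source words before they have
	 * been read.
	 */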
	cmplw	cr1,r3,r4
	addi	r0,r5,3
	srwi.	r0,r0,2
	beq	cr1,4f			/* In place copy is not necessary */
	beq	7f			/* Protect against 0 count	*/
	mtctr	r0
	bge	cr1,2f

	la	r8,-4(r4)
	la	r7,-4(r3)
1:	lwzu	r0,4(r8)
	stwu	r0,4(r7)
	bdnz	1b
	b	4f

2:	slwi	r0,r0,2
	add	r8,r4,r0
	add	r7,r3,r0
3:	lwzu	r0,-4(r8)
	stwu	r0,-4(r7)
	bdnz	3b

/*
 * Now flush the cache: note that we must start from a cache aligned
 * address. Otherwise we might miss one cache line.
 */
4:	cmpwi	r6,0
	add	r5,r3,r5
	beq	7f			/* Always flush prefetch queue in any case */
	subi	r0,r6,1
	andc	r3,r3,r0
	mfspr	r7,HID0			/* don't do dcbst if dcache is disabled */
	rlwinm	r7,r7,HID0_DCE_BITPOS+1,31,31
	cmpwi	r7,0
	beq	9f
	mr	r4,r3
5:	dcbst	0,r4
	add	r4,r4,r6
	cmplw	r4,r5
	blt	5b
	sync				/* Wait for all dcbst to complete on bus */
9:	mfspr	r7,HID0			/* don't do icbi if icache is disabled */
	rlwinm	r7,r7,HID0_ICE_BITPOS+1,31,31
	cmpwi	r7,0
	beq	7f
	mr	r4,r3
6:	icbi	0,r4
	add	r4,r4,r6
	cmplw	r4,r5
	blt	6b
7:	sync				/* Wait for all icbi to complete on bus */
	isync

/*
 * We are done. Do not return, instead branch to second part of board
 * initialization, now running from RAM.
 */
	addi	r0, r10, in_ram - _start + EXC_OFF_SYS_RESET
	mtlr	r0
	blr

in_ram:
	/*
	 * Relocation Function, r14 points to got2+0x8000
	 *
	 * Adjust got2 pointers, no need to check for 0, this code
	 * already puts a few entries in the table.
	 */
	li	r0,__got2_entries@sectoff@l
	la	r3,GOT(_GOT2_TABLE_)
	lwz	r11,GOT(_GOT2_TABLE_)
	mtctr	r0
	sub	r11,r3,r11
	addi	r3,r3,-4
1:	lwzu	r0,4(r3)
	add	r0,r0,r11
	stw	r0,0(r3)
	bdnz	1b

	/*
	 * Now adjust the fixups and the pointers to the fixups
	 * in case we need to move ourselves again.
	 */
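	/*
	 * Each entry in the fixup table points at a word that holds an
	 * absolute address.  The loop below follows each (relocated)
	 * pointer and adds the relocation offset in r11 to the word it
	 * refers to, so absolute addresses embedded in the image stay
	 * valid at the new location.
	 */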
2:	li	r0,__fixup_entries@sectoff@l
	lwz	r3,GOT(_FIXUP_TABLE_)
	cmpwi	r0,0
	mtctr	r0
	addi	r3,r3,-4
	beq	4f
3:	lwzu	r4,4(r3)
	lwzux	r0,r4,r11
	add	r0,r0,r11
	stw	r10,0(r3)
	stw	r0,0(r4)
	bdnz	3b
4:
clear_bss:
	/*
	 * Now clear BSS segment
	 */
	lwz	r3,GOT(__bss_start)
	lwz	r4,GOT(_end)

	cmplw	0, r3, r4
	beq	6f

	li	r0, 0
5:
	stw	r0, 0(r3)
	addi	r3, r3, 4
	cmplw	0, r3, r4
	bne	5b
6:
	mr	r3, r9			/* Global Data pointer		*/
	mr	r4, r10			/* Destination Address		*/
	bl	board_init_r

/*
 * Copy exception vector code to low memory
 *
 * r3: dest_addr
 * r7: source address, r8: end address, r9: target address
 */
	.globl	trap_init
trap_init:
	lwz	r7, GOT(_start)
	lwz	r8, GOT(_end_of_vectors)

	li	r9, 0x100		/* reset vector always at 0x100 */

	cmplw	0, r7, r8
	bgelr				/* return if r7>=r8 - just in case */

	mflr	r4			/* save link register		*/
1:
	lwz	r0, 0(r7)
	stw	r0, 0(r9)
	addi	r7, r7, 4
	addi	r9, r9, 4
	cmplw	0, r7, r8
	bne	1b

	/*
	 * relocate `hdlr' and `int_return' entries
	 */
	li	r7, .L_MachineCheck - _start + EXC_OFF_SYS_RESET
	li	r8, Alignment - _start + EXC_OFF_SYS_RESET
2:
	bl	trap_reloc
	addi	r7, r7, 0x100		/* next exception vector	*/
	cmplw	0, r7, r8
	blt	2b

	li	r7, .L_Alignment - _start + EXC_OFF_SYS_RESET
	bl	trap_reloc

	li	r7, .L_ProgramCheck - _start + EXC_OFF_SYS_RESET
	bl	trap_reloc

	li	r7, .L_FPUnavailable - _start + EXC_OFF_SYS_RESET
	li	r8, SystemCall - _start + EXC_OFF_SYS_RESET
3:
	bl	trap_reloc
	addi	r7, r7, 0x100		/* next exception vector	*/
	cmplw	0, r7, r8
	blt	3b

	li	r7, .L_SingleStep - _start + EXC_OFF_SYS_RESET
	li	r8, _end_of_vectors - _start + EXC_OFF_SYS_RESET
4:
	bl	trap_reloc
	addi	r7, r7, 0x100		/* next exception vector	*/
	cmplw	0, r7, r8
	blt	4b

	mfmsr	r3			/* now that the vectors have	*/
	lis	r7, MSR_IP@h		/* relocated into low memory	*/
	ori	r7, r7, MSR_IP@l	/* MSR[IP] can be turned off	*/
	andc	r3, r3, r7		/* (if it was on)		*/
	SYNC				/* Some chip revs need this...	*/
	mtmsr	r3
	SYNC

	mtlr	r4			/* restore link register	*/
	blr

/*
 * Function: relocate entries for one exception vector
 */
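/*
 * On entry r7 points at the pair of .long words stored after a vector
 * stub (handler address, int_return address) and r3 still holds
 * dest_addr; both words are adjusted by dest_addr so they refer to the
 * relocated copy of the code.
 */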
trap_reloc:
	lwz	r0, 0(r7)		/* hdlr ...			*/
	add	r0, r0, r3		/* ... += dest_addr		*/
	stw	r0, 0(r7)

	lwz	r0, 4(r7)		/* int_return ...		*/
	add	r0, r0, r3		/* ... += dest_addr		*/
	stw	r0, 4(r7)

	blr