/*
 * armboot - Startup Code for OMAP2420/ARM1136 CPU-core
 *
 * Copyright (c) 2004 Texas Instruments <r-woodruff2@ti.com>
 *
 * Copyright (c) 2001 Marius Gröger <mag@sysgo.de>
 * Copyright (c) 2002 Alex Züpke <azu@sysgo.de>
 * Copyright (c) 2002 Gary Jennejohn <garyj@denx.de>
 * Copyright (c) 2003 Richard Woodruff <r-woodruff2@ti.com>
 * Copyright (c) 2003 Kshitij <kshitij@ti.com>
 *
 * See file CREDITS for list of people who contributed to this
 * project.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 2 of
 * the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston,
 * MA 02111-1307 USA
 */
#include <asm-offsets.h>
#include <config.h>
#include <version.h>

.globl _start
_start: b	reset
#ifdef CONFIG_SPL_BUILD
	ldr	pc, _hang
	ldr	pc, _hang
	ldr	pc, _hang
	ldr	pc, _hang
	ldr	pc, _hang
	ldr	pc, _hang
	ldr	pc, _hang

_hang:
	.word	do_hang
	.word	0x12345678
	.word	0x12345678
	.word	0x12345678
	.word	0x12345678
	.word	0x12345678
	.word	0x12345678
	.word	0x12345678	/* now 16*4=64 */
#else
	ldr	pc, _undefined_instruction
	ldr	pc, _software_interrupt
	ldr	pc, _prefetch_abort
	ldr	pc, _data_abort
	ldr	pc, _not_used
	ldr	pc, _irq
	ldr	pc, _fiq

_undefined_instruction:	.word undefined_instruction
_software_interrupt:	.word software_interrupt
_prefetch_abort:	.word prefetch_abort
_data_abort:		.word data_abort
_not_used:		.word not_used
_irq:			.word irq
_fiq:			.word fiq
_pad:			.word 0x12345678 /* now 16*4=64 */
#endif	/* CONFIG_SPL_BUILD */
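
/*
 * Both halves of the #ifdef above pad the vector table and its literal
 * words out to 16 words (64 bytes); this matches the 64-byte vector
 * copy performed in the CONFIG_OMAP2420H4 path of the reset code below.
 */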
.global _end_vect
_end_vect:
	.balignl 16,0xdeadbeef
/*
 *************************************************************************
 *
 * Startup Code (reset vector)
 *
 * do important init only if we don't start from memory!
 * setup Memory and board specific bits prior to relocation.
 * relocate armboot to ram
 * setup stack
 *
 *************************************************************************
 */

.globl _TEXT_BASE
_TEXT_BASE:
#if defined(CONFIG_SPL_BUILD) && defined(CONFIG_SPL_TEXT_BASE)
	.word	CONFIG_SPL_TEXT_BASE
#else
	.word	CONFIG_SYS_TEXT_BASE
#endif

/*
 * These are defined in the board-specific linker script.
 * Subtracting _start from them lets the linker put their
 * relative position in the executable instead of leaving
 * them null.
 */
.globl _bss_start_ofs
_bss_start_ofs:
	.word __bss_start - _start

.globl _bss_end_ofs
_bss_end_ofs:
	.word __bss_end - _start

.globl _end_ofs
_end_ofs:
	.word _end - _start

#ifdef CONFIG_USE_IRQ
/* IRQ stack memory (calculated at run-time) */
.globl IRQ_STACK_START
IRQ_STACK_START:
	.word	0x0badc0de
/* FIQ stack memory (calculated at run-time) */
.globl FIQ_STACK_START
FIQ_STACK_START:
	.word	0x0badc0de
#endif

/* IRQ stack memory (calculated at run-time) + 8 bytes */
.globl IRQ_STACK_START_IN
IRQ_STACK_START_IN:
	.word	0x0badc0de

/*
 * the actual reset code
 */
reset:
	/*
	 * set the cpu to SVC32 mode
	 */
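	/*
	 * 0xd3 = supervisor mode (0x13) with the I and F bits set, so IRQs
	 * and FIQs stay masked while the rest of the startup code runs.
	 */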
	mrs	r0, cpsr
	bic	r0, r0, #0x1f
	orr	r0, r0, #0xd3
	msr	cpsr, r0

#ifdef CONFIG_OMAP2420H4
	/* Copy vectors to mask ROM indirect addr */
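	/*
	 * The loop below copies 64 bytes starting just after the reset
	 * vector, in two 8-register (32-byte) ldm/stm bursts.  The SRAM
	 * destination is assembled from the board's SRAM_OFFSET0/1/2
	 * constants because an ARM mov immediate can only encode an
	 * 8-bit value (rotated), not a full 32-bit address.
	 */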
	adr	r0, _start		/* r0 <- current position of code */
	add	r0, r0, #4		/* skip reset vector		  */
	mov	r2, #64			/* r2 <- size to copy		  */
	add	r2, r0, r2		/* r2 <- source end address	  */
	mov	r1, #SRAM_OFFSET0	/* build vect addr		  */
	mov	r3, #SRAM_OFFSET1
	add	r1, r1, r3
	mov	r3, #SRAM_OFFSET2
	add	r1, r1, r3
next:
	ldmia	r0!, {r3 - r10}		/* copy from source address [r0]  */
	stmia	r1!, {r3 - r10}		/* copy to   target address [r1]  */
	cmp	r0, r2			/* until source end address [r2]  */
	bne	next			/* loop until equal		  */

	bl	cpy_clk_code		/* put dpll adjust code behind vectors */
#endif
	/* the mask ROM code should have PLL and others stable */
#ifndef CONFIG_SKIP_LOWLEVEL_INIT
	bl	cpu_init_crit
#endif

	bl	_main

/*------------------------------------------------------------------------------*/

#ifndef CONFIG_SPL_BUILD
/*
 * void relocate_code(addr_moni)
 *
 * This function relocates the monitor code.
 */
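/*
 * On entry r0 holds the destination address.  The relocation offset is
 * computed into r9 and reused by the .rel.dyn fixup loop; if the code is
 * already running at the destination, both the copy and the fixups are
 * skipped.
 */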
.globl relocate_code
relocate_code:
	mov	r6, r0			/* save addr of destination	 */

	adr	r0, _start
	subs	r9, r6, r0		/* r9 <- relocation offset	 */
	beq	relocate_done		/* skip relocation		 */
	mov	r1, r6			/* r1 <- scratch for copy_loop	 */
	ldr	r3, _image_copy_end_ofs
	add	r2, r0, r3		/* r2 <- source end address	 */

copy_loop:
	ldmia	r0!, {r10-r11}		/* copy from source address [r0] */
	stmia	r1!, {r10-r11}		/* copy to   target address [r1] */
	cmp	r0, r2			/* until source end address [r2] */
	blo	copy_loop

	/*
	 * fix .rel.dyn relocations
	 */
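	/*
	 * Each .rel.dyn entry is an 8-byte Elf32_Rel: word 0 is r_offset
	 * (the location to patch), word 1 is r_info, whose low byte is the
	 * relocation type (23 = R_ARM_RELATIVE, 2 = R_ARM_ABS32) and whose
	 * upper 24 bits are the .dynsym symbol index.  Each Elf32_Sym is
	 * 16 bytes, so "r_info LSR #4" in fixabs converts that index
	 * straight into a byte offset into the symbol table (the type bits
	 * shift out for type 2).
	 */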
	ldr	r0, _TEXT_BASE		/* r0 <- Text base		 */
	ldr	r10, _dynsym_start_ofs	/* r10 <- sym table ofs		 */
	add	r10, r10, r0		/* r10 <- sym table in FLASH	 */
	ldr	r2, _rel_dyn_start_ofs	/* r2 <- rel dyn start ofs	 */
	add	r2, r2, r0		/* r2 <- rel dyn start in FLASH	 */
	ldr	r3, _rel_dyn_end_ofs	/* r3 <- rel dyn end ofs	 */
	add	r3, r3, r0		/* r3 <- rel dyn end in FLASH	 */
fixloop:
	ldr	r0, [r2]		/* r0 <- location to fix up, IN FLASH! */
	add	r0, r0, r9		/* r0 <- location to fix up in RAM */
	ldr	r1, [r2, #4]
	and	r7, r1, #0xff
	cmp	r7, #23			/* relative fixup? */
	beq	fixrel
	cmp	r7, #2			/* absolute fixup? */
	beq	fixabs
	/* ignore unknown type of fixup */
	b	fixnext
fixabs:
	/* absolute fix: set location to (offset) symbol value */
	mov	r1, r1, LSR #4		/* r1 <- symbol index in .dynsym */
	add	r1, r10, r1		/* r1 <- address of symbol in table */
	ldr	r1, [r1, #4]		/* r1 <- symbol value */
	add	r1, r1, r9		/* r1 <- relocated sym addr */
	b	fixnext
fixrel:
	/* relative fix: increase location by offset */
	ldr	r1, [r0]
	add	r1, r1, r9
fixnext:
	str	r1, [r0]
	add	r2, r2, #8		/* each rel.dyn entry is 8 bytes */
	cmp	r2, r3
	blo	fixloop

relocate_done:
	bx	lr

_image_copy_end_ofs:
	.word	__image_copy_end - _start
_rel_dyn_start_ofs:
	.word	__rel_dyn_start - _start
_rel_dyn_end_ofs:
	.word	__rel_dyn_end - _start
_dynsym_start_ofs:
	.word	__dynsym_start - _start
#endif

.globl c_runtime_cpu_setup
c_runtime_cpu_setup:
	bx	lr

/*
 *************************************************************************
 *
 * CPU_init_critical registers
 *
 * setup important registers
 * setup memory timing
 *
 *************************************************************************
 */
#ifndef CONFIG_SKIP_LOWLEVEL_INIT
cpu_init_crit:
	/*
	 * invalidate I/D caches and TLBs
	 */
	mov	r0, #0
	mcr	p15, 0, r0, c7, c7, 0	/* invalidate I+D+BTB caches */
	mcr	p15, 0, r0, c8, c7, 0	/* invalidate unified TLB */
	/*
	 * disable MMU stuff and caches
	 */
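	/*
	 * CP15 c1 (control register) bits touched below:
	 * bit 0 = M (MMU), bit 1 = A (alignment check), bit 2 = C (D-cache),
	 * bit 7 = B (big-endian), bits 9:8 = R/S (protection),
	 * bit 12 = I (I-cache), bit 13 = V (high exception vectors).
	 */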
	mrc	p15, 0, r0, c1, c0, 0
	bic	r0, r0, #0x00002300	@ clear bits 13, 9:8 (--V- --RS)
	bic	r0, r0, #0x00000087	@ clear bits 7, 2:0 (B--- -CAM)
	orr	r0, r0, #0x00000002	@ set bit 1 (A) Align
	orr	r0, r0, #0x00001000	@ set bit 12 (I) I-Cache
	mcr	p15, 0, r0, c1, c0, 0
	/*
	 * Jump to board specific initialization... The Mask ROM will have
	 * already initialized basic memory.  Go here to bump up clock rate
	 * and handle wake up conditions.
	 */
	mov	ip, lr		/* preserve link reg across call */
	bl	lowlevel_init	/* go setup pll, mux, memory */
	mov	lr, ip		/* restore link */
	mov	pc, lr		/* back to my caller */
#endif /* CONFIG_SKIP_LOWLEVEL_INIT */

#ifndef CONFIG_SPL_BUILD
/*
 *************************************************************************
 *
 * Interrupt handling
 *
 *************************************************************************
 */
@
@ IRQ stack frame.
@
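@
@ The S_* offsets describe an 18-word (72-byte) exception frame holding
@ r0-r12, sp, lr, pc, cpsr and a copy of the original r0, mirroring the
@ struct pt_regs layout that the C-level do_* handlers expect.
@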
#define S_FRAME_SIZE	72
#define S_OLD_R0	68
#define S_PSR		64
#define S_PC		60
#define S_LR		56
#define S_SP		52
#define S_IP		48
#define S_FP		44
#define S_R10		40
#define S_R9		36
#define S_R8		32
#define S_R7		28
#define S_R6		24
#define S_R5		20
#define S_R4		16
#define S_R3		12
#define S_R2		8
#define S_R1		4
#define S_R0		0

#define MODE_SVC	0x13
#define I_BIT		0x80

/*
 * use bad_save_user_regs for abort/prefetch/undef/swi ...
 * use irq_save_user_regs / irq_restore_user_regs for IRQ/FIQ handling
 */

	.macro	bad_save_user_regs
	sub	sp, sp, #S_FRAME_SIZE	@ carve out a frame on current user stack
	stmia	sp, {r0 - r12}		@ Save user registers (now in svc mode) r0-r12
	ldr	r2, IRQ_STACK_START_IN	@ set base 2 words into abort stack
	ldmia	r2, {r2 - r3}		@ get values for "aborted" pc and cpsr (into parm regs)
	add	r0, sp, #S_FRAME_SIZE	@ grab pointer to old stack
	add	r5, sp, #S_SP
	mov	r1, lr
	stmia	r5, {r0 - r3}		@ save sp_SVC, lr_SVC, pc, cpsr
	mov	r0, sp			@ save current stack into r0 (param register)
	.endm

	.macro	irq_save_user_regs
	sub	sp, sp, #S_FRAME_SIZE
	stmia	sp, {r0 - r12}		@ Calling r0-r12
	add	r8, sp, #S_PC		@ !!!! R8 NEEDS to be saved !!!! a reserved stack spot would be good.
	stmdb	r8, {sp, lr}^		@ Calling SP, LR
	str	lr, [r8, #0]		@ Save calling PC
	mrs	r6, spsr
	str	r6, [r8, #4]		@ Save CPSR
	str	r0, [r8, #8]		@ Save OLD_R0
	mov	r0, sp
	.endm

	.macro	irq_restore_user_regs
	ldmia	sp, {r0 - lr}^		@ Calling r0 - lr
	mov	r0, r0
	ldr	lr, [sp, #S_PC]		@ Get PC
	add	sp, sp, #S_FRAME_SIZE
	subs	pc, lr, #4		@ return & move spsr_svc into cpsr
	.endm
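
/*
 * get_bad_stack runs in the banked exception mode: it stashes the
 * exception lr and spsr in the two words at IRQ_STACK_START_IN (which
 * bad_save_user_regs later reads back as the "aborted" pc and cpsr),
 * then loads MODE_SVC into spsr so that the closing "movs pc, lr"
 * both falls through to the next instruction and switches the CPU
 * into SVC mode.
 */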
	.macro	get_bad_stack
	ldr	r13, IRQ_STACK_START_IN	@ setup our mode stack (enter in banked mode)

	str	lr, [r13]		@ save caller lr in position 0 of saved stack
	mrs	lr, spsr		@ get the spsr
	str	lr, [r13, #4]		@ save spsr in position 1 of saved stack

	mov	r13, #MODE_SVC		@ prepare SVC-Mode
	@ msr	spsr_c, r13
	msr	spsr, r13		@ switch modes, make sure moves will execute
	mov	lr, pc			@ capture return pc
	movs	pc, lr			@ jump to next instruction & switch modes.
	.endm

	.macro	get_bad_stack_swi
	sub	r13, r13, #4		@ space on current stack for scratch reg.
	str	r0, [r13]		@ save R0's value.
	ldr	r0, IRQ_STACK_START_IN	@ get data regions start

	str	lr, [r0]		@ save caller lr in position 0 of saved stack
	mrs	lr, spsr		@ get the spsr
	str	lr, [r0, #4]		@ save spsr in position 1 of saved stack

	ldr	lr, [r0]		@ restore lr
	ldr	r0, [r13]		@ restore r0
	add	r13, r13, #4		@ pop stack entry
	.endm

	.macro	get_irq_stack		@ setup IRQ stack
	ldr	sp, IRQ_STACK_START
	.endm

	.macro	get_fiq_stack		@ setup FIQ stack
	ldr	sp, FIQ_STACK_START
	.endm
#endif	/* CONFIG_SPL_BUILD */

/*
 * exception handlers
 */
#ifdef CONFIG_SPL_BUILD
	.align	5
do_hang:
	ldr	sp, _TEXT_BASE		/* uses about 32 words of stack */
	bl	hang			/* hang and never return */
#else	/* !CONFIG_SPL_BUILD */
	.align	5
undefined_instruction:
	get_bad_stack
	bad_save_user_regs
	bl	do_undefined_instruction

	.align	5
software_interrupt:
	get_bad_stack_swi
	bad_save_user_regs
	bl	do_software_interrupt

	.align	5
prefetch_abort:
	get_bad_stack
	bad_save_user_regs
	bl	do_prefetch_abort

	.align	5
data_abort:
	get_bad_stack
	bad_save_user_regs
	bl	do_data_abort

	.align	5
not_used:
	get_bad_stack
	bad_save_user_regs
	bl	do_not_used

#ifdef CONFIG_USE_IRQ
	.align	5
irq:
	get_irq_stack
	irq_save_user_regs
	bl	do_irq
	irq_restore_user_regs

	.align	5
fiq:
	get_fiq_stack
	/* someone ought to write a more efficient fiq_save_user_regs */
	irq_save_user_regs
	bl	do_fiq
	irq_restore_user_regs
#else
	.align	5
irq:
	get_bad_stack
	bad_save_user_regs
	bl	do_irq

	.align	5
fiq:
	get_bad_stack
	bad_save_user_regs
	bl	do_fiq
#endif

	.align	5
.global arm1136_cache_flush
arm1136_cache_flush:
#if !defined(CONFIG_SYS_ICACHE_OFF)
	mcr	p15, 0, r1, c7, c5, 0	@ invalidate I cache
#endif
#if !defined(CONFIG_SYS_DCACHE_OFF)
	mcr	p15, 0, r1, c7, c14, 0	@ clean & invalidate D cache
#endif
	mov	pc, lr			@ back to caller
#endif	/* CONFIG_SPL_BUILD */