/* sleep34xx.S - OMAP34xx/36xx sleep (suspend/resume) low-level code */
/*
 * (C) Copyright 2007
 * Texas Instruments
 * Karthik Dasu <karthik-dp@ti.com>
 *
 * (C) Copyright 2004
 * Texas Instruments, <www.ti.com>
 * Richard Woodruff <r-woodruff2@ti.com>
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 2 of
 * the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston,
 * MA 02111-1307 USA
 */
#include <linux/linkage.h>
#include <asm/assembler.h>
#include <plat/sram.h>
#include <mach/io.h>

#include "cm2xxx_3xxx.h"
#include "prm2xxx_3xxx.h"
#include "sdrc.h"
#include "control.h"

/*
 * Registers access definitions
 *
 * NOTE(review): the suffix convention appears to be *_P = physical
 * address (used by code running from SRAM / with the MMU off) and
 * *_V = virtual address (via the *_REGADDR helpers) - confirm against
 * the io/regaddr macro definitions.
 */
#define SDRC_SCRATCHPAD_SEM_OFFS	0xc
#define SDRC_SCRATCHPAD_SEM_V	OMAP343X_SCRATCHPAD_REGADDR\
					(SDRC_SCRATCHPAD_SEM_OFFS)
/* Previous power state of the CORE power domain (physical address) */
#define PM_PREPWSTST_CORE_P	OMAP3430_PRM_BASE + CORE_MOD +\
					OMAP3430_PM_PREPWSTST
/* MPU power domain target-state control register (physical address) */
#define PM_PWSTCTRL_MPU_P	OMAP3430_PRM_BASE + MPU_MOD + OMAP2_PM_PWSTCTRL
#define CM_IDLEST1_CORE_V	OMAP34XX_CM_REGADDR(CORE_MOD, CM_IDLEST1)
#define CM_IDLEST_CKGEN_V	OMAP34XX_CM_REGADDR(PLL_MOD, CM_IDLEST)
#define SRAM_BASE_P		OMAP3_SRAM_PA
#define CONTROL_STAT		OMAP343X_CTRL_BASE + OMAP343X_CONTROL_STATUS
/* 3630-only: RTA (Retention Till Access) control register */
#define CONTROL_MEM_RTA_CTRL	(OMAP343X_CTRL_BASE +\
					OMAP36XX_CONTROL_MEM_RTA_CTRL)
/* Move this as correct place is available */
#define SCRATCHPAD_MEM_OFFS	0x310
#define SCRATCHPAD_BASE_P	(OMAP343X_CTRL_BASE +\
					OMAP343X_CONTROL_MEM_WKUP +\
					SCRATCHPAD_MEM_OFFS)
#define SDRC_POWER_V		OMAP34XX_SDRC_REGADDR(SDRC_POWER)
#define SDRC_SYSCONFIG_P	(OMAP343X_SDRC_BASE + SDRC_SYSCONFIG)
#define SDRC_MR_0_P		(OMAP343X_SDRC_BASE + SDRC_MR_0)
#define SDRC_EMR2_0_P		(OMAP343X_SDRC_BASE + SDRC_EMR2_0)
#define SDRC_MANUAL_0_P		(OMAP343X_SDRC_BASE + SDRC_MANUAL_0)
#define SDRC_MR_1_P		(OMAP343X_SDRC_BASE + SDRC_MR_1)
#define SDRC_EMR2_1_P		(OMAP343X_SDRC_BASE + SDRC_EMR2_1)
#define SDRC_MANUAL_1_P		(OMAP343X_SDRC_BASE + SDRC_MANUAL_1)
#define SDRC_DLLA_STATUS_V	OMAP34XX_SDRC_REGADDR(SDRC_DLLA_STATUS)
#define SDRC_DLLA_CTRL_V	OMAP34XX_SDRC_REGADDR(SDRC_DLLA_CTRL)
/*
 * API functions
 */

/*
 * The "get_*restore_pointer" functions are used to provide a
 * physical restore address where the ROM code jumps while waking
 * up from MPU OFF/OSWR state.
 * The restore pointer is stored into the scratchpad.
 */
	.text
/*
 * Function call to get the restore pointer for resume from OFF.
 * Out: r0 = run-time (pc-relative) address of the 'restore' entry point.
 */
ENTRY(get_restore_pointer)
	stmfd	sp!, {lr}	@ save registers on stack
	adr	r0, restore	@ r0 = address of common restore code
	ldmfd	sp!, {pc}	@ restore regs and return
/* Size word placed right after the function, retrieved by C code */
ENTRY(get_restore_pointer_sz)
	.word	. - get_restore_pointer
	.text
/*
 * Function call to get the restore pointer for 3630 resume from OFF.
 * Out: r0 = run-time (pc-relative) address of the 'restore_3630' entry point.
 */
ENTRY(get_omap3630_restore_pointer)
	stmfd	sp!, {lr}	@ save registers on stack
	adr	r0, restore_3630	@ r0 = address of 36xx restore code
	ldmfd	sp!, {pc}	@ restore regs and return
/* Size word placed right after the function, retrieved by C code */
ENTRY(get_omap3630_restore_pointer_sz)
	.word	. - get_omap3630_restore_pointer
	.text
/*
 * Function call to get the restore pointer for ES3 to resume from OFF.
 * Out: r0 = run-time (pc-relative) address of the 'restore_es3' entry point.
 */
ENTRY(get_es3_restore_pointer)
	stmfd	sp!, {lr}	@ save registers on stack
	adr	r0, restore_es3	@ r0 = address of 34xx >= ES3.0 restore code
	ldmfd	sp!, {pc}	@ restore regs and return
/* Size word placed right after the function, retrieved by C code */
ENTRY(get_es3_restore_pointer_sz)
	.word	. - get_es3_restore_pointer
	.text
/*
 * L2 cache needs to be toggled for stable OFF mode functionality on 3630.
 * This function sets up a flag that will allow for this toggling to take
 * place on 3630. Hopefully some version in the future may not need this.
 *
 * The flag (l2dis_3630, a word inside this code image) is read by the
 * 'restore' path below to decide whether to disable/re-enable L2.
 */
ENTRY(enable_omap3630_toggle_l2_on_restore)
	stmfd	sp!, {lr}	@ save registers on stack
	/* Setup so that we will disable and enable l2 */
	mov	r1, #0x1
	str	r1, l2dis_3630	@ pc-relative store into the in-image flag word
	ldmfd	sp!, {pc}	@ restore regs and return
	.text
/*
 * Function to call ROM code to save secure RAM context.
 *
 * In:  r0 = SDRAM address where the secure context is to be saved
 *           (written into the second word of api_params below).
 *
 * The parameter block address handed to the ROM code is rebuilt as a
 * physical SRAM address: keep the low 16 bits of the pc-relative
 * api_params address and OR in SRAM_BASE_P (this code runs copied
 * into SRAM).  The secure monitor is entered via the raw SMI opcode
 * (.word 0xE1600071) with the service ID in r12.
 */
ENTRY(save_secure_ram_context)
	stmfd	sp!, {r1-r12, lr}	@ save registers on stack
	adr	r3, api_params		@ r3 points to parameters
	str	r0, [r3,#0x4]		@ r0 has sdram address
	ldr	r12, high_mask
	and	r3, r3, r12		@ keep offset (low 16 bits) of params
	ldr	r12, sram_phy_addr_mask
	orr	r3, r3, r12		@ r3 = physical SRAM address of params
	mov	r0, #25			@ set service ID for PPA
	mov	r12, r0			@ copy secure service ID in r12
	mov	r1, #0			@ set task id for ROM code in r1
	mov	r2, #4			@ set some flags in r2, r6
	mov	r6, #0xff
	mcr	p15, 0, r0, c7, c10, 4	@ data write barrier
	mcr	p15, 0, r0, c7, c10, 5	@ data memory barrier
	.word	0xE1600071		@ call SMI monitor (smi #1)
	nop
	nop
	nop
	nop
	ldmfd	sp!, {r1-r12, pc}
/* Literal data used above; kept in .text so pc-relative access works */
sram_phy_addr_mask:
	.word	SRAM_BASE_P
high_mask:
	.word	0xffff
api_params:
	.word	0x4, 0x0, 0x0, 0x1, 0x1	@ word[1] patched with the SDRAM addr
ENTRY(save_secure_ram_context_sz)
	.word	. - save_secure_ram_context
/*
 * ======================
 * == Idle entry point ==
 * ======================
 */

/*
 * Forces OMAP into idle state
 *
 * omap34xx_cpu_suspend() - This bit of code saves the CPU context if needed
 * and executes the WFI instruction. Calling WFI effectively changes the
 * power domains states to the desired target power states.
 *
 * Notes:
 * - this code gets copied to internal SRAM at boot and after wake-up
 *   from OFF mode. The execution pointer in SRAM is _omap_sram_idle.
 * - when the OMAP wakes up it continues at different execution points
 *   depending on the low power mode (non-OFF vs OFF modes),
 *   cf. 'Resume path for xxx mode' comments.
 */
ENTRY(omap34xx_cpu_suspend)
	stmfd	sp!, {r0-r12, lr}	@ save registers on stack

	/*
	 * r0 contains restore pointer in sdram
	 * r1 contains information about saving context:
	 *   0 - No context lost
	 *   1 - Only L1 and logic lost
	 *   2 - Only L2 lost
	 *   3 - Both L1 and L2 lost
	 */

	/* Directly jump to WFI if the context save is not required */
	cmp	r1, #0x0
	beq	omap3_do_wfi

	/* Otherwise fall through to the save context code */
save_context_wfi:
	/*
	 * The context is streamed to SDRAM (r8 = cursor) as
	 * (param-count, value) pairs, in the order the restore path
	 * reads them back.
	 */
	mov	r8, r0			@ Store SDRAM address in r8
	mrc	p15, 0, r5, c1, c0, 1	@ Read Auxiliary Control Register
	mov	r4, #0x1		@ Number of parameters for restore call
	stmia	r8!, {r4-r5}		@ Push parameters for restore call
	mrc	p15, 1, r5, c9, c0, 2	@ Read L2 AUX ctrl register
	stmia	r8!, {r4-r5}		@ Push parameters for restore call

	/* Check what the target sleep state is from r1 */
	cmp	r1, #0x2		@ Only L2 lost, no need to save context
	beq	clean_caches

l1_logic_lost:
	/* Store sp and spsr to SDRAM */
	mov	r4, sp
	mrs	r5, spsr
	mov	r6, lr
	stmia	r8!, {r4-r6}
	/* Save all ARM registers */
	/* Coprocessor access control register */
	mrc	p15, 0, r6, c1, c0, 2
	stmia	r8!, {r6}
	/* TTBR0, TTBR1 and Translation table base control */
	mrc	p15, 0, r4, c2, c0, 0
	mrc	p15, 0, r5, c2, c0, 1
	mrc	p15, 0, r6, c2, c0, 2
	stmia	r8!, {r4-r6}
	/*
	 * Domain access control register, data fault status register,
	 * and instruction fault status register
	 */
	mrc	p15, 0, r4, c3, c0, 0
	mrc	p15, 0, r5, c5, c0, 0
	mrc	p15, 0, r6, c5, c0, 1
	stmia	r8!, {r4-r6}
	/*
	 * Data aux fault status register, instruction aux fault status,
	 * data fault address register and instruction fault address register
	 */
	mrc	p15, 0, r4, c5, c1, 0
	mrc	p15, 0, r5, c5, c1, 1
	mrc	p15, 0, r6, c6, c0, 0
	mrc	p15, 0, r7, c6, c0, 2
	stmia	r8!, {r4-r7}
	/*
	 * user r/w thread and process ID, user r/o thread and process ID,
	 * priv only thread and process ID, cache size selection
	 */
	mrc	p15, 0, r4, c13, c0, 2
	mrc	p15, 0, r5, c13, c0, 3
	mrc	p15, 0, r6, c13, c0, 4
	mrc	p15, 2, r7, c0, c0, 0
	stmia	r8!, {r4-r7}
	/* Data TLB lockdown, instruction TLB lockdown registers */
	mrc	p15, 0, r5, c10, c0, 0
	mrc	p15, 0, r6, c10, c0, 1
	stmia	r8!, {r5-r6}
	/* Secure or non secure vector base address, FCSE PID, Context PID */
	mrc	p15, 0, r4, c12, c0, 0
	mrc	p15, 0, r5, c13, c0, 0
	mrc	p15, 0, r6, c13, c0, 1
	stmia	r8!, {r4-r6}
	/* Primary remap, normal remap registers */
	mrc	p15, 0, r4, c10, c2, 0
	mrc	p15, 0, r5, c10, c2, 1
	stmia	r8!, {r4-r5}
	/* Store current cpsr */
	mrs	r2, cpsr
	stmia	r8!, {r2}
	mrc	p15, 0, r4, c1, c0, 0
	/* save control register */
	stmia	r8!, {r4}

clean_caches:
	/*
	 * Clean Data or unified cache to POU
	 * How to invalidate only L1 cache???? - #FIX_ME#
	 * mcr p15, 0, r11, c7, c11, 1
	 */
	cmp	r1, #0x1		@ Check whether L2 inval is required
	beq	omap3_do_wfi

clean_l2:
	/*
	 * Jump out to kernel flush routine
	 *  - reuse that code is better
	 *  - it executes in a cached space so is faster than refetch per-block
	 *  - should be faster and will change with kernel
	 *  - 'might' have to copy address, load and jump to it
	 */
	ldr	r1, kernel_flush
	mov	lr, pc			@ manual link; bx below may switch state
	bx	r1

omap3_do_wfi:
	ldr	r4, sdrc_power		@ read the SDRC_POWER register
	ldr	r5, [r4]		@ read the contents of SDRC_POWER
	orr	r5, r5, #0x40		@ enable self refresh on idle req
	str	r5, [r4]		@ write back to SDRC_POWER register

	/* Data memory barrier and Data sync barrier */
	mov	r1, #0
	mcr	p15, 0, r1, c7, c10, 4
	mcr	p15, 0, r1, c7, c10, 5

	/*
	 * ===================================
	 * == WFI instruction => Enter idle ==
	 * ===================================
	 */
	wfi				@ wait for interrupt

	/*
	 * ===================================
	 * == Resume path for non-OFF modes ==
	 * ===================================
	 */
	nop	@ NOTE(review): nop padding after wfi - presumably to let the
	nop	@ pipeline/interrupt take effect before continuing; confirm
	nop	@ against the OMAP3 errata/TRM before changing the count
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	bl	wait_sdrc_ok		@ erratum i581 WA: SDRC must be usable

	/*
	 * ===================================
	 * == Exit point from non-OFF modes ==
	 * ===================================
	 */
	ldmfd	sp!, {r0-r12, pc}	@ restore regs and return
/*
 * ==============================
 * == Resume path for OFF mode ==
 * ==============================
 */

/*
 * The restore_* functions are called by the ROM code
 * when back from WFI in OFF mode.
 * Cf. the get_*restore_pointer functions.
 *
 * restore_es3: applies to 34xx >= ES3.0
 * restore_3630: applies to 36xx
 * restore: common code for 3xxx
 */
restore_es3:
	/* Apply the i443 SDRC fix only if CORE was actually in OFF */
	ldr	r5, pm_prepwstst_core_p
	ldr	r4, [r5]
	and	r4, r4, #0x3
	cmp	r4, #0x0	@ Check if previous power state of CORE is OFF
	bne	restore
	/* Copy es3_sdrc_fix into SRAM and run it from there */
	adr	r0, es3_sdrc_fix
	ldr	r1, sram_base
	ldr	r2, es3_sdrc_fix_sz
	mov	r2, r2, ror #2	@ size in bytes -> words (word-aligned size)
copy_to_sram:
	ldmia	r0!, {r3}	@ val = *src
	stmia	r1!, {r3}	@ *dst = val
	subs	r2, r2, #0x1	@ num_words--
	bne	copy_to_sram
	ldr	r1, sram_base
	blx	r1		@ execute the fix from SRAM
	b	restore

restore_3630:
	ldr	r1, pm_prepwstst_core_p
	ldr	r2, [r1]
	and	r2, r2, #0x3
	cmp	r2, #0x0	@ Check if previous power state of CORE is OFF
	bne	restore
	/* Disable RTA before giving control */
	ldr	r1, control_mem_rta
	mov	r2, #OMAP36XX_RTA_DISABLE
	str	r2, [r1]
	/* Fall through to common code for the remaining logic */

restore:
	/*
	 * Check what was the reason for mpu reset and store the reason in r9:
	 * 0 - No context lost
	 * 1 - Only L1 and logic lost
	 * 2 - Only L2 lost - In this case, we wont be here
	 * 3 - Both L1 and L2 lost
	 */
	ldr	r1, pm_pwstctrl_mpu
	ldr	r2, [r1]
	and	r2, r2, #0x3
	cmp	r2, #0x0	@ Check if target power state was OFF or RET
	moveq	r9, #0x3	@ MPU OFF => L1 and L2 lost
	movne	r9, #0x1	@ Only L1 and L2 lost => avoid L2 invalidation
	bne	logic_l1_restore

	/* 3630 OFF-mode workaround: run with L2 disabled during restore */
	ldr	r0, l2dis_3630
	cmp	r0, #0x1	@ should we disable L2 on 3630?
	bne	skipl2dis
	mrc	p15, 0, r0, c1, c0, 1
	bic	r0, r0, #2	@ disable L2 cache
	mcr	p15, 0, r0, c1, c0, 1
skipl2dis:
	/* GP vs HS/EMU device: 0x300 in CONTROL_STATUS means GP */
	ldr	r0, control_stat
	ldr	r1, [r0]
	and	r1, #0x700
	cmp	r1, #0x300
	beq	l2_inv_gp
	/* HS/EMU device: invalidate L2 through the PPA secure service */
	mov	r0, #40		@ set service ID for PPA
	mov	r12, r0		@ copy secure Service ID in r12
	mov	r1, #0		@ set task id for ROM code in r1
	mov	r2, #4		@ set some flags in r2, r6
	mov	r6, #0xff
	adr	r3, l2_inv_api_params	@ r3 points to dummy parameters
	mcr	p15, 0, r0, c7, c10, 4	@ data write barrier
	mcr	p15, 0, r0, c7, c10, 5	@ data memory barrier
	.word	0xE1600071	@ call SMI monitor (smi #1)
	/* Write to Aux control register to set some bits */
	mov	r0, #42		@ set service ID for PPA
	mov	r12, r0		@ copy secure Service ID in r12
	mov	r1, #0		@ set task id for ROM code in r1
	mov	r2, #4		@ set some flags in r2, r6
	mov	r6, #0xff
	ldr	r4, scratchpad_base
	ldr	r3, [r4, #0xBC]	@ r3 points to parameters
	mcr	p15, 0, r0, c7, c10, 4	@ data write barrier
	mcr	p15, 0, r0, c7, c10, 5	@ data memory barrier
	.word	0xE1600071	@ call SMI monitor (smi #1)

#ifdef CONFIG_OMAP3_L2_AUX_SECURE_SAVE_RESTORE
	/* Restore L2 aux control register */
	@ set service ID for PPA
	mov	r0, #CONFIG_OMAP3_L2_AUX_SECURE_SERVICE_SET_ID
	mov	r12, r0		@ copy service ID in r12
	mov	r1, #0		@ set task ID for ROM code in r1
	mov	r2, #4		@ set some flags in r2, r6
	mov	r6, #0xff
	ldr	r4, scratchpad_base
	ldr	r3, [r4, #0xBC]
	adds	r3, r3, #8	@ r3 points to parameters
	mcr	p15, 0, r0, c7, c10, 4	@ data write barrier
	mcr	p15, 0, r0, c7, c10, 5	@ data memory barrier
	.word	0xE1600071	@ call SMI monitor (smi #1)
#endif
	b	logic_l1_restore

l2_inv_api_params:
	.word	0x1, 0x00
l2_inv_gp:
	/* Execute smi to invalidate L2 cache */
	mov	r12, #0x1	@ set up to invalidate L2
	.word	0xE1600070	@ Call SMI monitor (smieq)
	/* Write to Aux control register to set some bits */
	ldr	r4, scratchpad_base
	ldr	r3, [r4,#0xBC]
	ldr	r0, [r3,#4]	@ saved AUX ctrl value from the context area
	mov	r12, #0x3
	.word	0xE1600070	@ Call SMI monitor (smieq)
	ldr	r4, scratchpad_base
	ldr	r3, [r4,#0xBC]
	ldr	r0, [r3,#12]	@ saved L2 AUX ctrl value from the context area
	mov	r12, #0x2
	.word	0xE1600070	@ Call SMI monitor (smieq)

logic_l1_restore:
	/* Re-enable L2 if we disabled it above for 3630 */
	ldr	r1, l2dis_3630
	cmp	r1, #0x1	@ Test if L2 re-enable needed on 3630
	bne	skipl2reen
	mrc	p15, 0, r1, c1, c0, 1
	orr	r1, r1, #2	@ re-enable L2 cache
	mcr	p15, 0, r1, c1, c0, 1
skipl2reen:
	mov	r1, #0
	/*
	 * Invalidate all instruction caches to PoU
	 * and flush branch target cache
	 */
	mcr	p15, 0, r1, c7, c5, 0

	/*
	 * Reload the saved context from SDRAM; the context address was
	 * stored in the scratchpad at offset 0xBC. Skip the first 16
	 * bytes ((param-count, value) pairs saved by save_context_wfi).
	 */
	ldr	r4, scratchpad_base
	ldr	r3, [r4,#0xBC]
	adds	r3, r3, #16
	ldmia	r3!, {r4-r6}
	mov	sp, r4		@ restore saved sp
	msr	spsr_cxsf, r5	@ restore saved spsr
	mov	lr, r6		@ restore saved lr
	ldmia	r3!, {r4-r9}
	/* Coprocessor access Control Register */
	mcr	p15, 0, r4, c1, c0, 2
	/* TTBR0 */
	MCR	p15, 0, r5, c2, c0, 0
	/* TTBR1 */
	MCR	p15, 0, r6, c2, c0, 1
	/* Translation table base control register */
	MCR	p15, 0, r7, c2, c0, 2
	/* Domain access Control Register */
	MCR	p15, 0, r8, c3, c0, 0
	/* Data fault status Register */
	MCR	p15, 0, r9, c5, c0, 0
	ldmia	r3!, {r4-r8}
	/* Instruction fault status Register */
	MCR	p15, 0, r4, c5, c0, 1
	/* Data Auxiliary Fault Status Register */
	MCR	p15, 0, r5, c5, c1, 0
	/* Instruction Auxiliary Fault Status Register */
	MCR	p15, 0, r6, c5, c1, 1
	/* Data Fault Address Register */
	MCR	p15, 0, r7, c6, c0, 0
	/* Instruction Fault Address Register */
	MCR	p15, 0, r8, c6, c0, 2
	ldmia	r3!, {r4-r7}
	/* User r/w thread and process ID */
	MCR	p15, 0, r4, c13, c0, 2
	/* User ro thread and process ID */
	MCR	p15, 0, r5, c13, c0, 3
	/* Privileged only thread and process ID */
	MCR	p15, 0, r6, c13, c0, 4
	/* Cache size selection */
	MCR	p15, 2, r7, c0, c0, 0
	ldmia	r3!, {r4-r8}
	/* Data TLB lockdown registers */
	MCR	p15, 0, r4, c10, c0, 0
	/* Instruction TLB lockdown registers */
	MCR	p15, 0, r5, c10, c0, 1
	/* Secure or Nonsecure Vector Base Address */
	MCR	p15, 0, r6, c12, c0, 0
	/* FCSE PID */
	MCR	p15, 0, r7, c13, c0, 0
	/* Context PID */
	MCR	p15, 0, r8, c13, c0, 1
	ldmia	r3!, {r4-r5}
	/* Primary memory remap register */
	MCR	p15, 0, r4, c10, c2, 0
	/* Normal memory remap register */
	MCR	p15, 0, r5, c10, c2, 1
	/* Restore cpsr */
	ldmia	r3!, {r4}	@ load CPSR from SDRAM
	msr	cpsr, r4	@ store cpsr

	/* Enabling MMU here */
	mrc	p15, 0, r7, c2, c0, 2	@ Read TTBRControl
	/* Extract N (0:2) bits and decide whether to use TTBR0 or TTBR1 */
	and	r7, #0x7
	cmp	r7, #0x0
	beq	usettbr0
ttbr_error:
	/*
	 * More work needs to be done to support N[0:2] value other than 0
	 * So looping here so that the error can be detected
	 */
	b	ttbr_error
usettbr0:
	/*
	 * Build a temporary 1 MB section mapping for the page containing
	 * this code, so execution survives the moment the MMU turns on.
	 * The previous table entry and its address are stashed in the
	 * scratchpad (0xC0/0xC4) so C code can undo the patch later.
	 */
	mrc	p15, 0, r2, c2, c0, 0
	ldr	r5, ttbrbit_mask
	and	r2, r5		@ r2 = translation table base address
	mov	r4, pc
	ldr	r5, table_index_mask
	and	r4, r5		@ r4 = 31 to 20 bits of pc
	/* Extract the value to be written to table entry */
	ldr	r1, table_entry
	/* r1 has the value to be written to table entry */
	add	r1, r1, r4
	/* Getting the address of table entry to modify */
	lsr	r4, #18
	/* r2 has the location which needs to be modified */
	add	r2, r4
	/* Storing previous entry of location being modified */
	ldr	r5, scratchpad_base
	ldr	r4, [r2]
	str	r4, [r5, #0xC0]
	/* Modify the table entry */
	str	r1, [r2]
	/*
	 * Storing address of entry being modified
	 * - will be restored after enabling MMU
	 */
	ldr	r5, scratchpad_base
	str	r2, [r5, #0xC4]
	mov	r0, #0
	mcr	p15, 0, r0, c7, c5, 4	@ Flush prefetch buffer
	mcr	p15, 0, r0, c7, c5, 6	@ Invalidate branch predictor array
	mcr	p15, 0, r0, c8, c5, 0	@ Invalidate instruction TLB
	mcr	p15, 0, r0, c8, c6, 0	@ Invalidate data TLB
	/*
	 * Restore control register. This enables the MMU.
	 * The caches and prediction are not enabled here, they
	 * will be enabled after restoring the MMU table entry.
	 */
	ldmia	r3!, {r4}
	/* Store previous value of control register in scratchpad */
	str	r4, [r5, #0xC8]
	ldr	r2, cache_pred_disable_mask
	and	r4, r2		@ mask out cache/prediction enable bits
	mcr	p15, 0, r4, c1, c0, 0

	/*
	 * ==============================
	 * == Exit point from OFF mode ==
	 * ==============================
	 */
	ldmfd	sp!, {r0-r12, pc}	@ restore regs and return
/*
 * Internal functions
 */

/*
 * This function implements the erratum ID i443 WA, applies to 34xx >= ES3.0.
 * It unblocks SDRC register access, rewrites the MR/EMR2 registers of both
 * chip-selects, and issues an autorefresh command on each.  It is copied to
 * and executed from SRAM (see restore_es3), hence the physical (_P)
 * register addresses in its private literal pool below.
 */
	.text
ENTRY(es3_sdrc_fix)
	ldr	r4, sdrc_syscfg		@ get config addr
	ldr	r5, [r4]		@ get value
	tst	r5, #0x100		@ is part access blocked
	it	eq
	biceq	r5, r5, #0x100		@ clear bit if set
	str	r5, [r4]		@ write back change
	ldr	r4, sdrc_mr_0		@ get config addr
	ldr	r5, [r4]		@ get value
	str	r5, [r4]		@ write back change
	ldr	r4, sdrc_emr2_0		@ get config addr
	ldr	r5, [r4]		@ get value
	str	r5, [r4]		@ write back change
	ldr	r4, sdrc_manual_0	@ get config addr
	mov	r5, #0x2		@ autorefresh command
	str	r5, [r4]		@ kick off refreshes
	ldr	r4, sdrc_mr_1		@ get config addr
	ldr	r5, [r4]		@ get value
	str	r5, [r4]		@ write back change
	ldr	r4, sdrc_emr2_1		@ get config addr
	ldr	r5, [r4]		@ get value
	str	r5, [r4]		@ write back change
	ldr	r4, sdrc_manual_1	@ get config addr
	mov	r5, #0x2		@ autorefresh command
	str	r5, [r4]		@ kick off refreshes
	bx	lr

/* Literal pool: physical SDRC register addresses (valid with MMU off) */
sdrc_syscfg:
	.word	SDRC_SYSCONFIG_P
sdrc_mr_0:
	.word	SDRC_MR_0_P
sdrc_emr2_0:
	.word	SDRC_EMR2_0_P
sdrc_manual_0:
	.word	SDRC_MANUAL_0_P
sdrc_mr_1:
	.word	SDRC_MR_1_P
sdrc_emr2_1:
	.word	SDRC_EMR2_1_P
sdrc_manual_1:
	.word	SDRC_MANUAL_1_P
/* Size word (bytes), used by restore_es3 to copy this routine to SRAM */
ENTRY(es3_sdrc_fix_sz)
	.word	. - es3_sdrc_fix
/*
 * This function implements the erratum ID i581 WA:
 * SDRC state restore before accessing the SDRAM
 *
 * Only used at return from non-OFF mode. For OFF
 * mode the ROM code configures the SDRC and
 * the DPLL before calling the restore code directly
 * from DDR.
 *
 * Clobbers: r4, r5, r6.  Returns via lr once the SDRC DLL is locked.
 */

/* Make sure SDRC accesses are ok */
wait_sdrc_ok:

/* DPLL3 must be locked before accessing the SDRC. Maybe the HW ensures this */
	ldr	r4, cm_idlest_ckgen
wait_dpll3_lock:
	ldr	r5, [r4]
	tst	r5, #1		@ dpll3 locked?
	beq	wait_dpll3_lock

	ldr	r4, cm_idlest1_core
wait_sdrc_ready:
	ldr	r5, [r4]
	tst	r5, #0x2	@ SDRC idle?
	bne	wait_sdrc_ready
	/* allow DLL powerdown upon hw idle req */
	ldr	r4, sdrc_power
	ldr	r5, [r4]
	bic	r5, r5, #0x40
	str	r5, [r4]

is_dll_in_lock_mode:
	/* Is dll in lock mode? */
	ldr	r4, sdrc_dlla_ctrl
	ldr	r5, [r4]
	tst	r5, #0x4
	bxne	lr		@ Return if locked

	/* wait till dll locks */
wait_dll_lock_timed:
	ldr	r4, wait_dll_lock_counter	@ diagnostic counter (in-image word)
	add	r4, r4, #1
	str	r4, wait_dll_lock_counter
	ldr	r4, sdrc_dlla_status
	/* Wait 20uS for lock */
	mov	r6, #8
wait_dll_lock:
	subs	r6, r6, #0x1	@ timeout budget
	beq	kick_dll
	ldr	r5, [r4]
	and	r5, r5, #0x4
	cmp	r5, #0x4
	bne	wait_dll_lock
	bx	lr		@ Return when locked

	/* disable/reenable DLL if not locked */
kick_dll:
	ldr	r4, sdrc_dlla_ctrl
	ldr	r5, [r4]
	mov	r6, r5
	bic	r6, #(1<<3)	@ disable dll
	str	r6, [r4]
	dsb
	orr	r6, r6, #(1<<3)	@ enable dll
	str	r6, [r4]
	dsb
	ldr	r4, kick_counter	@ diagnostic counter (in-image word)
	add	r4, r4, #1
	str	r4, kick_counter
	b	wait_dll_lock_timed
/*
 * Shared literal pool for the suspend/resume code above.  Kept in .text
 * so pc-relative ldr/str work both in SRAM and in the MMU-off paths.
 */
cm_idlest1_core:
	.word	CM_IDLEST1_CORE_V	@ CORE idle status (virtual)
cm_idlest_ckgen:
	.word	CM_IDLEST_CKGEN_V	@ clock-generator (DPLL) idle status
sdrc_dlla_status:
	.word	SDRC_DLLA_STATUS_V
sdrc_dlla_ctrl:
	.word	SDRC_DLLA_CTRL_V
pm_prepwstst_core_p:
	.word	PM_PREPWSTST_CORE_P	@ CORE previous power state (physical)
pm_pwstctrl_mpu:
	.word	PM_PWSTCTRL_MPU_P	@ MPU target power state (physical)
scratchpad_base:
	.word	SCRATCHPAD_BASE_P
sram_base:
	.word	SRAM_BASE_P + 0x8000	@ destination for the es3_sdrc_fix copy
sdrc_power:
	.word	SDRC_POWER_V
ttbrbit_mask:
	.word	0xFFFFC000		@ TTBR0 base-address bits (N == 0)
table_index_mask:
	.word	0xFFF00000		@ 1 MB section index bits of an address
table_entry:
	.word	0x00000C02		@ section descriptor template for temp mapping
cache_pred_disable_mask:
	.word	0xFFFFE7FB		@ clears I-cache/branch-pred/C enable bits
control_stat:
	.word	CONTROL_STAT
control_mem_rta:
	.word	CONTROL_MEM_RTA_CTRL
kernel_flush:
	.word	v7_flush_dcache_all	@ kernel D-cache flush routine
l2dis_3630:
	.word	0			@ flag: toggle L2 across OFF restore (3630)
/*
 * When exporting to userspace while the counters are in SRAM,
 * these 2 words need to be at the end to facilitate retrieval!
 */
kick_counter:
	.word	0
wait_dll_lock_counter:
	.word	0
/* Total size of the suspend blob copied to SRAM, retrieved by C code */
ENTRY(omap34xx_cpu_suspend_sz)
	.word	. - omap34xx_cpu_suspend