sleep34xx.S 16 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560561562563564565566567568569570571572573574575576577578579580581582583584585586587588589590591592593594595596597598599600601602603604605606607608609610611612
  1. /*
  2. * (C) Copyright 2007
  3. * Texas Instruments
  4. * Karthik Dasu <karthik-dp@ti.com>
  5. *
  6. * (C) Copyright 2004
  7. * Texas Instruments, <www.ti.com>
  8. * Richard Woodruff <r-woodruff2@ti.com>
  9. *
  10. * This program is free software; you can redistribute it and/or
  11. * modify it under the terms of the GNU General Public License as
  12. * published by the Free Software Foundation; either version 2 of
  13. * the License, or (at your option) any later version.
  14. *
  15. * This program is distributed in the hope that it will be useful,
  16. * but WITHOUT ANY WARRANTY; without even the implied warranty of
  17. * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  18. * GNU General Public License for more details.
  19. *
  20. * You should have received a copy of the GNU General Public License
  21. * along with this program; if not, write to the Free Software
  22. * Foundation, Inc., 59 Temple Place, Suite 330, Boston,
  23. * MA 02111-1307 USA
  24. */
  25. #include <linux/linkage.h>
  26. #include <asm/assembler.h>
  27. #include <plat/sram.h>
  28. #include <mach/io.h>
  29. #include "cm2xxx_3xxx.h"
  30. #include "prm2xxx_3xxx.h"
  31. #include "sdrc.h"
  32. #include "control.h"
/*
 * Register access definitions.
 * Suffix convention: _V = virtual (MMU-mapped) address, usable from
 * normal kernel context; _P = physical address, required by code that
 * runs with the MMU off (ROM-called restore paths, code run from SRAM).
 */
#define SDRC_SCRATCHPAD_SEM_OFFS	0xc	/* semaphore offset inside scratchpad */
#define SDRC_SCRATCHPAD_SEM_V	OMAP343X_SCRATCHPAD_REGADDR\
				(SDRC_SCRATCHPAD_SEM_OFFS)
#define PM_PREPWSTST_CORE_P	OMAP3430_PRM_BASE + CORE_MOD +\
				OMAP3430_PM_PREPWSTST	/* CORE previous power state */
#define PM_PWSTCTRL_MPU_P	OMAP3430_PRM_BASE + MPU_MOD + OMAP2_PM_PWSTCTRL
#define CM_IDLEST1_CORE_V	OMAP34XX_CM_REGADDR(CORE_MOD, CM_IDLEST1)
#define CM_IDLEST_CKGEN_V	OMAP34XX_CM_REGADDR(PLL_MOD, CM_IDLEST)
#define SRAM_BASE_P		OMAP3_SRAM_PA	/* physical base of on-chip SRAM */
#define CONTROL_STAT		OMAP343X_CTRL_BASE + OMAP343X_CONTROL_STATUS
#define CONTROL_MEM_RTA_CTRL	(OMAP343X_CTRL_BASE +\
				OMAP36XX_CONTROL_MEM_RTA_CTRL)

/* Move this as correct place is available */
#define SCRATCHPAD_MEM_OFFS	0x310
#define SCRATCHPAD_BASE_P	(OMAP343X_CTRL_BASE +\
				OMAP343X_CONTROL_MEM_WKUP +\
				SCRATCHPAD_MEM_OFFS)
#define SDRC_POWER_V		OMAP34XX_SDRC_REGADDR(SDRC_POWER)
#define SDRC_SYSCONFIG_P	(OMAP343X_SDRC_BASE + SDRC_SYSCONFIG)
#define SDRC_MR_0_P		(OMAP343X_SDRC_BASE + SDRC_MR_0)
#define SDRC_EMR2_0_P		(OMAP343X_SDRC_BASE + SDRC_EMR2_0)
#define SDRC_MANUAL_0_P		(OMAP343X_SDRC_BASE + SDRC_MANUAL_0)
#define SDRC_MR_1_P		(OMAP343X_SDRC_BASE + SDRC_MR_1)
#define SDRC_EMR2_1_P		(OMAP343X_SDRC_BASE + SDRC_EMR2_1)
#define SDRC_MANUAL_1_P		(OMAP343X_SDRC_BASE + SDRC_MANUAL_1)
#define SDRC_DLLA_STATUS_V	OMAP34XX_SDRC_REGADDR(SDRC_DLLA_STATUS)
#define SDRC_DLLA_CTRL_V	OMAP34XX_SDRC_REGADDR(SDRC_DLLA_CTRL)

/*
 * This file needs be built unconditionally as ARM to interoperate correctly
 * with non-Thumb-2-capable firmware.
 */
	.arm
  68. /*
  69. * API functions
  70. */
  71. /*
  72. * The "get_*restore_pointer" functions are used to provide a
  73. * physical restore address where the ROM code jumps while waking
  74. * up from MPU OFF/OSWR state.
  75. * The restore pointer is stored into the scratchpad.
  76. */
	.text
/*
 * Function call to get the restore pointer for resume from OFF.
 * Returns in r0 the address of the common 'restore' entry point;
 * the caller stores it in the scratchpad for the ROM code to jump to.
 */
ENTRY(get_restore_pointer)
	stmfd	sp!, {lr}	@ save registers on stack
	adr	r0, restore	@ r0 = run-time address of 'restore'
	ldmfd	sp!, {pc}	@ restore regs and return
ENDPROC(get_restore_pointer)
	.align
ENTRY(get_restore_pointer_sz)
	.word	. - get_restore_pointer	@ function size in bytes (for copying)
	.text
/*
 * Function call to get the restore pointer for 3630 resume from OFF.
 * Returns in r0 the address of the 'restore_3630' entry point.
 */
ENTRY(get_omap3630_restore_pointer)
	stmfd	sp!, {lr}	@ save registers on stack
	adr	r0, restore_3630	@ r0 = run-time address of 'restore_3630'
	ldmfd	sp!, {pc}	@ restore regs and return
ENDPROC(get_omap3630_restore_pointer)
	.align
ENTRY(get_omap3630_restore_pointer_sz)
	.word	. - get_omap3630_restore_pointer	@ function size in bytes
	.text
/*
 * Function call to get the restore pointer for ES3 to resume from OFF.
 * Returns in r0 the address of the 'restore_es3' entry point
 * (34xx >= ES3.0 needs the erratum i443 SDRC fix on the wake-up path).
 */
ENTRY(get_es3_restore_pointer)
	stmfd	sp!, {lr}	@ save registers on stack
	adr	r0, restore_es3	@ r0 = run-time address of 'restore_es3'
	ldmfd	sp!, {pc}	@ restore regs and return
ENDPROC(get_es3_restore_pointer)
	.align
ENTRY(get_es3_restore_pointer_sz)
	.word	. - get_es3_restore_pointer	@ function size in bytes
	.text
/*
 * L2 cache needs to be toggled for stable OFF mode functionality on 3630.
 * This function sets up a flag that will allow for this toggling to take
 * place on 3630. Hopefully some version in the future may not need this.
 * The flag (l2dis_3630, in this file's literal pool) is checked on the
 * OFF-mode restore path to disable and later re-enable L2.
 */
ENTRY(enable_omap3630_toggle_l2_on_restore)
	stmfd	sp!, {lr}	@ save registers on stack
	/* Setup so that we will disable and enable l2 */
	mov	r1, #0x1
	adrl	r2, l2dis_3630	@ may be too distant for plain adr
	str	r1, [r2]	@ set the flag read by restore/logic_l1_restore
	ldmfd	sp!, {pc}	@ restore regs and return
ENDPROC(enable_omap3630_toggle_l2_on_restore)
	.text
/*
 * Function to call rom code to save secure ram context.
 *
 * In:	r0 = SDRAM address where the secure context is to be saved
 *	(stored into the api_params block below).
 * The parameter-block address in r3 is rebased onto the SRAM physical
 * base (low 16 bits kept, upper bits replaced) before the SMC, since
 * the ROM code expects a physical address.
 * Clobbers r1-r3, r6, r12 per the ROM calling convention.
 */
	.align	3
ENTRY(save_secure_ram_context)
	stmfd	sp!, {r4 - r11, lr}	@ save registers on stack
	adr	r3, api_params		@ r3 points to parameters
	str	r0, [r3,#0x4]		@ r0 has sdram address
	ldr	r12, high_mask
	and	r3, r3, r12		@ keep only the low 16 bits of r3...
	ldr	r12, sram_phy_addr_mask
	orr	r3, r3, r12		@ ...and rebase onto SRAM physical base
	mov	r0, #25			@ set service ID for PPA
	mov	r12, r0			@ copy secure service ID in r12
	mov	r1, #0			@ set task id for ROM code in r1
	mov	r2, #4			@ set some flags in r2, r6
	mov	r6, #0xff
	dsb				@ data write barrier
	dmb				@ data memory barrier
	smc	#1			@ call SMI monitor (smi #1)
	nop				@ NOTE(review): nop padding after smc —
	nop				@ presumably required by the ROM code
	nop				@ return sequence; confirm against the
	nop				@ OMAP3 ROM API documentation
	ldmfd	sp!, {r4 - r11, pc}
	.align
sram_phy_addr_mask:
	.word	SRAM_BASE_P		@ SRAM physical base, OR'ed into r3
high_mask:
	.word	0xffff			@ mask keeping the low 16 address bits
api_params:
	.word	0x4, 0x0, 0x0, 0x1, 0x1	@ ROM parameter block; word 1 = SDRAM addr
ENDPROC(save_secure_ram_context)
ENTRY(save_secure_ram_context_sz)
	.word	. - save_secure_ram_context	@ function size in bytes
/*
 * ======================
 * == Idle entry point ==
 * ======================
 */

/*
 * Forces OMAP into idle state
 *
 * omap34xx_cpu_suspend() - This bit of code saves the CPU context if needed
 * and executes the WFI instruction. Calling WFI effectively changes the
 * power domains states to the desired target power states.
 *
 * In:	r0 = CPU context save/restore pointer in SDRAM
 *	r1 = context-loss level (see table below); 0 = straight to WFI
 *
 * Notes:
 * - this code gets copied to internal SRAM at boot and after wake-up
 *   from OFF mode. The execution pointer in SRAM is _omap_sram_idle.
 * - when the OMAP wakes up it continues at different execution points
 *   depending on the low power mode (non-OFF vs OFF modes),
 *   cf. 'Resume path for xxx mode' comments.
 */
	.align	3
ENTRY(omap34xx_cpu_suspend)
	stmfd	sp!, {r4 - r11, lr}	@ save registers on stack

	/*
	 * r0 contains CPU context save/restore pointer in sdram
	 * r1 contains information about saving context:
	 *   0 - No context lost
	 *   1 - Only L1 and logic lost
	 *   2 - Only L2 lost (Even L1 is retained we clean it along with L2)
	 *   3 - Both L1 and L2 lost and logic lost
	 */

	/* Directly jump to WFI if the context save is not required */
	cmp	r1, #0x0
	beq	omap3_do_wfi

	/* Otherwise fall through to the save context code */
save_context_wfi:
	mov	r8, r0			@ Store SDRAM address in r8
	mrc	p15, 0, r5, c1, c0, 1	@ Read Auxiliary Control Register
	mov	r4, #0x1		@ Number of parameters for restore call
	stmia	r8!, {r4-r5}		@ Push parameters for restore call
	mrc	p15, 1, r5, c9, c0, 2	@ Read L2 AUX ctrl register
	stmia	r8!, {r4-r5}		@ Push parameters for restore call

	/*
	 * jump out to kernel flush routine
	 *  - reuse that code is better
	 *  - it executes in a cached space so is faster than refetch per-block
	 *  - should be faster and will change with kernel
	 *  - 'might' have to copy address, load and jump to it
	 * Flush all data from the L1 data cache before disabling
	 * SCTLR.C bit.
	 */
	ldr	r1, kernel_flush
	mov	lr, pc			@ manual link: lr = return address
	bx	r1

	/*
	 * Clear the SCTLR.C bit to prevent further data cache
	 * allocation. Clearing SCTLR.C would make all the data accesses
	 * strongly ordered and would not hit the cache.
	 */
	mrc	p15, 0, r0, c1, c0, 0
	bic	r0, r0, #(1 << 2)	@ Disable the C bit
	mcr	p15, 0, r0, c1, c0, 0
	isb

	/*
	 * Invalidate L1 data cache. Even though only invalidate is
	 * necessary exported flush API is used here. Doing clean
	 * on already clean cache would be almost NOP.
	 */
	ldr	r1, kernel_flush
	blx	r1

	/*
	 * The kernel doesn't interwork: v7_flush_dcache_all in particular will
	 * always return in Thumb state when CONFIG_THUMB2_KERNEL is enabled.
	 * This sequence switches back to ARM.  Note that .align may insert a
	 * nop: bx pc needs to be word-aligned in order to work.
	 */
THUMB(	.thumb		)
THUMB(	.align		)
THUMB(	bx	pc	)
THUMB(	nop	)
	.arm
omap3_do_wfi:
	ldr	r4, sdrc_power		@ read the SDRC_POWER register
	ldr	r5, [r4]		@ read the contents of SDRC_POWER
	orr	r5, r5, #0x40		@ enable self refresh on idle req
	str	r5, [r4]		@ write back to SDRC_POWER register

	/* Data memory barrier and Data sync barrier */
	dsb
	dmb

	/*
	 * ===================================
	 * == WFI instruction => Enter idle ==
	 * ===================================
	 */
	wfi				@ wait for interrupt

	/*
	 * ===================================
	 * == Resume path for non-OFF modes ==
	 * ===================================
	 */
	/*
	 * NOTE(review): nop padding after wfi — presumably to drain the
	 * pipeline if the WFI falls through; confirm against errata docs.
	 */
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	bl	wait_sdrc_ok		@ i581 WA: ensure SDRC usable, undo
					@ the self-refresh-on-idle setting

	mrc	p15, 0, r0, c1, c0, 0
	tst	r0, #(1 << 2)		@ Check C bit enabled?
	orreq	r0, r0, #(1 << 2)	@ Enable the C bit if cleared
	mcreq	p15, 0, r0, c1, c0, 0
	isb

	/*
	 * ===================================
	 * == Exit point from non-OFF modes ==
	 * ===================================
	 */
	ldmfd	sp!, {r4 - r11, pc}	@ restore regs and return
/*
 * ==============================
 * == Resume path for OFF mode ==
 * ==============================
 */

/*
 * The restore_* functions are called by the ROM code
 *  when back from WFI in OFF mode.
 *  Cf. the get_*restore_pointer functions.
 *
 *  restore_es3: applies to 34xx >= ES3.0
 *  restore_3630: applies to 36xx
 *  restore: common code for 3xxx
 */
restore_es3:
	ldr	r5, pm_prepwstst_core_p
	ldr	r4, [r5]
	and	r4, r4, #0x3	@ keep the previous-power-state field
	cmp	r4, #0x0	@ Check if previous power state of CORE is OFF
	bne	restore
	/* CORE hit OFF: copy the i443 SDRC workaround into SRAM and run it */
	adr	r0, es3_sdrc_fix
	ldr	r1, sram_base
	ldr	r2, es3_sdrc_fix_sz
	mov	r2, r2, ror #2	@ bytes -> words (size is word-aligned, so
				@ ror #2 behaves as a divide by 4 here)
copy_to_sram:
	ldmia	r0!, {r3}	@ val = *src
	stmia	r1!, {r3}	@ *dst = val
	subs	r2, r2, #0x1	@ num_words--
	bne	copy_to_sram
	ldr	r1, sram_base
	blx	r1		@ execute es3_sdrc_fix from SRAM
	b	restore		@ continue with the common restore path
restore_3630:
	ldr	r1, pm_prepwstst_core_p
	ldr	r2, [r1]
	and	r2, r2, #0x3	@ keep the previous-power-state field
	cmp	r2, #0x0	@ Check if previous power state of CORE is OFF
	bne	restore
	/* Disable RTA before giving control */
	ldr	r1, control_mem_rta
	mov	r2, #OMAP36XX_RTA_DISABLE
	str	r2, [r1]
	/* Fall through to common code for the remaining logic */
restore:
	/*
	 * Read the pwstctrl register to check the reason for mpu reset.
	 * This tells us what was lost.
	 */
	ldr	r1, pm_pwstctrl_mpu
	ldr	r2, [r1]
	and	r2, r2, #0x3
	cmp	r2, #0x0	@ Check if target power state was OFF or RET
	bne	logic_l1_restore

	ldr	r0, l2dis_3630
	cmp	r0, #0x1	@ should we disable L2 on 3630?
	bne	skipl2dis
	mrc	p15, 0, r0, c1, c0, 1
	bic	r0, r0, #2	@ disable L2 cache
	mcr	p15, 0, r0, c1, c0, 1
skipl2dis:
	ldr	r0, control_stat
	ldr	r1, [r0]
	and	r1, #0x700	@ mask the device-type bits of CONTROL_STATUS
	cmp	r1, #0x300	@ GP device? then invalidate L2 via plain SMI
	beq	l2_inv_gp
	/* Secure device: invalidate L2 via the PPA service */
	mov	r0, #40		@ set service ID for PPA
	mov	r12, r0		@ copy secure Service ID in r12
	mov	r1, #0		@ set task id for ROM code in r1
	mov	r2, #4		@ set some flags in r2, r6
	mov	r6, #0xff
	adr	r3, l2_inv_api_params	@ r3 points to dummy parameters
	dsb			@ data write barrier
	dmb			@ data memory barrier
	smc	#1		@ call SMI monitor (smi #1)
	/* Write to Aux control register to set some bits */
	mov	r0, #42		@ set service ID for PPA
	mov	r12, r0		@ copy secure Service ID in r12
	mov	r1, #0		@ set task id for ROM code in r1
	mov	r2, #4		@ set some flags in r2, r6
	mov	r6, #0xff
	ldr	r4, scratchpad_base
	ldr	r3, [r4, #0xBC]	@ r3 points to parameters
	dsb			@ data write barrier
	dmb			@ data memory barrier
	smc	#1		@ call SMI monitor (smi #1)

#ifdef CONFIG_OMAP3_L2_AUX_SECURE_SAVE_RESTORE
	/* Restore L2 aux control register */
				@ set service ID for PPA
	mov	r0, #CONFIG_OMAP3_L2_AUX_SECURE_SERVICE_SET_ID
	mov	r12, r0		@ copy service ID in r12
	mov	r1, #0		@ set task ID for ROM code in r1
	mov	r2, #4		@ set some flags in r2, r6
	mov	r6, #0xff
	ldr	r4, scratchpad_base
	ldr	r3, [r4, #0xBC]
	adds	r3, r3, #8	@ r3 points to parameters
	dsb			@ data write barrier
	dmb			@ data memory barrier
	smc	#1		@ call SMI monitor (smi #1)
#endif
	b	logic_l1_restore

	.align
l2_inv_api_params:
	.word	0x1, 0x00	@ dummy parameter block for the PPA call
l2_inv_gp:
	/* Execute smi to invalidate L2 cache */
	mov	r12, #0x1	@ set up to invalidate L2
	smc	#0		@ Call SMI monitor (smieq)
	/* Write to Aux control register to set some bits */
	ldr	r4, scratchpad_base
	ldr	r3, [r4,#0xBC]	@ scratchpad entry holding saved parameters
	ldr	r0, [r3,#4]	@ NOTE(review): presumably the saved ACTLR
				@ value — confirm against scratchpad layout
	mov	r12, #0x3	@ ROM service ID for this SMI
	smc	#0		@ Call SMI monitor (smieq)
	ldr	r4, scratchpad_base
	ldr	r3, [r4,#0xBC]
	ldr	r0, [r3,#12]	@ NOTE(review): presumably saved L2 aux ctrl
	mov	r12, #0x2	@ ROM service ID for this SMI
	smc	#0		@ Call SMI monitor (smieq)
logic_l1_restore:
	ldr	r1, l2dis_3630
	cmp	r1, #0x1	@ Test if L2 re-enable needed on 3630
	bne	skipl2reen
	mrc	p15, 0, r1, c1, c0, 1
	orr	r1, r1, #2	@ re-enable L2 cache
	mcr	p15, 0, r1, c1, c0, 1
skipl2reen:
	/* Now branch to the common CPU resume function */
	b	cpu_resume
	.ltorg			@ flush literal pool here (end of restore path)
/*
 * Internal functions
 */

/*
 * This function implements the erratum ID i443 WA, applies to 34xx >= ES3.0.
 * It is copied into SRAM by restore_es3 and executed from there, so all
 * SDRC register addresses below are physical (_P).
 */
	.text
	.align	3
ENTRY(es3_sdrc_fix)
	ldr	r4, sdrc_syscfg		@ get config addr
	ldr	r5, [r4]		@ get value
	tst	r5, #0x100		@ is part access blocked
	it	eq
	biceq	r5, r5, #0x100		@ clear bit if set
	str	r5, [r4]		@ write back change
	ldr	r4, sdrc_mr_0		@ get config addr
	ldr	r5, [r4]		@ get value
	str	r5, [r4]		@ write back change (rewrite mode reg)
	ldr	r4, sdrc_emr2_0		@ get config addr
	ldr	r5, [r4]		@ get value
	str	r5, [r4]		@ write back change
	ldr	r4, sdrc_manual_0	@ get config addr
	mov	r5, #0x2		@ autorefresh command
	str	r5, [r4]		@ kick off refreshes
	ldr	r4, sdrc_mr_1		@ get config addr
	ldr	r5, [r4]		@ get value
	str	r5, [r4]		@ write back change
	ldr	r4, sdrc_emr2_1		@ get config addr
	ldr	r5, [r4]		@ get value
	str	r5, [r4]		@ write back change
	ldr	r4, sdrc_manual_1	@ get config addr
	mov	r5, #0x2		@ autorefresh command
	str	r5, [r4]		@ kick off refreshes
	bx	lr

/* Local literal pool: copied to SRAM along with the code above */
	.align
sdrc_syscfg:
	.word	SDRC_SYSCONFIG_P
sdrc_mr_0:
	.word	SDRC_MR_0_P
sdrc_emr2_0:
	.word	SDRC_EMR2_0_P
sdrc_manual_0:
	.word	SDRC_MANUAL_0_P
sdrc_mr_1:
	.word	SDRC_MR_1_P
sdrc_emr2_1:
	.word	SDRC_EMR2_1_P
sdrc_manual_1:
	.word	SDRC_MANUAL_1_P
ENDPROC(es3_sdrc_fix)
ENTRY(es3_sdrc_fix_sz)
	.word	. - es3_sdrc_fix	@ size in bytes, used by the copy loop
/*
 * This function implements the erratum ID i581 WA:
 * SDRC state restore before accessing the SDRAM
 *
 * Only used at return from non-OFF mode. For OFF
 * mode the ROM code configures the SDRC and
 * the DPLL before calling the restore code directly
 * from DDR.
 *
 * Clobbers r4-r7; returns via lr once the SDRC DLL is locked.
 */

/* Make sure SDRC accesses are ok */
wait_sdrc_ok:

/* DPLL3 must be locked before accessing the SDRC. Maybe the HW ensures this */
	ldr	r4, cm_idlest_ckgen
wait_dpll3_lock:
	ldr	r5, [r4]
	tst	r5, #1		@ spin until DPLL3 reports locked
	beq	wait_dpll3_lock

	ldr	r4, cm_idlest1_core
wait_sdrc_ready:
	ldr	r5, [r4]
	tst	r5, #0x2	@ spin until the SDRC is accessible
	bne	wait_sdrc_ready
	/* allow DLL powerdown upon hw idle req */
	ldr	r4, sdrc_power
	ldr	r5, [r4]
	bic	r5, r5, #0x40	@ clear the bit set in omap3_do_wfi
	str	r5, [r4]

/*
 * PC-relative stores lead to undefined behaviour in Thumb-2: use a r7 as a
 * base instead.
 * Be careful not to clobber r7 when maintaining this code.
 */

is_dll_in_lock_mode:
	/* Is dll in lock mode? */
	ldr	r4, sdrc_dlla_ctrl
	ldr	r5, [r4]
	tst	r5, #0x4
	bxne	lr		@ Return if locked
	/* wait till dll locks */
	adr	r7, kick_counter	@ r7 = store base for both counters
wait_dll_lock_timed:
	ldr	r4, wait_dll_lock_counter
	add	r4, r4, #1
	str	r4, [r7, #wait_dll_lock_counter - kick_counter]
	ldr	r4, sdrc_dlla_status
	/* Wait 20uS for lock */
	mov	r6, #8
wait_dll_lock:
	subs	r6, r6, #0x1
	beq	kick_dll	@ timed out: toggle the DLL and retry
	ldr	r5, [r4]
	and	r5, r5, #0x4
	cmp	r5, #0x4
	bne	wait_dll_lock
	bx	lr		@ Return when locked

	/* disable/reenable DLL if not locked */
kick_dll:
	ldr	r4, sdrc_dlla_ctrl
	ldr	r5, [r4]
	mov	r6, r5
	bic	r6, #(1<<3)	@ disable dll
	str	r6, [r4]
	dsb
	orr	r6, r6, #(1<<3)	@ enable dll
	str	r6, [r4]
	dsb
	ldr	r4, kick_counter
	add	r4, r4, #1
	str	r4, [r7]	@ kick_counter
	b	wait_dll_lock_timed
	.align
/* Literal pool: register addresses and state used by the code above */
cm_idlest1_core:
	.word	CM_IDLEST1_CORE_V
cm_idlest_ckgen:
	.word	CM_IDLEST_CKGEN_V
sdrc_dlla_status:
	.word	SDRC_DLLA_STATUS_V
sdrc_dlla_ctrl:
	.word	SDRC_DLLA_CTRL_V
pm_prepwstst_core_p:
	.word	PM_PREPWSTST_CORE_P
pm_pwstctrl_mpu:
	.word	PM_PWSTCTRL_MPU_P
scratchpad_base:
	.word	SCRATCHPAD_BASE_P
sram_base:
	.word	SRAM_BASE_P + 0x8000	@ NOTE(review): presumably a scratch
					@ area in SRAM for es3_sdrc_fix; confirm
sdrc_power:
	.word	SDRC_POWER_V
control_stat:
	.word	CONTROL_STAT
control_mem_rta:
	.word	CONTROL_MEM_RTA_CTRL
kernel_flush:
	.word	v7_flush_dcache_all	@ kernel L1 D-cache clean+invalidate
l2dis_3630:
	.word	0	@ set by enable_omap3630_toggle_l2_on_restore
/*
 * When exporting to userspace while the counters are in SRAM,
 * these 2 words need to be at the end to facilitate retrieval!
 */
kick_counter:
	.word	0	@ number of DLL disable/enable kicks performed
wait_dll_lock_counter:
	.word	0	@ number of timed DLL lock-wait attempts
ENDPROC(omap34xx_cpu_suspend)
ENTRY(omap34xx_cpu_suspend_sz)
	.word	. - omap34xx_cpu_suspend	@ total size copied to SRAM