/*
 * linux/arch/arm/mach-omap2/sleep34xx.S
 *
 * (C) Copyright 2007
 * Texas Instruments
 * Karthik Dasu <karthik-dp@ti.com>
 *
 * (C) Copyright 2004
 * Texas Instruments, <www.ti.com>
 * Richard Woodruff <r-woodruff2@ti.com>
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 2 of
 * the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston,
 * MA 02111-1307 USA
 */
  27. #include <linux/linkage.h>
  28. #include <asm/assembler.h>
  29. #include <mach/io.h>
  30. #include "cm.h"
  31. #include "prm.h"
  32. #include "sdrc.h"
  33. #include "control.h"
  34. #define SDRC_SCRATCHPAD_SEM_V 0xfa00291c
  35. #define PM_PREPWSTST_CORE_V OMAP34XX_PRM_REGADDR(CORE_MOD, \
  36. OMAP3430_PM_PREPWSTST)
  37. #define PM_PREPWSTST_CORE_P 0x48306AE8
  38. #define PM_PREPWSTST_MPU_V OMAP34XX_PRM_REGADDR(MPU_MOD, \
  39. OMAP3430_PM_PREPWSTST)
  40. #define PM_PWSTCTRL_MPU_P OMAP3430_PRM_BASE + MPU_MOD + OMAP2_PM_PWSTCTRL
  41. #define CM_IDLEST1_CORE_V OMAP34XX_CM_REGADDR(CORE_MOD, CM_IDLEST1)
  42. #define CM_IDLEST_CKGEN_V OMAP34XX_CM_REGADDR(PLL_MOD, CM_IDLEST)
  43. #define SRAM_BASE_P 0x40200000
  44. #define CONTROL_STAT 0x480022F0
  45. #define CONTROL_MEM_RTA_CTRL (OMAP343X_CTRL_BASE\
  46. + OMAP36XX_CONTROL_MEM_RTA_CTRL)
  47. #define SCRATCHPAD_MEM_OFFS 0x310 /* Move this as correct place is
  48. * available */
  49. #define SCRATCHPAD_BASE_P (OMAP343X_CTRL_BASE + OMAP343X_CONTROL_MEM_WKUP\
  50. + SCRATCHPAD_MEM_OFFS)
  51. #define SDRC_POWER_V OMAP34XX_SDRC_REGADDR(SDRC_POWER)
  52. #define SDRC_SYSCONFIG_P (OMAP343X_SDRC_BASE + SDRC_SYSCONFIG)
  53. #define SDRC_MR_0_P (OMAP343X_SDRC_BASE + SDRC_MR_0)
  54. #define SDRC_EMR2_0_P (OMAP343X_SDRC_BASE + SDRC_EMR2_0)
  55. #define SDRC_MANUAL_0_P (OMAP343X_SDRC_BASE + SDRC_MANUAL_0)
  56. #define SDRC_MR_1_P (OMAP343X_SDRC_BASE + SDRC_MR_1)
  57. #define SDRC_EMR2_1_P (OMAP343X_SDRC_BASE + SDRC_EMR2_1)
  58. #define SDRC_MANUAL_1_P (OMAP343X_SDRC_BASE + SDRC_MANUAL_1)
  59. #define SDRC_DLLA_STATUS_V OMAP34XX_SDRC_REGADDR(SDRC_DLLA_STATUS)
  60. #define SDRC_DLLA_CTRL_V OMAP34XX_SDRC_REGADDR(SDRC_DLLA_CTRL)
  61. .text
  62. /* Function to acquire the semaphore in scratchpad */
  63. ENTRY(lock_scratchpad_sem)
  64. stmfd sp!, {lr} @ save registers on stack
  65. wait_sem:
  66. mov r0,#1
  67. ldr r1, sdrc_scratchpad_sem
  68. wait_loop:
  69. ldr r2, [r1] @ load the lock value
  70. cmp r2, r0 @ is the lock free ?
  71. beq wait_loop @ not free...
  72. swp r2, r0, [r1] @ semaphore free so lock it and proceed
  73. cmp r2, r0 @ did we succeed ?
  74. beq wait_sem @ no - try again
  75. ldmfd sp!, {pc} @ restore regs and return
  76. sdrc_scratchpad_sem:
  77. .word SDRC_SCRATCHPAD_SEM_V
  78. ENTRY(lock_scratchpad_sem_sz)
  79. .word . - lock_scratchpad_sem
  80. .text
  81. /* Function to release the scratchpad semaphore */
  82. ENTRY(unlock_scratchpad_sem)
  83. stmfd sp!, {lr} @ save registers on stack
  84. ldr r3, sdrc_scratchpad_sem
  85. mov r2,#0
  86. str r2,[r3]
  87. ldmfd sp!, {pc} @ restore regs and return
  88. ENTRY(unlock_scratchpad_sem_sz)
  89. .word . - unlock_scratchpad_sem
  90. .text
  91. /* Function call to get the restore pointer for resume from OFF */
  92. ENTRY(get_restore_pointer)
  93. stmfd sp!, {lr} @ save registers on stack
  94. adr r0, restore
  95. ldmfd sp!, {pc} @ restore regs and return
  96. ENTRY(get_restore_pointer_sz)
  97. .word . - get_restore_pointer
  98. .text
  99. /* Function call to get the restore pointer for 3630 resume from OFF */
  100. ENTRY(get_omap3630_restore_pointer)
  101. stmfd sp!, {lr} @ save registers on stack
  102. adr r0, restore_3630
  103. ldmfd sp!, {pc} @ restore regs and return
  104. ENTRY(get_omap3630_restore_pointer_sz)
  105. .word . - get_omap3630_restore_pointer
  106. .text
  107. /*
  108. * L2 cache needs to be toggled for stable OFF mode functionality on 3630.
  109. * This function sets up a fflag that will allow for this toggling to take
  110. * place on 3630. Hopefully some version in the future maynot need this
  111. */
  112. ENTRY(enable_omap3630_toggle_l2_on_restore)
  113. stmfd sp!, {lr} @ save registers on stack
  114. /* Setup so that we will disable and enable l2 */
  115. mov r1, #0x1
  116. str r1, l2dis_3630
  117. ldmfd sp!, {pc} @ restore regs and return
  118. .text
  119. /* Function call to get the restore pointer for for ES3 to resume from OFF */
  120. ENTRY(get_es3_restore_pointer)
  121. stmfd sp!, {lr} @ save registers on stack
  122. adr r0, restore_es3
  123. ldmfd sp!, {pc} @ restore regs and return
  124. ENTRY(get_es3_restore_pointer_sz)
  125. .word . - get_es3_restore_pointer
  126. ENTRY(es3_sdrc_fix)
  127. ldr r4, sdrc_syscfg @ get config addr
  128. ldr r5, [r4] @ get value
  129. tst r5, #0x100 @ is part access blocked
  130. it eq
  131. biceq r5, r5, #0x100 @ clear bit if set
  132. str r5, [r4] @ write back change
  133. ldr r4, sdrc_mr_0 @ get config addr
  134. ldr r5, [r4] @ get value
  135. str r5, [r4] @ write back change
  136. ldr r4, sdrc_emr2_0 @ get config addr
  137. ldr r5, [r4] @ get value
  138. str r5, [r4] @ write back change
  139. ldr r4, sdrc_manual_0 @ get config addr
  140. mov r5, #0x2 @ autorefresh command
  141. str r5, [r4] @ kick off refreshes
  142. ldr r4, sdrc_mr_1 @ get config addr
  143. ldr r5, [r4] @ get value
  144. str r5, [r4] @ write back change
  145. ldr r4, sdrc_emr2_1 @ get config addr
  146. ldr r5, [r4] @ get value
  147. str r5, [r4] @ write back change
  148. ldr r4, sdrc_manual_1 @ get config addr
  149. mov r5, #0x2 @ autorefresh command
  150. str r5, [r4] @ kick off refreshes
  151. bx lr
  152. sdrc_syscfg:
  153. .word SDRC_SYSCONFIG_P
  154. sdrc_mr_0:
  155. .word SDRC_MR_0_P
  156. sdrc_emr2_0:
  157. .word SDRC_EMR2_0_P
  158. sdrc_manual_0:
  159. .word SDRC_MANUAL_0_P
  160. sdrc_mr_1:
  161. .word SDRC_MR_1_P
  162. sdrc_emr2_1:
  163. .word SDRC_EMR2_1_P
  164. sdrc_manual_1:
  165. .word SDRC_MANUAL_1_P
  166. ENTRY(es3_sdrc_fix_sz)
  167. .word . - es3_sdrc_fix
  168. /* Function to call rom code to save secure ram context */
  169. ENTRY(save_secure_ram_context)
  170. stmfd sp!, {r1-r12, lr} @ save registers on stack
  171. save_secure_ram_debug:
  172. /* b save_secure_ram_debug */ @ enable to debug save code
  173. adr r3, api_params @ r3 points to parameters
  174. str r0, [r3,#0x4] @ r0 has sdram address
  175. ldr r12, high_mask
  176. and r3, r3, r12
  177. ldr r12, sram_phy_addr_mask
  178. orr r3, r3, r12
  179. mov r0, #25 @ set service ID for PPA
  180. mov r12, r0 @ copy secure service ID in r12
  181. mov r1, #0 @ set task id for ROM code in r1
  182. mov r2, #4 @ set some flags in r2, r6
  183. mov r6, #0xff
  184. mcr p15, 0, r0, c7, c10, 4 @ data write barrier
  185. mcr p15, 0, r0, c7, c10, 5 @ data memory barrier
  186. .word 0xE1600071 @ call SMI monitor (smi #1)
  187. nop
  188. nop
  189. nop
  190. nop
  191. ldmfd sp!, {r1-r12, pc}
  192. sram_phy_addr_mask:
  193. .word SRAM_BASE_P
  194. high_mask:
  195. .word 0xffff
  196. api_params:
  197. .word 0x4, 0x0, 0x0, 0x1, 0x1
  198. ENTRY(save_secure_ram_context_sz)
  199. .word . - save_secure_ram_context
  200. /*
  201. * Forces OMAP into idle state
  202. *
  203. * omap34xx_suspend() - This bit of code just executes the WFI
  204. * for normal idles.
  205. *
  206. * Note: This code get's copied to internal SRAM at boot. When the OMAP
  207. * wakes up it continues execution at the point it went to sleep.
  208. */
  209. ENTRY(omap34xx_cpu_suspend)
  210. stmfd sp!, {r0-r12, lr} @ save registers on stack
  211. loop:
  212. /*b loop*/ @Enable to debug by stepping through code
  213. /* r0 contains restore pointer in sdram */
  214. /* r1 contains information about saving context */
  215. ldr r4, sdrc_power @ read the SDRC_POWER register
  216. ldr r5, [r4] @ read the contents of SDRC_POWER
  217. orr r5, r5, #0x40 @ enable self refresh on idle req
  218. str r5, [r4] @ write back to SDRC_POWER register
  219. cmp r1, #0x0
  220. /* If context save is required, do that and execute wfi */
  221. bne save_context_wfi
  222. /* Data memory barrier and Data sync barrier */
  223. mov r1, #0
  224. mcr p15, 0, r1, c7, c10, 4
  225. mcr p15, 0, r1, c7, c10, 5
  226. wfi @ wait for interrupt
  227. nop
  228. nop
  229. nop
  230. nop
  231. nop
  232. nop
  233. nop
  234. nop
  235. nop
  236. nop
  237. bl wait_sdrc_ok
  238. ldmfd sp!, {r0-r12, pc} @ restore regs and return
  239. restore_es3:
  240. /*b restore_es3*/ @ Enable to debug restore code
  241. ldr r5, pm_prepwstst_core_p
  242. ldr r4, [r5]
  243. and r4, r4, #0x3
  244. cmp r4, #0x0 @ Check if previous power state of CORE is OFF
  245. bne restore
  246. adr r0, es3_sdrc_fix
  247. ldr r1, sram_base
  248. ldr r2, es3_sdrc_fix_sz
  249. mov r2, r2, ror #2
  250. copy_to_sram:
  251. ldmia r0!, {r3} @ val = *src
  252. stmia r1!, {r3} @ *dst = val
  253. subs r2, r2, #0x1 @ num_words--
  254. bne copy_to_sram
  255. ldr r1, sram_base
  256. blx r1
  257. b restore
  258. restore_3630:
  259. /*b restore_es3630*/ @ Enable to debug restore code
  260. ldr r1, pm_prepwstst_core_p
  261. ldr r2, [r1]
  262. and r2, r2, #0x3
  263. cmp r2, #0x0 @ Check if previous power state of CORE is OFF
  264. bne restore
  265. /* Disable RTA before giving control */
  266. ldr r1, control_mem_rta
  267. mov r2, #OMAP36XX_RTA_DISABLE
  268. str r2, [r1]
  269. /* Fall thru for the remaining logic */
  270. restore:
  271. /* b restore*/ @ Enable to debug restore code
  272. /* Check what was the reason for mpu reset and store the reason in r9*/
  273. /* 1 - Only L1 and logic lost */
  274. /* 2 - Only L2 lost - In this case, we wont be here */
  275. /* 3 - Both L1 and L2 lost */
  276. ldr r1, pm_pwstctrl_mpu
  277. ldr r2, [r1]
  278. and r2, r2, #0x3
  279. cmp r2, #0x0 @ Check if target power state was OFF or RET
  280. moveq r9, #0x3 @ MPU OFF => L1 and L2 lost
  281. movne r9, #0x1 @ Only L1 and L2 lost => avoid L2 invalidation
  282. bne logic_l1_restore
  283. ldr r0, l2dis_3630
  284. cmp r0, #0x1 @ should we disable L2 on 3630?
  285. bne skipl2dis
  286. mrc p15, 0, r0, c1, c0, 1
  287. bic r0, r0, #2 @ disable L2 cache
  288. mcr p15, 0, r0, c1, c0, 1
  289. skipl2dis:
  290. ldr r0, control_stat
  291. ldr r1, [r0]
  292. and r1, #0x700
  293. cmp r1, #0x300
  294. beq l2_inv_gp
  295. mov r0, #40 @ set service ID for PPA
  296. mov r12, r0 @ copy secure Service ID in r12
  297. mov r1, #0 @ set task id for ROM code in r1
  298. mov r2, #4 @ set some flags in r2, r6
  299. mov r6, #0xff
  300. adr r3, l2_inv_api_params @ r3 points to dummy parameters
  301. mcr p15, 0, r0, c7, c10, 4 @ data write barrier
  302. mcr p15, 0, r0, c7, c10, 5 @ data memory barrier
  303. .word 0xE1600071 @ call SMI monitor (smi #1)
  304. /* Write to Aux control register to set some bits */
  305. mov r0, #42 @ set service ID for PPA
  306. mov r12, r0 @ copy secure Service ID in r12
  307. mov r1, #0 @ set task id for ROM code in r1
  308. mov r2, #4 @ set some flags in r2, r6
  309. mov r6, #0xff
  310. ldr r4, scratchpad_base
  311. ldr r3, [r4, #0xBC] @ r3 points to parameters
  312. mcr p15, 0, r0, c7, c10, 4 @ data write barrier
  313. mcr p15, 0, r0, c7, c10, 5 @ data memory barrier
  314. .word 0xE1600071 @ call SMI monitor (smi #1)
  315. #ifdef CONFIG_OMAP3_L2_AUX_SECURE_SAVE_RESTORE
  316. /* Restore L2 aux control register */
  317. @ set service ID for PPA
  318. mov r0, #CONFIG_OMAP3_L2_AUX_SECURE_SERVICE_SET_ID
  319. mov r12, r0 @ copy service ID in r12
  320. mov r1, #0 @ set task ID for ROM code in r1
  321. mov r2, #4 @ set some flags in r2, r6
  322. mov r6, #0xff
  323. ldr r4, scratchpad_base
  324. ldr r3, [r4, #0xBC]
  325. adds r3, r3, #8 @ r3 points to parameters
  326. mcr p15, 0, r0, c7, c10, 4 @ data write barrier
  327. mcr p15, 0, r0, c7, c10, 5 @ data memory barrier
  328. .word 0xE1600071 @ call SMI monitor (smi #1)
  329. #endif
  330. b logic_l1_restore
  331. l2_inv_api_params:
  332. .word 0x1, 0x00
  333. l2_inv_gp:
  334. /* Execute smi to invalidate L2 cache */
  335. mov r12, #0x1 @ set up to invalide L2
  336. smi: .word 0xE1600070 @ Call SMI monitor (smieq)
  337. /* Write to Aux control register to set some bits */
  338. ldr r4, scratchpad_base
  339. ldr r3, [r4,#0xBC]
  340. ldr r0, [r3,#4]
  341. mov r12, #0x3
  342. .word 0xE1600070 @ Call SMI monitor (smieq)
  343. ldr r4, scratchpad_base
  344. ldr r3, [r4,#0xBC]
  345. ldr r0, [r3,#12]
  346. mov r12, #0x2
  347. .word 0xE1600070 @ Call SMI monitor (smieq)
  348. logic_l1_restore:
  349. ldr r1, l2dis_3630
  350. cmp r1, #0x1 @ Do we need to re-enable L2 on 3630?
  351. bne skipl2reen
  352. mrc p15, 0, r1, c1, c0, 1
  353. orr r1, r1, #2 @ re-enable L2 cache
  354. mcr p15, 0, r1, c1, c0, 1
  355. skipl2reen:
  356. mov r1, #0
  357. /* Invalidate all instruction caches to PoU
  358. * and flush branch target cache */
  359. mcr p15, 0, r1, c7, c5, 0
  360. ldr r4, scratchpad_base
  361. ldr r3, [r4,#0xBC]
  362. adds r3, r3, #16
  363. ldmia r3!, {r4-r6}
  364. mov sp, r4
  365. msr spsr_cxsf, r5
  366. mov lr, r6
  367. ldmia r3!, {r4-r9}
  368. /* Coprocessor access Control Register */
  369. mcr p15, 0, r4, c1, c0, 2
  370. /* TTBR0 */
  371. MCR p15, 0, r5, c2, c0, 0
  372. /* TTBR1 */
  373. MCR p15, 0, r6, c2, c0, 1
  374. /* Translation table base control register */
  375. MCR p15, 0, r7, c2, c0, 2
  376. /*domain access Control Register */
  377. MCR p15, 0, r8, c3, c0, 0
  378. /* data fault status Register */
  379. MCR p15, 0, r9, c5, c0, 0
  380. ldmia r3!,{r4-r8}
  381. /* instruction fault status Register */
  382. MCR p15, 0, r4, c5, c0, 1
  383. /*Data Auxiliary Fault Status Register */
  384. MCR p15, 0, r5, c5, c1, 0
  385. /*Instruction Auxiliary Fault Status Register*/
  386. MCR p15, 0, r6, c5, c1, 1
  387. /*Data Fault Address Register */
  388. MCR p15, 0, r7, c6, c0, 0
  389. /*Instruction Fault Address Register*/
  390. MCR p15, 0, r8, c6, c0, 2
  391. ldmia r3!,{r4-r7}
  392. /* user r/w thread and process ID */
  393. MCR p15, 0, r4, c13, c0, 2
  394. /* user ro thread and process ID */
  395. MCR p15, 0, r5, c13, c0, 3
  396. /*Privileged only thread and process ID */
  397. MCR p15, 0, r6, c13, c0, 4
  398. /* cache size selection */
  399. MCR p15, 2, r7, c0, c0, 0
  400. ldmia r3!,{r4-r8}
  401. /* Data TLB lockdown registers */
  402. MCR p15, 0, r4, c10, c0, 0
  403. /* Instruction TLB lockdown registers */
  404. MCR p15, 0, r5, c10, c0, 1
  405. /* Secure or Nonsecure Vector Base Address */
  406. MCR p15, 0, r6, c12, c0, 0
  407. /* FCSE PID */
  408. MCR p15, 0, r7, c13, c0, 0
  409. /* Context PID */
  410. MCR p15, 0, r8, c13, c0, 1
  411. ldmia r3!,{r4-r5}
  412. /* primary memory remap register */
  413. MCR p15, 0, r4, c10, c2, 0
  414. /*normal memory remap register */
  415. MCR p15, 0, r5, c10, c2, 1
  416. /* Restore cpsr */
  417. ldmia r3!,{r4} /*load CPSR from SDRAM*/
  418. msr cpsr, r4 /*store cpsr */
  419. /* Enabling MMU here */
  420. mrc p15, 0, r7, c2, c0, 2 /* Read TTBRControl */
  421. /* Extract N (0:2) bits and decide whether to use TTBR0 or TTBR1*/
  422. and r7, #0x7
  423. cmp r7, #0x0
  424. beq usettbr0
  425. ttbr_error:
  426. /* More work needs to be done to support N[0:2] value other than 0
  427. * So looping here so that the error can be detected
  428. */
  429. b ttbr_error
  430. usettbr0:
  431. mrc p15, 0, r2, c2, c0, 0
  432. ldr r5, ttbrbit_mask
  433. and r2, r5
  434. mov r4, pc
  435. ldr r5, table_index_mask
  436. and r4, r5 /* r4 = 31 to 20 bits of pc */
  437. /* Extract the value to be written to table entry */
  438. ldr r1, table_entry
  439. add r1, r1, r4 /* r1 has value to be written to table entry*/
  440. /* Getting the address of table entry to modify */
  441. lsr r4, #18
  442. add r2, r4 /* r2 has the location which needs to be modified */
  443. /* Storing previous entry of location being modified */
  444. ldr r5, scratchpad_base
  445. ldr r4, [r2]
  446. str r4, [r5, #0xC0]
  447. /* Modify the table entry */
  448. str r1, [r2]
  449. /* Storing address of entry being modified
  450. * - will be restored after enabling MMU */
  451. ldr r5, scratchpad_base
  452. str r2, [r5, #0xC4]
  453. mov r0, #0
  454. mcr p15, 0, r0, c7, c5, 4 @ Flush prefetch buffer
  455. mcr p15, 0, r0, c7, c5, 6 @ Invalidate branch predictor array
  456. mcr p15, 0, r0, c8, c5, 0 @ Invalidate instruction TLB
  457. mcr p15, 0, r0, c8, c6, 0 @ Invalidate data TLB
  458. /* Restore control register but dont enable caches here*/
  459. /* Caches will be enabled after restoring MMU table entry */
  460. ldmia r3!, {r4}
  461. /* Store previous value of control register in scratchpad */
  462. str r4, [r5, #0xC8]
  463. ldr r2, cache_pred_disable_mask
  464. and r4, r2
  465. mcr p15, 0, r4, c1, c0, 0
  466. ldmfd sp!, {r0-r12, pc} @ restore regs and return
  467. save_context_wfi:
  468. /*b save_context_wfi*/ @ enable to debug save code
  469. mov r8, r0 /* Store SDRAM address in r8 */
  470. mrc p15, 0, r5, c1, c0, 1 @ Read Auxiliary Control Register
  471. mov r4, #0x1 @ Number of parameters for restore call
  472. stmia r8!, {r4-r5} @ Push parameters for restore call
  473. mrc p15, 1, r5, c9, c0, 2 @ Read L2 AUX ctrl register
  474. stmia r8!, {r4-r5} @ Push parameters for restore call
  475. /* Check what that target sleep state is:stored in r1*/
  476. /* 1 - Only L1 and logic lost */
  477. /* 2 - Only L2 lost */
  478. /* 3 - Both L1 and L2 lost */
  479. cmp r1, #0x2 /* Only L2 lost */
  480. beq clean_l2
  481. cmp r1, #0x1 /* L2 retained */
  482. /* r9 stores whether to clean L2 or not*/
  483. moveq r9, #0x0 /* Dont Clean L2 */
  484. movne r9, #0x1 /* Clean L2 */
  485. l1_logic_lost:
  486. /* Store sp and spsr to SDRAM */
  487. mov r4, sp
  488. mrs r5, spsr
  489. mov r6, lr
  490. stmia r8!, {r4-r6}
  491. /* Save all ARM registers */
  492. /* Coprocessor access control register */
  493. mrc p15, 0, r6, c1, c0, 2
  494. stmia r8!, {r6}
  495. /* TTBR0, TTBR1 and Translation table base control */
  496. mrc p15, 0, r4, c2, c0, 0
  497. mrc p15, 0, r5, c2, c0, 1
  498. mrc p15, 0, r6, c2, c0, 2
  499. stmia r8!, {r4-r6}
  500. /* Domain access control register, data fault status register,
  501. and instruction fault status register */
  502. mrc p15, 0, r4, c3, c0, 0
  503. mrc p15, 0, r5, c5, c0, 0
  504. mrc p15, 0, r6, c5, c0, 1
  505. stmia r8!, {r4-r6}
  506. /* Data aux fault status register, instruction aux fault status,
  507. datat fault address register and instruction fault address register*/
  508. mrc p15, 0, r4, c5, c1, 0
  509. mrc p15, 0, r5, c5, c1, 1
  510. mrc p15, 0, r6, c6, c0, 0
  511. mrc p15, 0, r7, c6, c0, 2
  512. stmia r8!, {r4-r7}
  513. /* user r/w thread and process ID, user r/o thread and process ID,
  514. priv only thread and process ID, cache size selection */
  515. mrc p15, 0, r4, c13, c0, 2
  516. mrc p15, 0, r5, c13, c0, 3
  517. mrc p15, 0, r6, c13, c0, 4
  518. mrc p15, 2, r7, c0, c0, 0
  519. stmia r8!, {r4-r7}
  520. /* Data TLB lockdown, instruction TLB lockdown registers */
  521. mrc p15, 0, r5, c10, c0, 0
  522. mrc p15, 0, r6, c10, c0, 1
  523. stmia r8!, {r5-r6}
  524. /* Secure or non secure vector base address, FCSE PID, Context PID*/
  525. mrc p15, 0, r4, c12, c0, 0
  526. mrc p15, 0, r5, c13, c0, 0
  527. mrc p15, 0, r6, c13, c0, 1
  528. stmia r8!, {r4-r6}
  529. /* Primary remap, normal remap registers */
  530. mrc p15, 0, r4, c10, c2, 0
  531. mrc p15, 0, r5, c10, c2, 1
  532. stmia r8!,{r4-r5}
  533. /* Store current cpsr*/
  534. mrs r2, cpsr
  535. stmia r8!, {r2}
  536. mrc p15, 0, r4, c1, c0, 0
  537. /* save control register */
  538. stmia r8!, {r4}
  539. clean_caches:
  540. /* Clean Data or unified cache to POU*/
  541. /* How to invalidate only L1 cache???? - #FIX_ME# */
  542. /* mcr p15, 0, r11, c7, c11, 1 */
  543. cmp r9, #1 /* Check whether L2 inval is required or not*/
  544. bne skip_l2_inval
  545. clean_l2:
  546. /*
  547. * Jump out to kernel flush routine
  548. * - reuse that code is better
  549. * - it executes in a cached space so is faster than refetch per-block
  550. * - should be faster and will change with kernel
  551. * - 'might' have to copy address, load and jump to it
  552. * - lr is used since we are running in SRAM currently.
  553. */
  554. ldr r1, kernel_flush
  555. mov lr, pc
  556. bx r1
  557. skip_l2_inval:
  558. /* Data memory barrier and Data sync barrier */
  559. mov r1, #0
  560. mcr p15, 0, r1, c7, c10, 4
  561. mcr p15, 0, r1, c7, c10, 5
  562. wfi @ wait for interrupt
  563. nop
  564. nop
  565. nop
  566. nop
  567. nop
  568. nop
  569. nop
  570. nop
  571. nop
  572. nop
  573. bl wait_sdrc_ok
  574. /* restore regs and return */
  575. ldmfd sp!, {r0-r12, pc}
  576. /* Make sure SDRC accesses are ok */
  577. wait_sdrc_ok:
  578. /* DPLL3 must be locked before accessing the SDRC. Maybe the HW ensures this. */
  579. ldr r4, cm_idlest_ckgen
  580. wait_dpll3_lock:
  581. ldr r5, [r4]
  582. tst r5, #1
  583. beq wait_dpll3_lock
  584. ldr r4, cm_idlest1_core
  585. wait_sdrc_ready:
  586. ldr r5, [r4]
  587. tst r5, #0x2
  588. bne wait_sdrc_ready
  589. /* allow DLL powerdown upon hw idle req */
  590. ldr r4, sdrc_power
  591. ldr r5, [r4]
  592. bic r5, r5, #0x40
  593. str r5, [r4]
  594. is_dll_in_lock_mode:
  595. /* Is dll in lock mode? */
  596. ldr r4, sdrc_dlla_ctrl
  597. ldr r5, [r4]
  598. tst r5, #0x4
  599. bxne lr
  600. /* wait till dll locks */
  601. wait_dll_lock_timed:
  602. ldr r4, wait_dll_lock_counter
  603. add r4, r4, #1
  604. str r4, wait_dll_lock_counter
  605. ldr r4, sdrc_dlla_status
  606. mov r6, #8 /* Wait 20uS for lock */
  607. wait_dll_lock:
  608. subs r6, r6, #0x1
  609. beq kick_dll
  610. ldr r5, [r4]
  611. and r5, r5, #0x4
  612. cmp r5, #0x4
  613. bne wait_dll_lock
  614. bx lr
  615. /* disable/reenable DLL if not locked */
  616. kick_dll:
  617. ldr r4, sdrc_dlla_ctrl
  618. ldr r5, [r4]
  619. mov r6, r5
  620. bic r6, #(1<<3) /* disable dll */
  621. str r6, [r4]
  622. dsb
  623. orr r6, r6, #(1<<3) /* enable dll */
  624. str r6, [r4]
  625. dsb
  626. ldr r4, kick_counter
  627. add r4, r4, #1
  628. str r4, kick_counter
  629. b wait_dll_lock_timed
  630. cm_idlest1_core:
  631. .word CM_IDLEST1_CORE_V
  632. cm_idlest_ckgen:
  633. .word CM_IDLEST_CKGEN_V
  634. sdrc_dlla_status:
  635. .word SDRC_DLLA_STATUS_V
  636. sdrc_dlla_ctrl:
  637. .word SDRC_DLLA_CTRL_V
  638. pm_prepwstst_core:
  639. .word PM_PREPWSTST_CORE_V
  640. pm_prepwstst_core_p:
  641. .word PM_PREPWSTST_CORE_P
  642. pm_prepwstst_mpu:
  643. .word PM_PREPWSTST_MPU_V
  644. pm_pwstctrl_mpu:
  645. .word PM_PWSTCTRL_MPU_P
  646. scratchpad_base:
  647. .word SCRATCHPAD_BASE_P
  648. sram_base:
  649. .word SRAM_BASE_P + 0x8000
  650. sdrc_power:
  651. .word SDRC_POWER_V
  652. clk_stabilize_delay:
  653. .word 0x000001FF
  654. assoc_mask:
  655. .word 0x3ff
  656. numset_mask:
  657. .word 0x7fff
  658. ttbrbit_mask:
  659. .word 0xFFFFC000
  660. table_index_mask:
  661. .word 0xFFF00000
  662. table_entry:
  663. .word 0x00000C02
  664. cache_pred_disable_mask:
  665. .word 0xFFFFE7FB
  666. control_stat:
  667. .word CONTROL_STAT
  668. control_mem_rta:
  669. .word CONTROL_MEM_RTA_CTRL
  670. kernel_flush:
  671. .word v7_flush_dcache_all
  672. l2dis_3630:
  673. .word 0
  674. /*
  675. * When exporting to userspace while the counters are in SRAM,
  676. * these 2 words need to be at the end to facilitate retrival!
  677. */
  678. kick_counter:
  679. .word 0
  680. wait_dll_lock_counter:
  681. .word 0
  682. ENTRY(omap34xx_cpu_suspend_sz)
  683. .word . - omap34xx_cpu_suspend