/* sleep34xx.S */
/*
 * linux/arch/arm/mach-omap2/sleep.S
 *
 * (C) Copyright 2007
 * Texas Instruments
 * Karthik Dasu <karthik-dp@ti.com>
 *
 * (C) Copyright 2004
 * Texas Instruments, <www.ti.com>
 * Richard Woodruff <r-woodruff2@ti.com>
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 2 of
 * the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston,
 * MA 02111-1307 USA
 */
  27. #include <linux/linkage.h>
  28. #include <asm/assembler.h>
  29. #include <mach/io.h>
  30. #include <plat/control.h>
  31. #include "cm.h"
  32. #include "prm.h"
  33. #include "sdrc.h"
  34. #define PM_PREPWSTST_CORE_V OMAP34XX_PRM_REGADDR(CORE_MOD, \
  35. OMAP3430_PM_PREPWSTST)
  36. #define PM_PREPWSTST_CORE_P 0x48306AE8
  37. #define PM_PREPWSTST_MPU_V OMAP34XX_PRM_REGADDR(MPU_MOD, \
  38. OMAP3430_PM_PREPWSTST)
  39. #define PM_PWSTCTRL_MPU_P OMAP3430_PRM_BASE + MPU_MOD + PM_PWSTCTRL
  40. #define CM_IDLEST1_CORE_V OMAP34XX_CM_REGADDR(CORE_MOD, CM_IDLEST1)
  41. #define SRAM_BASE_P 0x40200000
  42. #define CONTROL_STAT 0x480022F0
  43. #define SCRATCHPAD_MEM_OFFS 0x310 /* Move this as correct place is
  44. * available */
  45. #define SCRATCHPAD_BASE_P (OMAP343X_CTRL_BASE + OMAP343X_CONTROL_MEM_WKUP\
  46. + SCRATCHPAD_MEM_OFFS)
  47. #define SDRC_POWER_V OMAP34XX_SDRC_REGADDR(SDRC_POWER)
  48. #define SDRC_SYSCONFIG_P (OMAP343X_SDRC_BASE + SDRC_SYSCONFIG)
  49. #define SDRC_MR_0_P (OMAP343X_SDRC_BASE + SDRC_MR_0)
  50. #define SDRC_EMR2_0_P (OMAP343X_SDRC_BASE + SDRC_EMR2_0)
  51. #define SDRC_MANUAL_0_P (OMAP343X_SDRC_BASE + SDRC_MANUAL_0)
  52. #define SDRC_MR_1_P (OMAP343X_SDRC_BASE + SDRC_MR_1)
  53. #define SDRC_EMR2_1_P (OMAP343X_SDRC_BASE + SDRC_EMR2_1)
  54. #define SDRC_MANUAL_1_P (OMAP343X_SDRC_BASE + SDRC_MANUAL_1)
  55. #define SDRC_DLLA_STATUS_V OMAP34XX_SDRC_REGADDR(SDRC_DLLA_STATUS)
  56. #define SDRC_DLLA_CTRL_V OMAP34XX_SDRC_REGADDR(SDRC_DLLA_CTRL)
  57. .text
  58. /* Function call to get the restore pointer for resume from OFF */
  59. ENTRY(get_restore_pointer)
  60. stmfd sp!, {lr} @ save registers on stack
  61. adr r0, restore
  62. ldmfd sp!, {pc} @ restore regs and return
  63. ENTRY(get_restore_pointer_sz)
  64. .word . - get_restore_pointer
  65. .text
  66. /* Function call to get the restore pointer for for ES3 to resume from OFF */
  67. ENTRY(get_es3_restore_pointer)
  68. stmfd sp!, {lr} @ save registers on stack
  69. adr r0, restore_es3
  70. ldmfd sp!, {pc} @ restore regs and return
  71. ENTRY(get_es3_restore_pointer_sz)
  72. .word . - get_es3_restore_pointer
  73. ENTRY(es3_sdrc_fix)
  74. ldr r4, sdrc_syscfg @ get config addr
  75. ldr r5, [r4] @ get value
  76. tst r5, #0x100 @ is part access blocked
  77. it eq
  78. biceq r5, r5, #0x100 @ clear bit if set
  79. str r5, [r4] @ write back change
  80. ldr r4, sdrc_mr_0 @ get config addr
  81. ldr r5, [r4] @ get value
  82. str r5, [r4] @ write back change
  83. ldr r4, sdrc_emr2_0 @ get config addr
  84. ldr r5, [r4] @ get value
  85. str r5, [r4] @ write back change
  86. ldr r4, sdrc_manual_0 @ get config addr
  87. mov r5, #0x2 @ autorefresh command
  88. str r5, [r4] @ kick off refreshes
  89. ldr r4, sdrc_mr_1 @ get config addr
  90. ldr r5, [r4] @ get value
  91. str r5, [r4] @ write back change
  92. ldr r4, sdrc_emr2_1 @ get config addr
  93. ldr r5, [r4] @ get value
  94. str r5, [r4] @ write back change
  95. ldr r4, sdrc_manual_1 @ get config addr
  96. mov r5, #0x2 @ autorefresh command
  97. str r5, [r4] @ kick off refreshes
  98. bx lr
  99. sdrc_syscfg:
  100. .word SDRC_SYSCONFIG_P
  101. sdrc_mr_0:
  102. .word SDRC_MR_0_P
  103. sdrc_emr2_0:
  104. .word SDRC_EMR2_0_P
  105. sdrc_manual_0:
  106. .word SDRC_MANUAL_0_P
  107. sdrc_mr_1:
  108. .word SDRC_MR_1_P
  109. sdrc_emr2_1:
  110. .word SDRC_EMR2_1_P
  111. sdrc_manual_1:
  112. .word SDRC_MANUAL_1_P
  113. ENTRY(es3_sdrc_fix_sz)
  114. .word . - es3_sdrc_fix
  115. /* Function to call rom code to save secure ram context */
  116. ENTRY(save_secure_ram_context)
  117. stmfd sp!, {r1-r12, lr} @ save registers on stack
  118. save_secure_ram_debug:
  119. /* b save_secure_ram_debug */ @ enable to debug save code
  120. adr r3, api_params @ r3 points to parameters
  121. str r0, [r3,#0x4] @ r0 has sdram address
  122. ldr r12, high_mask
  123. and r3, r3, r12
  124. ldr r12, sram_phy_addr_mask
  125. orr r3, r3, r12
  126. mov r0, #25 @ set service ID for PPA
  127. mov r12, r0 @ copy secure service ID in r12
  128. mov r1, #0 @ set task id for ROM code in r1
  129. mov r2, #4 @ set some flags in r2, r6
  130. mov r6, #0xff
  131. mcr p15, 0, r0, c7, c10, 4 @ data write barrier
  132. mcr p15, 0, r0, c7, c10, 5 @ data memory barrier
  133. .word 0xE1600071 @ call SMI monitor (smi #1)
  134. nop
  135. nop
  136. nop
  137. nop
  138. ldmfd sp!, {r1-r12, pc}
  139. sram_phy_addr_mask:
  140. .word SRAM_BASE_P
  141. high_mask:
  142. .word 0xffff
  143. api_params:
  144. .word 0x4, 0x0, 0x0, 0x1, 0x1
  145. ENTRY(save_secure_ram_context_sz)
  146. .word . - save_secure_ram_context
  147. /*
  148. * Forces OMAP into idle state
  149. *
  150. * omap34xx_suspend() - This bit of code just executes the WFI
  151. * for normal idles.
  152. *
  153. * Note: This code get's copied to internal SRAM at boot. When the OMAP
  154. * wakes up it continues execution at the point it went to sleep.
  155. */
  156. ENTRY(omap34xx_cpu_suspend)
  157. stmfd sp!, {r0-r12, lr} @ save registers on stack
  158. loop:
  159. /*b loop*/ @Enable to debug by stepping through code
  160. /* r0 contains restore pointer in sdram */
  161. /* r1 contains information about saving context */
  162. ldr r4, sdrc_power @ read the SDRC_POWER register
  163. ldr r5, [r4] @ read the contents of SDRC_POWER
  164. orr r5, r5, #0x40 @ enable self refresh on idle req
  165. str r5, [r4] @ write back to SDRC_POWER register
  166. cmp r1, #0x0
  167. /* If context save is required, do that and execute wfi */
  168. bne save_context_wfi
  169. /* Data memory barrier and Data sync barrier */
  170. mov r1, #0
  171. mcr p15, 0, r1, c7, c10, 4
  172. mcr p15, 0, r1, c7, c10, 5
  173. wfi @ wait for interrupt
  174. nop
  175. nop
  176. nop
  177. nop
  178. nop
  179. nop
  180. nop
  181. nop
  182. nop
  183. nop
  184. bl wait_sdrc_ok
  185. ldmfd sp!, {r0-r12, pc} @ restore regs and return
  186. restore_es3:
  187. /*b restore_es3*/ @ Enable to debug restore code
  188. ldr r5, pm_prepwstst_core_p
  189. ldr r4, [r5]
  190. and r4, r4, #0x3
  191. cmp r4, #0x0 @ Check if previous power state of CORE is OFF
  192. bne restore
  193. adr r0, es3_sdrc_fix
  194. ldr r1, sram_base
  195. ldr r2, es3_sdrc_fix_sz
  196. mov r2, r2, ror #2
  197. copy_to_sram:
  198. ldmia r0!, {r3} @ val = *src
  199. stmia r1!, {r3} @ *dst = val
  200. subs r2, r2, #0x1 @ num_words--
  201. bne copy_to_sram
  202. ldr r1, sram_base
  203. blx r1
  204. restore:
  205. /* b restore*/ @ Enable to debug restore code
  206. /* Check what was the reason for mpu reset and store the reason in r9*/
  207. /* 1 - Only L1 and logic lost */
  208. /* 2 - Only L2 lost - In this case, we wont be here */
  209. /* 3 - Both L1 and L2 lost */
  210. ldr r1, pm_pwstctrl_mpu
  211. ldr r2, [r1]
  212. and r2, r2, #0x3
  213. cmp r2, #0x0 @ Check if target power state was OFF or RET
  214. moveq r9, #0x3 @ MPU OFF => L1 and L2 lost
  215. movne r9, #0x1 @ Only L1 and L2 lost => avoid L2 invalidation
  216. bne logic_l1_restore
  217. ldr r0, control_stat
  218. ldr r1, [r0]
  219. and r1, #0x700
  220. cmp r1, #0x300
  221. beq l2_inv_gp
  222. mov r0, #40 @ set service ID for PPA
  223. mov r12, r0 @ copy secure Service ID in r12
  224. mov r1, #0 @ set task id for ROM code in r1
  225. mov r2, #4 @ set some flags in r2, r6
  226. mov r6, #0xff
  227. adr r3, l2_inv_api_params @ r3 points to dummy parameters
  228. mcr p15, 0, r0, c7, c10, 4 @ data write barrier
  229. mcr p15, 0, r0, c7, c10, 5 @ data memory barrier
  230. .word 0xE1600071 @ call SMI monitor (smi #1)
  231. /* Write to Aux control register to set some bits */
  232. mov r0, #42 @ set service ID for PPA
  233. mov r12, r0 @ copy secure Service ID in r12
  234. mov r1, #0 @ set task id for ROM code in r1
  235. mov r2, #4 @ set some flags in r2, r6
  236. mov r6, #0xff
  237. adr r3, write_aux_control_params @ r3 points to parameters
  238. mcr p15, 0, r0, c7, c10, 4 @ data write barrier
  239. mcr p15, 0, r0, c7, c10, 5 @ data memory barrier
  240. .word 0xE1600071 @ call SMI monitor (smi #1)
  241. b logic_l1_restore
  242. l2_inv_api_params:
  243. .word 0x1, 0x00
  244. write_aux_control_params:
  245. .word 0x1, 0x72
  246. l2_inv_gp:
  247. /* Execute smi to invalidate L2 cache */
  248. mov r12, #0x1 @ set up to invalide L2
  249. smi: .word 0xE1600070 @ Call SMI monitor (smieq)
  250. /* Write to Aux control register to set some bits */
  251. mov r0, #0x72
  252. mov r12, #0x3
  253. .word 0xE1600070 @ Call SMI monitor (smieq)
  254. logic_l1_restore:
  255. mov r1, #0
  256. /* Invalidate all instruction caches to PoU
  257. * and flush branch target cache */
  258. mcr p15, 0, r1, c7, c5, 0
  259. ldr r4, scratchpad_base
  260. ldr r3, [r4,#0xBC]
  261. ldmia r3!, {r4-r6}
  262. mov sp, r4
  263. msr spsr_cxsf, r5
  264. mov lr, r6
  265. ldmia r3!, {r4-r9}
  266. /* Coprocessor access Control Register */
  267. mcr p15, 0, r4, c1, c0, 2
  268. /* TTBR0 */
  269. MCR p15, 0, r5, c2, c0, 0
  270. /* TTBR1 */
  271. MCR p15, 0, r6, c2, c0, 1
  272. /* Translation table base control register */
  273. MCR p15, 0, r7, c2, c0, 2
  274. /*domain access Control Register */
  275. MCR p15, 0, r8, c3, c0, 0
  276. /* data fault status Register */
  277. MCR p15, 0, r9, c5, c0, 0
  278. ldmia r3!,{r4-r8}
  279. /* instruction fault status Register */
  280. MCR p15, 0, r4, c5, c0, 1
  281. /*Data Auxiliary Fault Status Register */
  282. MCR p15, 0, r5, c5, c1, 0
  283. /*Instruction Auxiliary Fault Status Register*/
  284. MCR p15, 0, r6, c5, c1, 1
  285. /*Data Fault Address Register */
  286. MCR p15, 0, r7, c6, c0, 0
  287. /*Instruction Fault Address Register*/
  288. MCR p15, 0, r8, c6, c0, 2
  289. ldmia r3!,{r4-r7}
  290. /* user r/w thread and process ID */
  291. MCR p15, 0, r4, c13, c0, 2
  292. /* user ro thread and process ID */
  293. MCR p15, 0, r5, c13, c0, 3
  294. /*Privileged only thread and process ID */
  295. MCR p15, 0, r6, c13, c0, 4
  296. /* cache size selection */
  297. MCR p15, 2, r7, c0, c0, 0
  298. ldmia r3!,{r4-r8}
  299. /* Data TLB lockdown registers */
  300. MCR p15, 0, r4, c10, c0, 0
  301. /* Instruction TLB lockdown registers */
  302. MCR p15, 0, r5, c10, c0, 1
  303. /* Secure or Nonsecure Vector Base Address */
  304. MCR p15, 0, r6, c12, c0, 0
  305. /* FCSE PID */
  306. MCR p15, 0, r7, c13, c0, 0
  307. /* Context PID */
  308. MCR p15, 0, r8, c13, c0, 1
  309. ldmia r3!,{r4-r5}
  310. /* primary memory remap register */
  311. MCR p15, 0, r4, c10, c2, 0
  312. /*normal memory remap register */
  313. MCR p15, 0, r5, c10, c2, 1
  314. /* Restore cpsr */
  315. ldmia r3!,{r4} /*load CPSR from SDRAM*/
  316. msr cpsr, r4 /*store cpsr */
  317. /* Enabling MMU here */
  318. mrc p15, 0, r7, c2, c0, 2 /* Read TTBRControl */
  319. /* Extract N (0:2) bits and decide whether to use TTBR0 or TTBR1*/
  320. and r7, #0x7
  321. cmp r7, #0x0
  322. beq usettbr0
  323. ttbr_error:
  324. /* More work needs to be done to support N[0:2] value other than 0
  325. * So looping here so that the error can be detected
  326. */
  327. b ttbr_error
  328. usettbr0:
  329. mrc p15, 0, r2, c2, c0, 0
  330. ldr r5, ttbrbit_mask
  331. and r2, r5
  332. mov r4, pc
  333. ldr r5, table_index_mask
  334. and r4, r5 /* r4 = 31 to 20 bits of pc */
  335. /* Extract the value to be written to table entry */
  336. ldr r1, table_entry
  337. add r1, r1, r4 /* r1 has value to be written to table entry*/
  338. /* Getting the address of table entry to modify */
  339. lsr r4, #18
  340. add r2, r4 /* r2 has the location which needs to be modified */
  341. /* Storing previous entry of location being modified */
  342. ldr r5, scratchpad_base
  343. ldr r4, [r2]
  344. str r4, [r5, #0xC0]
  345. /* Modify the table entry */
  346. str r1, [r2]
  347. /* Storing address of entry being modified
  348. * - will be restored after enabling MMU */
  349. ldr r5, scratchpad_base
  350. str r2, [r5, #0xC4]
  351. mov r0, #0
  352. mcr p15, 0, r0, c7, c5, 4 @ Flush prefetch buffer
  353. mcr p15, 0, r0, c7, c5, 6 @ Invalidate branch predictor array
  354. mcr p15, 0, r0, c8, c5, 0 @ Invalidate instruction TLB
  355. mcr p15, 0, r0, c8, c6, 0 @ Invalidate data TLB
  356. /* Restore control register but dont enable caches here*/
  357. /* Caches will be enabled after restoring MMU table entry */
  358. ldmia r3!, {r4}
  359. /* Store previous value of control register in scratchpad */
  360. str r4, [r5, #0xC8]
  361. ldr r2, cache_pred_disable_mask
  362. and r4, r2
  363. mcr p15, 0, r4, c1, c0, 0
  364. ldmfd sp!, {r0-r12, pc} @ restore regs and return
  365. save_context_wfi:
  366. /*b save_context_wfi*/ @ enable to debug save code
  367. mov r8, r0 /* Store SDRAM address in r8 */
  368. /* Check what that target sleep state is:stored in r1*/
  369. /* 1 - Only L1 and logic lost */
  370. /* 2 - Only L2 lost */
  371. /* 3 - Both L1 and L2 lost */
  372. cmp r1, #0x2 /* Only L2 lost */
  373. beq clean_l2
  374. cmp r1, #0x1 /* L2 retained */
  375. /* r9 stores whether to clean L2 or not*/
  376. moveq r9, #0x0 /* Dont Clean L2 */
  377. movne r9, #0x1 /* Clean L2 */
  378. l1_logic_lost:
  379. /* Store sp and spsr to SDRAM */
  380. mov r4, sp
  381. mrs r5, spsr
  382. mov r6, lr
  383. stmia r8!, {r4-r6}
  384. /* Save all ARM registers */
  385. /* Coprocessor access control register */
  386. mrc p15, 0, r6, c1, c0, 2
  387. stmia r8!, {r6}
  388. /* TTBR0, TTBR1 and Translation table base control */
  389. mrc p15, 0, r4, c2, c0, 0
  390. mrc p15, 0, r5, c2, c0, 1
  391. mrc p15, 0, r6, c2, c0, 2
  392. stmia r8!, {r4-r6}
  393. /* Domain access control register, data fault status register,
  394. and instruction fault status register */
  395. mrc p15, 0, r4, c3, c0, 0
  396. mrc p15, 0, r5, c5, c0, 0
  397. mrc p15, 0, r6, c5, c0, 1
  398. stmia r8!, {r4-r6}
  399. /* Data aux fault status register, instruction aux fault status,
  400. datat fault address register and instruction fault address register*/
  401. mrc p15, 0, r4, c5, c1, 0
  402. mrc p15, 0, r5, c5, c1, 1
  403. mrc p15, 0, r6, c6, c0, 0
  404. mrc p15, 0, r7, c6, c0, 2
  405. stmia r8!, {r4-r7}
  406. /* user r/w thread and process ID, user r/o thread and process ID,
  407. priv only thread and process ID, cache size selection */
  408. mrc p15, 0, r4, c13, c0, 2
  409. mrc p15, 0, r5, c13, c0, 3
  410. mrc p15, 0, r6, c13, c0, 4
  411. mrc p15, 2, r7, c0, c0, 0
  412. stmia r8!, {r4-r7}
  413. /* Data TLB lockdown, instruction TLB lockdown registers */
  414. mrc p15, 0, r5, c10, c0, 0
  415. mrc p15, 0, r6, c10, c0, 1
  416. stmia r8!, {r5-r6}
  417. /* Secure or non secure vector base address, FCSE PID, Context PID*/
  418. mrc p15, 0, r4, c12, c0, 0
  419. mrc p15, 0, r5, c13, c0, 0
  420. mrc p15, 0, r6, c13, c0, 1
  421. stmia r8!, {r4-r6}
  422. /* Primary remap, normal remap registers */
  423. mrc p15, 0, r4, c10, c2, 0
  424. mrc p15, 0, r5, c10, c2, 1
  425. stmia r8!,{r4-r5}
  426. /* Store current cpsr*/
  427. mrs r2, cpsr
  428. stmia r8!, {r2}
  429. mrc p15, 0, r4, c1, c0, 0
  430. /* save control register */
  431. stmia r8!, {r4}
  432. clean_caches:
  433. /* Clean Data or unified cache to POU*/
  434. /* How to invalidate only L1 cache???? - #FIX_ME# */
  435. /* mcr p15, 0, r11, c7, c11, 1 */
  436. cmp r9, #1 /* Check whether L2 inval is required or not*/
  437. bne skip_l2_inval
  438. clean_l2:
  439. /* read clidr */
  440. mrc p15, 1, r0, c0, c0, 1
  441. /* extract loc from clidr */
  442. ands r3, r0, #0x7000000
  443. /* left align loc bit field */
  444. mov r3, r3, lsr #23
  445. /* if loc is 0, then no need to clean */
  446. beq finished
  447. /* start clean at cache level 0 */
  448. mov r10, #0
  449. loop1:
  450. /* work out 3x current cache level */
  451. add r2, r10, r10, lsr #1
  452. /* extract cache type bits from clidr*/
  453. mov r1, r0, lsr r2
  454. /* mask of the bits for current cache only */
  455. and r1, r1, #7
  456. /* see what cache we have at this level */
  457. cmp r1, #2
  458. /* skip if no cache, or just i-cache */
  459. blt skip
  460. /* select current cache level in cssr */
  461. mcr p15, 2, r10, c0, c0, 0
  462. /* isb to sych the new cssr&csidr */
  463. isb
  464. /* read the new csidr */
  465. mrc p15, 1, r1, c0, c0, 0
  466. /* extract the length of the cache lines */
  467. and r2, r1, #7
  468. /* add 4 (line length offset) */
  469. add r2, r2, #4
  470. ldr r4, assoc_mask
  471. /* find maximum number on the way size */
  472. ands r4, r4, r1, lsr #3
  473. /* find bit position of way size increment */
  474. clz r5, r4
  475. ldr r7, numset_mask
  476. /* extract max number of the index size*/
  477. ands r7, r7, r1, lsr #13
  478. loop2:
  479. mov r9, r4
  480. /* create working copy of max way size*/
  481. loop3:
  482. /* factor way and cache number into r11 */
  483. orr r11, r10, r9, lsl r5
  484. /* factor index number into r11 */
  485. orr r11, r11, r7, lsl r2
  486. /*clean & invalidate by set/way */
  487. mcr p15, 0, r11, c7, c10, 2
  488. /* decrement the way*/
  489. subs r9, r9, #1
  490. bge loop3
  491. /*decrement the index */
  492. subs r7, r7, #1
  493. bge loop2
  494. skip:
  495. add r10, r10, #2
  496. /* increment cache number */
  497. cmp r3, r10
  498. bgt loop1
  499. finished:
  500. /*swith back to cache level 0 */
  501. mov r10, #0
  502. /* select current cache level in cssr */
  503. mcr p15, 2, r10, c0, c0, 0
  504. isb
  505. skip_l2_inval:
  506. /* Data memory barrier and Data sync barrier */
  507. mov r1, #0
  508. mcr p15, 0, r1, c7, c10, 4
  509. mcr p15, 0, r1, c7, c10, 5
  510. wfi @ wait for interrupt
  511. nop
  512. nop
  513. nop
  514. nop
  515. nop
  516. nop
  517. nop
  518. nop
  519. nop
  520. nop
  521. bl wait_sdrc_ok
  522. /* restore regs and return */
  523. ldmfd sp!, {r0-r12, pc}
  524. /* Make sure SDRC accesses are ok */
  525. wait_sdrc_ok:
  526. ldr r4, cm_idlest1_core
  527. ldr r5, [r4]
  528. and r5, r5, #0x2
  529. cmp r5, #0
  530. bne wait_sdrc_ok
  531. ldr r4, sdrc_power
  532. ldr r5, [r4]
  533. bic r5, r5, #0x40
  534. str r5, [r4]
  535. wait_dll_lock:
  536. /* Is dll in lock mode? */
  537. ldr r4, sdrc_dlla_ctrl
  538. ldr r5, [r4]
  539. tst r5, #0x4
  540. bxne lr
  541. /* wait till dll locks */
  542. ldr r4, sdrc_dlla_status
  543. ldr r5, [r4]
  544. and r5, r5, #0x4
  545. cmp r5, #0x4
  546. bne wait_dll_lock
  547. bx lr
  548. cm_idlest1_core:
  549. .word CM_IDLEST1_CORE_V
  550. sdrc_dlla_status:
  551. .word SDRC_DLLA_STATUS_V
  552. sdrc_dlla_ctrl:
  553. .word SDRC_DLLA_CTRL_V
  554. pm_prepwstst_core:
  555. .word PM_PREPWSTST_CORE_V
  556. pm_prepwstst_core_p:
  557. .word PM_PREPWSTST_CORE_P
  558. pm_prepwstst_mpu:
  559. .word PM_PREPWSTST_MPU_V
  560. pm_pwstctrl_mpu:
  561. .word PM_PWSTCTRL_MPU_P
  562. scratchpad_base:
  563. .word SCRATCHPAD_BASE_P
  564. sram_base:
  565. .word SRAM_BASE_P + 0x8000
  566. sdrc_power:
  567. .word SDRC_POWER_V
  568. clk_stabilize_delay:
  569. .word 0x000001FF
  570. assoc_mask:
  571. .word 0x3ff
  572. numset_mask:
  573. .word 0x7fff
  574. ttbrbit_mask:
  575. .word 0xFFFFC000
  576. table_index_mask:
  577. .word 0xFFF00000
  578. table_entry:
  579. .word 0x00000C02
  580. cache_pred_disable_mask:
  581. .word 0xFFFFE7FB
  582. control_stat:
  583. .word CONTROL_STAT
  584. ENTRY(omap34xx_cpu_suspend_sz)
  585. .word . - omap34xx_cpu_suspend