/*
 * (C) Copyright 2007
 * Texas Instruments
 * Karthik Dasu <karthik-dp@ti.com>
 *
 * (C) Copyright 2004
 * Texas Instruments, <www.ti.com>
 * Richard Woodruff <r-woodruff2@ti.com>
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 2 of
 * the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston,
 * MA 02111-1307 USA
 */
#include <linux/linkage.h>
#include <asm/assembler.h>
#include "omap34xx.h"
#include "iomap.h"
#include "cm3xxx.h"
#include "prm3xxx.h"
#include "sdrc.h"
#include "sram.h"
#include "control.h"
/*
 * Registers access definitions
 */
#define SDRC_SCRATCHPAD_SEM_OFFS        0xc
#define SDRC_SCRATCHPAD_SEM_V   OMAP343X_SCRATCHPAD_REGADDR\
                                        (SDRC_SCRATCHPAD_SEM_OFFS)
#define PM_PREPWSTST_CORE_P     OMAP3430_PRM_BASE + CORE_MOD +\
                                        OMAP3430_PM_PREPWSTST
#define PM_PWSTCTRL_MPU_P       OMAP3430_PRM_BASE + MPU_MOD + OMAP2_PM_PWSTCTRL
#define CM_IDLEST1_CORE_V       OMAP34XX_CM_REGADDR(CORE_MOD, CM_IDLEST1)
#define CM_IDLEST_CKGEN_V       OMAP34XX_CM_REGADDR(PLL_MOD, CM_IDLEST)
#define SRAM_BASE_P             OMAP3_SRAM_PA
#define CONTROL_STAT            OMAP343X_CTRL_BASE + OMAP343X_CONTROL_STATUS
#define CONTROL_MEM_RTA_CTRL    (OMAP343X_CTRL_BASE +\
                                        OMAP36XX_CONTROL_MEM_RTA_CTRL)
/* Move this when a correct place is available */
#define SCRATCHPAD_MEM_OFFS     0x310
#define SCRATCHPAD_BASE_P       (OMAP343X_CTRL_BASE +\
                                        OMAP343X_CONTROL_MEM_WKUP +\
                                        SCRATCHPAD_MEM_OFFS)
#define SDRC_POWER_V            OMAP34XX_SDRC_REGADDR(SDRC_POWER)
#define SDRC_SYSCONFIG_P        (OMAP343X_SDRC_BASE + SDRC_SYSCONFIG)
#define SDRC_MR_0_P             (OMAP343X_SDRC_BASE + SDRC_MR_0)
#define SDRC_EMR2_0_P           (OMAP343X_SDRC_BASE + SDRC_EMR2_0)
#define SDRC_MANUAL_0_P         (OMAP343X_SDRC_BASE + SDRC_MANUAL_0)
#define SDRC_MR_1_P             (OMAP343X_SDRC_BASE + SDRC_MR_1)
#define SDRC_EMR2_1_P           (OMAP343X_SDRC_BASE + SDRC_EMR2_1)
#define SDRC_MANUAL_1_P         (OMAP343X_SDRC_BASE + SDRC_MANUAL_1)
#define SDRC_DLLA_STATUS_V      OMAP34XX_SDRC_REGADDR(SDRC_DLLA_STATUS)
#define SDRC_DLLA_CTRL_V        OMAP34XX_SDRC_REGADDR(SDRC_DLLA_CTRL)
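
/*
 * Naming note (an observation on the macros above, not taken from the
 * original comments): the *_P macros, together with CONTROL_STAT and
 * CONTROL_MEM_RTA_CTRL, are built from the *_BASE constants and expand to
 * physical addresses, which is what the OFF-mode restore path needs since it
 * runs with the MMU off.  The *_V macros go through the OMAP34XX_*_REGADDR
 * virtual mappings and are used while the MMU is still enabled.
 */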
/*
 * This file needs to be built unconditionally as ARM to interoperate correctly
 * with non-Thumb-2-capable firmware.
 */
        .arm

/*
 * API functions
 */

        .text
/*
 * L2 cache needs to be toggled for stable OFF mode functionality on 3630.
 * This function sets up a flag that will allow for this toggling to take
 * place on 3630. Hopefully some version in the future may not need this.
 */
ENTRY(enable_omap3630_toggle_l2_on_restore)
        stmfd   sp!, {lr}               @ save registers on stack
        /* Setup so that we will disable and enable l2 */
        mov     r1, #0x1
        adrl    r3, l2dis_3630_offset   @ may be too distant for plain adr
        ldr     r2, [r3]                @ value for offset
        str     r1, [r2, r3]            @ write to l2dis_3630
        ldmfd   sp!, {pc}               @ restore regs and return
ENDPROC(enable_omap3630_toggle_l2_on_restore)
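
/*
 * Addressing note (inferred from the code above rather than stated in the
 * original): the flag is reached PC-relative.  l2dis_3630_offset holds the
 * delta 'l2dis_3630 - .', so adding the loaded delta to the address of the
 * offset word itself yields the flag's address independently of where the
 * code has been linked or copied to; the restore code below, which runs
 * without the MMU, relies on the same trick.
 */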
/*
 * Function to call rom code to save secure ram context.
 *
 * r0 = physical address of the parameters
 */
ENTRY(save_secure_ram_context)
        stmfd   sp!, {r4 - r11, lr}     @ save registers on stack
        mov     r3, r0                  @ physical address of parameters
        mov     r0, #25                 @ set service ID for PPA
        mov     r12, r0                 @ copy secure service ID in r12
        mov     r1, #0                  @ set task id for ROM code in r1
        mov     r2, #4                  @ set some flags in r2, r6
        mov     r6, #0xff
        dsb                             @ data write barrier
        dmb                             @ data memory barrier
        smc     #1                      @ call SMI monitor (smi #1)
        nop
        nop
        nop
        nop
        ldmfd   sp!, {r4 - r11, pc}
ENDPROC(save_secure_ram_context)
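
/*
 * Convention note (as implied by the register setup above; not an
 * authoritative description of the ROM/PPA API): the secure service ID is
 * duplicated into r12, r1 carries a task ID, r2 and r6 carry flags, r3 the
 * physical address of the parameter block, and smc #1 traps into the secure
 * monitor.  The same pattern is repeated in the restore path further down.
 */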
/*
 * ======================
 * == Idle entry point ==
 * ======================
 */

/*
 * Forces OMAP into idle state
 *
 * omap34xx_cpu_suspend() - This bit of code saves the CPU context if needed
 * and executes the WFI instruction. Calling WFI effectively changes the
 * power domains states to the desired target power states.
 *
 * Notes:
 * - only the minimum set of functions gets copied to internal SRAM at boot
 *   and after wake-up from OFF mode, cf. omap_push_sram_idle. The function
 *   pointers in SDRAM or SRAM are called depending on the desired low power
 *   target state.
 * - when the OMAP wakes up it continues at different execution points
 *   depending on the low power mode (non-OFF vs OFF modes),
 *   cf. 'Resume path for xxx mode' comments.
 */
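
/*
 * Caller-side sketch (not part of this file; a minimal illustration assuming
 * the SRAM-push helpers referenced above behave as their names suggest):
 *
 *     omap3_do_wfi_sram = omap_sram_push(omap3_do_wfi, omap3_do_wfi_sz);
 *     ...
 *     omap34xx_cpu_suspend(save_state);   /- save_state as documented below -/
 *
 * The first line is what makes the "WFI code in SRAM" branch below possible;
 * the second is the normal entry from the platform idle/suspend code.
 */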
        .align  3
ENTRY(omap34xx_cpu_suspend)
        stmfd   sp!, {r4 - r11, lr}     @ save registers on stack

        /*
         * r0 contains information about saving context:
         *   0 - No context lost
         *   1 - Only L1 and logic lost
         *   2 - Only L2 lost (even if L1 is retained, we clean it along with L2)
         *   3 - Both L1 and L2 lost and logic lost
         */

        /*
         * For OFF mode: save context and jump to WFI in SDRAM (omap3_do_wfi)
         * For non-OFF modes: jump to the WFI code in SRAM (omap3_do_wfi_sram)
         */
        ldr     r4, omap3_do_wfi_sram_addr
        ldr     r5, [r4]
        cmp     r0, #0x0                @ If no context save required,
        bxeq    r5                      @ jump to the WFI code in SRAM

        /* Otherwise fall through to the save context code */
save_context_wfi:
        /*
         * Jump out to the kernel flush routine.
         *  - reusing that code is better
         *  - it executes in a cached space, so it is faster than refetching
         *    per block
         *  - should be faster and will change with the kernel
         *  - 'might' have to copy address, load and jump to it
         * Flush all data from the L1 data cache before disabling the
         * SCTLR.C bit.
         */
        ldr     r1, kernel_flush
        mov     lr, pc
        bx      r1

        /*
         * Clear the SCTLR.C bit to prevent further data cache
         * allocation. Clearing SCTLR.C would make all the data accesses
         * strongly ordered and would not hit the cache.
         */
        mrc     p15, 0, r0, c1, c0, 0
        bic     r0, r0, #(1 << 2)       @ Disable the C bit
        mcr     p15, 0, r0, c1, c0, 0
        isb

        /*
         * Invalidate the L1 data cache. Even though only invalidation is
         * necessary, the exported flush API is used here: cleaning an
         * already clean cache is almost a NOP.
         */
        ldr     r1, kernel_flush
        blx     r1
        b       omap3_do_wfi
ENDPROC(omap34xx_cpu_suspend)
omap3_do_wfi_sram_addr:
        .word omap3_do_wfi_sram
kernel_flush:
        .word v7_flush_dcache_all
/* ===================================
 * == WFI instruction => Enter idle ==
 * ===================================
 */

/*
 * Do WFI instruction
 * Includes the resume path for non-OFF modes
 *
 * This code gets copied to internal SRAM and is accessible
 * from both SDRAM and SRAM:
 * - executed from SRAM for non-OFF modes (omap3_do_wfi_sram),
 * - executed from SDRAM for OFF mode (omap3_do_wfi).
 */
        .align  3
ENTRY(omap3_do_wfi)
        ldr     r4, sdrc_power          @ read the SDRC_POWER register
        ldr     r5, [r4]                @ read the contents of SDRC_POWER
        orr     r5, r5, #0x40           @ enable self refresh on idle req
        str     r5, [r4]                @ write back to SDRC_POWER register

        /* Data memory barrier and data sync barrier */
        dsb
        dmb

        /*
         * ===================================
         * == WFI instruction => Enter idle ==
         * ===================================
         */
        wfi                             @ wait for interrupt

        /*
         * ===================================
         * == Resume path for non-OFF modes ==
         * ===================================
         */
        nop
        nop
        nop
        nop
        nop
        nop
        nop
        nop
        nop
        nop
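        /*
         * The ten nops above are read here as a small landing pad: on
         * wake-up, execution resumes at the instruction following WFI, so
         * harmless padding separates the WFI from the real resume code.
         * This is an interpretation of the original code, not a statement
         * taken from the TRM.
         */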
/*
 * This function implements the erratum ID i581 WA:
 * SDRC state restore before accessing the SDRAM
 *
 * Only used at return from non-OFF mode. For OFF
 * mode the ROM code configures the SDRC and
 * the DPLL before calling the restore code directly
 * from DDR.
 */

/* Make sure SDRC accesses are ok */
wait_sdrc_ok:

/* DPLL3 must be locked before accessing the SDRC. Maybe the HW ensures this */
        ldr     r4, cm_idlest_ckgen
wait_dpll3_lock:
        ldr     r5, [r4]
        tst     r5, #1
        beq     wait_dpll3_lock

        ldr     r4, cm_idlest1_core
wait_sdrc_ready:
        ldr     r5, [r4]
        tst     r5, #0x2
        bne     wait_sdrc_ready
        /* allow DLL powerdown upon hw idle req */
        ldr     r4, sdrc_power
        ldr     r5, [r4]
        bic     r5, r5, #0x40
        str     r5, [r4]

is_dll_in_lock_mode:
        /* Is dll in lock mode? */
        ldr     r4, sdrc_dlla_ctrl
        ldr     r5, [r4]
        tst     r5, #0x4
        bne     exit_nonoff_modes       @ Return if locked
        /* wait till dll locks */
wait_dll_lock_timed:
        ldr     r4, sdrc_dlla_status
        /* Wait 20uS for lock */
        mov     r6, #8
wait_dll_lock:
        subs    r6, r6, #0x1
        beq     kick_dll
        ldr     r5, [r4]
        and     r5, r5, #0x4
        cmp     r5, #0x4
        bne     wait_dll_lock
        b       exit_nonoff_modes       @ Return when locked

        /* disable/reenable DLL if not locked */
kick_dll:
        ldr     r4, sdrc_dlla_ctrl
        ldr     r5, [r4]
        mov     r6, r5
        bic     r6, #(1 << 3)           @ disable dll
        str     r6, [r4]
        dsb
        orr     r6, r6, #(1 << 3)       @ enable dll
        str     r6, [r4]
        dsb
        b       wait_dll_lock_timed

exit_nonoff_modes:
        /* Re-enable C-bit if needed */
        mrc     p15, 0, r0, c1, c0, 0
        tst     r0, #(1 << 2)           @ Check C bit enabled?
        orreq   r0, r0, #(1 << 2)       @ Enable the C bit if cleared
        mcreq   p15, 0, r0, c1, c0, 0
        isb

        /*
         * ===================================
         * == Exit point from non-OFF modes ==
         * ===================================
         */
        ldmfd   sp!, {r4 - r11, pc}     @ restore regs and return
ENDPROC(omap3_do_wfi)
sdrc_power:
        .word SDRC_POWER_V
cm_idlest1_core:
        .word CM_IDLEST1_CORE_V
cm_idlest_ckgen:
        .word CM_IDLEST_CKGEN_V
sdrc_dlla_status:
        .word SDRC_DLLA_STATUS_V
sdrc_dlla_ctrl:
        .word SDRC_DLLA_CTRL_V
ENTRY(omap3_do_wfi_sz)
        .word . - omap3_do_wfi
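
/*
 * Note (an observation, not from the original comments): the literal words
 * above sit between omap3_do_wfi and this size marker, so they are copied to
 * SRAM together with the code, and omap3_do_wfi_sz gives the byte count the
 * C side needs for that copy (cf. omap_push_sram_idle referenced earlier).
 */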
/*
 * ==============================
 * == Resume path for OFF mode ==
 * ==============================
 */

/*
 * The restore_* functions are called by the ROM code
 * when back from WFI in OFF mode.
 * Cf. the get_*restore_pointer functions.
 *
 * restore_es3: applies to 34xx >= ES3.0
 * restore_3630: applies to 36xx
 * restore: common code for 3xxx
 *
 * Note: when back from CORE and MPU OFF mode we are running
 * from SDRAM with the MMU, caches and branch prediction disabled.
 * Also the SRAM content has been cleared.
 */
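
/*
 * How the ROM code finds these entry points (inferred from the surrounding
 * code and the scratchpad accesses below; not spelled out in this file): the
 * appropriate restore pointer is saved in the scratchpad area before entering
 * OFF mode, and the ROM code jumps to it, in physical address space, once it
 * has reconfigured the SDRC and the DPLL.
 */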
ENTRY(omap3_restore_es3)
        ldr     r5, pm_prepwstst_core_p
        ldr     r4, [r5]
        and     r4, r4, #0x3
        cmp     r4, #0x0        @ Check if previous power state of CORE is OFF
        bne     omap3_restore   @ Fall through to OMAP3 common code
        adr     r0, es3_sdrc_fix
        ldr     r1, sram_base
        ldr     r2, es3_sdrc_fix_sz
        mov     r2, r2, ror #2
copy_to_sram:
        ldmia   r0!, {r3}       @ val = *src
        stmia   r1!, {r3}       @ *dst = val
        subs    r2, r2, #0x1    @ num_words--
        bne     copy_to_sram
        ldr     r1, sram_base
        blx     r1
        b       omap3_restore   @ Fall through to OMAP3 common code
ENDPROC(omap3_restore_es3)
ENTRY(omap3_restore_3630)
        ldr     r1, pm_prepwstst_core_p
        ldr     r2, [r1]
        and     r2, r2, #0x3
        cmp     r2, #0x0        @ Check if previous power state of CORE is OFF
        bne     omap3_restore   @ Fall through to OMAP3 common code
        /* Disable RTA before giving control */
        ldr     r1, control_mem_rta
        mov     r2, #OMAP36XX_RTA_DISABLE
        str     r2, [r1]
ENDPROC(omap3_restore_3630)

        /* Fall through to common code for the remaining logic */
ENTRY(omap3_restore)
        /*
         * Read the pwstctrl register to check the reason for mpu reset.
         * This tells us what was lost.
         */
        ldr     r1, pm_pwstctrl_mpu
        ldr     r2, [r1]
        and     r2, r2, #0x3
        cmp     r2, #0x0        @ Check if target power state was OFF or RET
        bne     logic_l1_restore

        adr     r1, l2dis_3630_offset   @ address for offset
        ldr     r0, [r1]                @ value for offset
        ldr     r0, [r1, r0]            @ value at l2dis_3630
        cmp     r0, #0x1                @ should we disable L2 on 3630?
        bne     skipl2dis
        mrc     p15, 0, r0, c1, c0, 1
        bic     r0, r0, #2              @ disable L2 cache
        mcr     p15, 0, r0, c1, c0, 1
skipl2dis:
        ldr     r0, control_stat
        ldr     r1, [r0]
        and     r1, #0x700
        cmp     r1, #0x300
        beq     l2_inv_gp
        adr     r0, l2_inv_api_params_offset
        ldr     r3, [r0]
        add     r3, r3, r0              @ r3 points to dummy parameters
        mov     r0, #40                 @ set service ID for PPA
        mov     r12, r0                 @ copy secure Service ID in r12
        mov     r1, #0                  @ set task id for ROM code in r1
        mov     r2, #4                  @ set some flags in r2, r6
        mov     r6, #0xff
        dsb                             @ data write barrier
        dmb                             @ data memory barrier
        smc     #1                      @ call SMI monitor (smi #1)
        /* Write to Aux control register to set some bits */
        mov     r0, #42                 @ set service ID for PPA
        mov     r12, r0                 @ copy secure Service ID in r12
        mov     r1, #0                  @ set task id for ROM code in r1
        mov     r2, #4                  @ set some flags in r2, r6
        mov     r6, #0xff
        ldr     r4, scratchpad_base
        ldr     r3, [r4, #0xBC]         @ r3 points to parameters
        dsb                             @ data write barrier
        dmb                             @ data memory barrier
        smc     #1                      @ call SMI monitor (smi #1)

#ifdef CONFIG_OMAP3_L2_AUX_SECURE_SAVE_RESTORE
        /* Restore L2 aux control register */
                                        @ set service ID for PPA
        mov     r0, #CONFIG_OMAP3_L2_AUX_SECURE_SERVICE_SET_ID
        mov     r12, r0                 @ copy service ID in r12
        mov     r1, #0                  @ set task ID for ROM code in r1
        mov     r2, #4                  @ set some flags in r2, r6
        mov     r6, #0xff
        ldr     r4, scratchpad_base
        ldr     r3, [r4, #0xBC]
        adds    r3, r3, #8              @ r3 points to parameters
        dsb                             @ data write barrier
        dmb                             @ data memory barrier
        smc     #1                      @ call SMI monitor (smi #1)
#endif
        b       logic_l1_restore

        .align
l2_inv_api_params_offset:
        .long   l2_inv_api_params - .
l2_inv_gp:
        /* Execute smi to invalidate L2 cache */
        mov     r12, #0x1               @ set up to invalidate L2
        smc     #0                      @ Call SMI monitor (smieq)
        /* Write to Aux control register to set some bits */
        ldr     r4, scratchpad_base
        ldr     r3, [r4, #0xBC]
        ldr     r0, [r3, #4]
        mov     r12, #0x3
        smc     #0                      @ Call SMI monitor (smieq)
        ldr     r4, scratchpad_base
        ldr     r3, [r4, #0xBC]
        ldr     r0, [r3, #12]
        mov     r12, #0x2
        smc     #0                      @ Call SMI monitor (smieq)
logic_l1_restore:
        adr     r0, l2dis_3630_offset   @ address for offset
        ldr     r1, [r0]                @ value for offset
        ldr     r1, [r0, r1]            @ value at l2dis_3630
        cmp     r1, #0x1                @ Test if L2 re-enable needed on 3630
        bne     skipl2reen
        mrc     p15, 0, r1, c1, c0, 1
        orr     r1, r1, #2              @ re-enable L2 cache
        mcr     p15, 0, r1, c1, c0, 1
skipl2reen:
        /* Now branch to the common CPU resume function */
        b       cpu_resume
ENDPROC(omap3_restore)
        .ltorg

/*
 * Local variables
 */
pm_prepwstst_core_p:
        .word PM_PREPWSTST_CORE_P
pm_pwstctrl_mpu:
        .word PM_PWSTCTRL_MPU_P
scratchpad_base:
        .word SCRATCHPAD_BASE_P
sram_base:
        .word SRAM_BASE_P + 0x8000
control_stat:
        .word CONTROL_STAT
control_mem_rta:
        .word CONTROL_MEM_RTA_CTRL
l2dis_3630_offset:
        .long l2dis_3630 - .

        .data
l2dis_3630:
        .word 0

        .data
l2_inv_api_params:
        .word 0x1, 0x00
/*
 * Internal functions
 */

/*
 * This function implements the erratum ID i443 WA, applies to 34xx >= ES3.0
 * Copied to and run from SRAM in order to reconfigure the SDRC parameters.
 */
        .text
        .align  3
ENTRY(es3_sdrc_fix)
        ldr     r4, sdrc_syscfg         @ get config addr
        ldr     r5, [r4]                @ get value
        tst     r5, #0x100              @ is part access blocked
        it      eq
        biceq   r5, r5, #0x100          @ clear bit if set
        str     r5, [r4]                @ write back change
        ldr     r4, sdrc_mr_0           @ get config addr
        ldr     r5, [r4]                @ get value
        str     r5, [r4]                @ write back change
        ldr     r4, sdrc_emr2_0         @ get config addr
        ldr     r5, [r4]                @ get value
        str     r5, [r4]                @ write back change
        ldr     r4, sdrc_manual_0       @ get config addr
        mov     r5, #0x2                @ autorefresh command
        str     r5, [r4]                @ kick off refreshes
        ldr     r4, sdrc_mr_1           @ get config addr
        ldr     r5, [r4]                @ get value
        str     r5, [r4]                @ write back change
        ldr     r4, sdrc_emr2_1         @ get config addr
        ldr     r5, [r4]                @ get value
        str     r5, [r4]                @ write back change
        ldr     r4, sdrc_manual_1       @ get config addr
        mov     r5, #0x2                @ autorefresh command
        str     r5, [r4]                @ kick off refreshes
        bx      lr

/*
 * Local variables
 */
        .align
sdrc_syscfg:
        .word SDRC_SYSCONFIG_P
sdrc_mr_0:
        .word SDRC_MR_0_P
sdrc_emr2_0:
        .word SDRC_EMR2_0_P
sdrc_manual_0:
        .word SDRC_MANUAL_0_P
sdrc_mr_1:
        .word SDRC_MR_1_P
sdrc_emr2_1:
        .word SDRC_EMR2_1_P
sdrc_manual_1:
        .word SDRC_MANUAL_1_P
ENDPROC(es3_sdrc_fix)
ENTRY(es3_sdrc_fix_sz)
        .word . - es3_sdrc_fix
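
/*
 * As with omap3_do_wfi_sz above, this size marker covers both the code and
 * the literal words of es3_sdrc_fix, so the copy loop in omap3_restore_es3
 * (copy_to_sram) moves the addresses along with the instructions.  The
 * 'mov r2, r2, ror #2' there turns this word-aligned byte count into the
 * word count used by that loop.  (Observation based on this file, not an
 * added claim.)
 */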