/*
 * (C) Copyright 2007
 * Texas Instruments
 * Karthik Dasu <karthik-dp@ti.com>
 *
 * (C) Copyright 2004
 * Texas Instruments, <www.ti.com>
 * Richard Woodruff <r-woodruff2@ti.com>
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 2 of
 * the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston,
 * MA 02111-1307 USA
 */
#include <linux/linkage.h>

#include <asm/assembler.h>

#include "omap34xx.h"
#include "iomap.h"
#include "cm3xxx.h"
#include "prm3xxx.h"
#include "sdrc.h"
#include "sram.h"
#include "control.h"

/*
 * Register access definitions
 */
#define SDRC_SCRATCHPAD_SEM_OFFS	0xc
#define SDRC_SCRATCHPAD_SEM_V	OMAP343X_SCRATCHPAD_REGADDR\
					(SDRC_SCRATCHPAD_SEM_OFFS)
#define PM_PREPWSTST_CORE_P	OMAP3430_PRM_BASE + CORE_MOD +\
					OMAP3430_PM_PREPWSTST
#define PM_PWSTCTRL_MPU_P	OMAP3430_PRM_BASE + MPU_MOD + OMAP2_PM_PWSTCTRL
#define CM_IDLEST1_CORE_V	OMAP34XX_CM_REGADDR(CORE_MOD, CM_IDLEST1)
#define CM_IDLEST_CKGEN_V	OMAP34XX_CM_REGADDR(PLL_MOD, CM_IDLEST)
#define SRAM_BASE_P		OMAP3_SRAM_PA
#define CONTROL_STAT		OMAP343X_CTRL_BASE + OMAP343X_CONTROL_STATUS
#define CONTROL_MEM_RTA_CTRL	(OMAP343X_CTRL_BASE +\
					OMAP36XX_CONTROL_MEM_RTA_CTRL)

/* Move this once a correct place is available */
#define SCRATCHPAD_MEM_OFFS	0x310
#define SCRATCHPAD_BASE_P	(OMAP343X_CTRL_BASE +\
					OMAP343X_CONTROL_MEM_WKUP +\
					SCRATCHPAD_MEM_OFFS)
#define SDRC_POWER_V		OMAP34XX_SDRC_REGADDR(SDRC_POWER)
#define SDRC_SYSCONFIG_P	(OMAP343X_SDRC_BASE + SDRC_SYSCONFIG)
#define SDRC_MR_0_P		(OMAP343X_SDRC_BASE + SDRC_MR_0)
#define SDRC_EMR2_0_P		(OMAP343X_SDRC_BASE + SDRC_EMR2_0)
#define SDRC_MANUAL_0_P		(OMAP343X_SDRC_BASE + SDRC_MANUAL_0)
#define SDRC_MR_1_P		(OMAP343X_SDRC_BASE + SDRC_MR_1)
#define SDRC_EMR2_1_P		(OMAP343X_SDRC_BASE + SDRC_EMR2_1)
#define SDRC_MANUAL_1_P		(OMAP343X_SDRC_BASE + SDRC_MANUAL_1)
#define SDRC_DLLA_STATUS_V	OMAP34XX_SDRC_REGADDR(SDRC_DLLA_STATUS)
#define SDRC_DLLA_CTRL_V	OMAP34XX_SDRC_REGADDR(SDRC_DLLA_CTRL)

/*
 * This file needs to be built unconditionally as ARM to interoperate correctly
 * with non-Thumb-2-capable firmware.
 */
	.arm

/*
 * API functions
 */

	.text
/*
 * L2 cache needs to be toggled for stable OFF mode functionality on 3630.
 * This function sets up a flag that will allow for this toggling to take
 * place on 3630. Hopefully some version in the future may not need this.
 */
ENTRY(enable_omap3630_toggle_l2_on_restore)
	stmfd	sp!, {lr}		@ save registers on stack
	/* Set a flag so that we will disable and re-enable L2 */
	mov	r1, #0x1
	adrl	r2, l2dis_3630		@ may be too distant for plain adr
	str	r1, [r2]
	ldmfd	sp!, {pc}		@ restore regs and return
ENDPROC(enable_omap3630_toggle_l2_on_restore)

	.text
/* Function to call ROM code to save secure RAM context */
	.align	3
ENTRY(save_secure_ram_context)
	stmfd	sp!, {r4 - r11, lr}	@ save registers on stack
	adr	r3, api_params		@ r3 points to parameters
	str	r0, [r3,#0x4]		@ r0 has sdram address
	ldr	r12, high_mask
	and	r3, r3, r12
	ldr	r12, sram_phy_addr_mask
	orr	r3, r3, r12
	mov	r0, #25			@ set service ID for PPA
	mov	r12, r0			@ copy secure service ID in r12
	mov	r1, #0			@ set task id for ROM code in r1
	mov	r2, #4			@ set some flags in r2, r6
	mov	r6, #0xff
	dsb				@ data write barrier
	dmb				@ data memory barrier
	smc	#1			@ call SMI monitor (smi #1)
	nop
	nop
	nop
	nop
	ldmfd	sp!, {r4 - r11, pc}
	.align
sram_phy_addr_mask:
	.word	SRAM_BASE_P
high_mask:
	.word	0xffff
api_params:
	.word	0x4, 0x0, 0x0, 0x1, 0x1
ENDPROC(save_secure_ram_context)
ENTRY(save_secure_ram_context_sz)
	.word	. - save_secure_ram_context

/*
 * ======================
 * == Idle entry point ==
 * ======================
 */

/*
 * Forces OMAP into idle state
 *
 * omap34xx_cpu_suspend() - This bit of code saves the CPU context if needed
 * and executes the WFI instruction. Calling WFI effectively changes the
 * power domains states to the desired target power states.
 *
 * Notes:
 * - only the minimum set of functions gets copied to internal SRAM at boot
 *   and after wake-up from OFF mode, cf. omap_push_sram_idle. The function
 *   pointers in SDRAM or SRAM are called depending on the desired low power
 *   target state.
 * - when the OMAP wakes up it continues at different execution points
 *   depending on the low power mode (non-OFF vs OFF modes),
 *   cf. 'Resume path for xxx mode' comments.
 */
	.align	3
ENTRY(omap34xx_cpu_suspend)
	stmfd	sp!, {r4 - r11, lr}	@ save registers on stack

	/*
	 * r0 contains information about saving context:
	 *   0 - No context lost
	 *   1 - Only L1 and logic lost
	 *   2 - Only L2 lost (even if L1 is retained, we clean it along with L2)
	 *   3 - Both L1 and L2 lost and logic lost
	 */

	/*
	 * For OFF mode: save context and jump to WFI in SDRAM (omap3_do_wfi)
	 * For non-OFF modes: jump to the WFI code in SRAM (omap3_do_wfi_sram)
	 */
	ldr	r4, omap3_do_wfi_sram_addr
	ldr	r5, [r4]
	cmp	r0, #0x0		@ If no context save required,
	bxeq	r5			@  jump to the WFI code in SRAM


	/* Otherwise fall through to the save context code */
save_context_wfi:
	/*
	 * Jump out to the kernel flush routine:
	 *  - reusing that code is better
	 *  - it executes in a cached space so is faster than refetch per-block
	 *  - should be faster and will change with kernel
	 *  - 'might' have to copy address, load and jump to it
	 * Flush all data from the L1 data cache before disabling
	 * SCTLR.C bit.
	 */
	ldr	r1, kernel_flush
	mov	lr, pc
	bx	r1

	/*
	 * Clear the SCTLR.C bit to prevent further data cache
	 * allocation. Clearing SCTLR.C makes all data accesses
	 * strongly ordered, so they no longer hit the cache.
	 */
	mrc	p15, 0, r0, c1, c0, 0
	bic	r0, r0, #(1 << 2)	@ Disable the C bit
	mcr	p15, 0, r0, c1, c0, 0
	isb

	/*
	 * Invalidate the L1 data cache. Even though only invalidation is
	 * necessary, the exported flush API is used here. Doing a clean
	 * on an already clean cache is almost a NOP.
	 */
	ldr	r1, kernel_flush
	blx	r1
	b	omap3_do_wfi
ENDPROC(omap34xx_cpu_suspend)
omap3_do_wfi_sram_addr:
	.word	omap3_do_wfi_sram
kernel_flush:
	.word	v7_flush_dcache_all

/* ===================================
 * == WFI instruction => Enter idle ==
 * ===================================
 */

/*
 * Do WFI instruction
 * Includes the resume path for non-OFF modes
 *
 * This code gets copied to internal SRAM and is accessible
 * from both SDRAM and SRAM:
 * - executed from SRAM for non-OFF modes (omap3_do_wfi_sram),
 * - executed from SDRAM for OFF mode (omap3_do_wfi).
 */
	.align	3
ENTRY(omap3_do_wfi)
	ldr	r4, sdrc_power		@ read the SDRC_POWER register
	ldr	r5, [r4]		@ read the contents of SDRC_POWER
	orr	r5, r5, #0x40		@ enable self refresh on idle req
	str	r5, [r4]		@ write back to SDRC_POWER register

	/* Data memory barrier and data sync barrier */
	dsb
	dmb

/*
 * ===================================
 * == WFI instruction => Enter idle ==
 * ===================================
 */
	wfi				@ wait for interrupt

/*
 * ===================================
 * == Resume path for non-OFF modes ==
 * ===================================
 */
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop

/*
 * This function implements the erratum ID i581 WA:
 *  SDRC state restore before accessing the SDRAM
 *
 * Only used at return from non-OFF mode. For OFF
 * mode the ROM code configures the SDRC and
 * the DPLL before calling the restore code directly
 * from DDR.
 */

/* Make sure SDRC accesses are ok */
wait_sdrc_ok:

/* DPLL3 must be locked before accessing the SDRC. Maybe the HW ensures this */
	ldr	r4, cm_idlest_ckgen
wait_dpll3_lock:
	ldr	r5, [r4]
	tst	r5, #1
	beq	wait_dpll3_lock

	ldr	r4, cm_idlest1_core
wait_sdrc_ready:
	ldr	r5, [r4]
	tst	r5, #0x2
	bne	wait_sdrc_ready
	/* allow DLL powerdown upon hw idle req */
	ldr	r4, sdrc_power
	ldr	r5, [r4]
	bic	r5, r5, #0x40
	str	r5, [r4]

/*
 * PC-relative stores lead to undefined behaviour in Thumb-2: use r7 as a
 * base instead.
 * Be careful not to clobber r7 when maintaining this code.
 */

is_dll_in_lock_mode:
	/* Is the DLL in lock mode? */
	ldr	r4, sdrc_dlla_ctrl
	ldr	r5, [r4]
	tst	r5, #0x4
	bne	exit_nonoff_modes	@ Return if locked
	/* wait till dll locks */
	adr	r7, kick_counter
wait_dll_lock_timed:
	ldr	r4, wait_dll_lock_counter
	add	r4, r4, #1
	str	r4, [r7, #wait_dll_lock_counter - kick_counter]
	ldr	r4, sdrc_dlla_status
	/* Wait 20uS for lock */
	mov	r6, #8
wait_dll_lock:
	subs	r6, r6, #0x1
	beq	kick_dll
	ldr	r5, [r4]
	and	r5, r5, #0x4
	cmp	r5, #0x4
	bne	wait_dll_lock
	b	exit_nonoff_modes	@ Return when locked

	/* disable/re-enable DLL if not locked */
kick_dll:
	ldr	r4, sdrc_dlla_ctrl
	ldr	r5, [r4]
	mov	r6, r5
	bic	r6, #(1<<3)		@ disable dll
	str	r6, [r4]
	dsb
	orr	r6, r6, #(1<<3)		@ enable dll
	str	r6, [r4]
	dsb
	ldr	r4, kick_counter
	add	r4, r4, #1
	str	r4, [r7]		@ kick_counter
	b	wait_dll_lock_timed

exit_nonoff_modes:
	/* Re-enable C-bit if needed */
	mrc	p15, 0, r0, c1, c0, 0
	tst	r0, #(1 << 2)		@ Check C bit enabled?
	orreq	r0, r0, #(1 << 2)	@ Enable the C bit if cleared
	mcreq	p15, 0, r0, c1, c0, 0
	isb

/*
 * ===================================
 * == Exit point from non-OFF modes ==
 * ===================================
 */
	ldmfd	sp!, {r4 - r11, pc}	@ restore regs and return
ENDPROC(omap3_do_wfi)
sdrc_power:
	.word	SDRC_POWER_V
cm_idlest1_core:
	.word	CM_IDLEST1_CORE_V
cm_idlest_ckgen:
	.word	CM_IDLEST_CKGEN_V
sdrc_dlla_status:
	.word	SDRC_DLLA_STATUS_V
sdrc_dlla_ctrl:
	.word	SDRC_DLLA_CTRL_V
	/*
	 * When exporting to userspace while the counters are in SRAM,
	 * these 2 words need to be at the end to facilitate retrieval!
	 */
kick_counter:
	.word	0
wait_dll_lock_counter:
	.word	0

ENTRY(omap3_do_wfi_sz)
	.word	. - omap3_do_wfi


/*
 * ==============================
 * == Resume path for OFF mode ==
 * ==============================
 */

/*
 * The restore_* functions are called by the ROM code
 *  when back from WFI in OFF mode.
 * Cf. the get_*restore_pointer functions.
 *
 * restore_es3: applies to 34xx >= ES3.0
 * restore_3630: applies to 36xx
 * restore: common code for 3xxx
 *
 * Note: when back from CORE and MPU OFF mode we are running
 *  from SDRAM, without MMU, without the caches and prediction.
 *  Also the SRAM content has been cleared.
 */
ENTRY(omap3_restore_es3)
	ldr	r5, pm_prepwstst_core_p
	ldr	r4, [r5]
	and	r4, r4, #0x3
	cmp	r4, #0x0	@ Check if previous power state of CORE is OFF
	bne	omap3_restore	@ Fall through to OMAP3 common code
	adr	r0, es3_sdrc_fix
	ldr	r1, sram_base
	ldr	r2, es3_sdrc_fix_sz
	mov	r2, r2, ror #2	@ size in bytes -> number of words to copy
copy_to_sram:
	ldmia	r0!, {r3}	@ val = *src
	stmia	r1!, {r3}	@ *dst = val
	subs	r2, r2, #0x1	@ num_words--
	bne	copy_to_sram
	ldr	r1, sram_base
	blx	r1
	b	omap3_restore	@ Fall through to OMAP3 common code
ENDPROC(omap3_restore_es3)

ENTRY(omap3_restore_3630)
	ldr	r1, pm_prepwstst_core_p
	ldr	r2, [r1]
	and	r2, r2, #0x3
	cmp	r2, #0x0	@ Check if previous power state of CORE is OFF
	bne	omap3_restore	@ Fall through to OMAP3 common code
	/* Disable RTA before giving control */
	ldr	r1, control_mem_rta
	mov	r2, #OMAP36XX_RTA_DISABLE
	str	r2, [r1]
ENDPROC(omap3_restore_3630)

	/* Fall through to common code for the remaining logic */

ENTRY(omap3_restore)
	/*
	 * Read the pwstctrl register to check the reason for mpu reset.
	 * This tells us what was lost.
	 */
	ldr	r1, pm_pwstctrl_mpu
	ldr	r2, [r1]
	and	r2, r2, #0x3
	cmp	r2, #0x0	@ Check if target power state was OFF or RET
	bne	logic_l1_restore

	ldr	r0, l2dis_3630
	cmp	r0, #0x1	@ should we disable L2 on 3630?
	bne	skipl2dis
	mrc	p15, 0, r0, c1, c0, 1
	bic	r0, r0, #2	@ disable L2 cache
	mcr	p15, 0, r0, c1, c0, 1
skipl2dis:
	ldr	r0, control_stat
	ldr	r1, [r0]
	and	r1, #0x700
	cmp	r1, #0x300
	beq	l2_inv_gp
	mov	r0, #40		@ set service ID for PPA
	mov	r12, r0		@ copy secure Service ID in r12
	mov	r1, #0		@ set task id for ROM code in r1
	mov	r2, #4		@ set some flags in r2, r6
	mov	r6, #0xff
	adr	r3, l2_inv_api_params	@ r3 points to dummy parameters
	dsb			@ data write barrier
	dmb			@ data memory barrier
	smc	#1		@ call SMI monitor (smi #1)
	/* Write to Aux control register to set some bits */
	mov	r0, #42		@ set service ID for PPA
	mov	r12, r0		@ copy secure Service ID in r12
	mov	r1, #0		@ set task id for ROM code in r1
	mov	r2, #4		@ set some flags in r2, r6
	mov	r6, #0xff
	ldr	r4, scratchpad_base
	ldr	r3, [r4, #0xBC]	@ r3 points to parameters
	dsb			@ data write barrier
	dmb			@ data memory barrier
	smc	#1		@ call SMI monitor (smi #1)

#ifdef CONFIG_OMAP3_L2_AUX_SECURE_SAVE_RESTORE
	/* Restore L2 aux control register */
				@ set service ID for PPA
	mov	r0, #CONFIG_OMAP3_L2_AUX_SECURE_SERVICE_SET_ID
	mov	r12, r0		@ copy service ID in r12
	mov	r1, #0		@ set task ID for ROM code in r1
	mov	r2, #4		@ set some flags in r2, r6
	mov	r6, #0xff
	ldr	r4, scratchpad_base
	ldr	r3, [r4, #0xBC]
	adds	r3, r3, #8	@ r3 points to parameters
	dsb			@ data write barrier
	dmb			@ data memory barrier
	smc	#1		@ call SMI monitor (smi #1)
#endif
	b	logic_l1_restore

	.align
l2_inv_api_params:
	.word	0x1, 0x00
l2_inv_gp:
	/* Execute smi to invalidate L2 cache */
	mov	r12, #0x1	@ set up to invalidate L2
	smc	#0		@ Call SMI monitor (smieq)
	/* Write to Aux control register to set some bits */
	ldr	r4, scratchpad_base
	ldr	r3, [r4,#0xBC]
	ldr	r0, [r3,#4]
	mov	r12, #0x3
	smc	#0		@ Call SMI monitor (smieq)
	ldr	r4, scratchpad_base
	ldr	r3, [r4,#0xBC]
	ldr	r0, [r3,#12]
	mov	r12, #0x2
	smc	#0		@ Call SMI monitor (smieq)
logic_l1_restore:
	ldr	r1, l2dis_3630
	cmp	r1, #0x1	@ Test if L2 re-enable is needed on 3630
	bne	skipl2reen
	mrc	p15, 0, r1, c1, c0, 1
	orr	r1, r1, #2	@ re-enable L2 cache
	mcr	p15, 0, r1, c1, c0, 1
skipl2reen:

	/* Now branch to the common CPU resume function */
	b	cpu_resume
ENDPROC(omap3_restore)

	.ltorg

/*
 * Local variables
 */
pm_prepwstst_core_p:
	.word	PM_PREPWSTST_CORE_P
pm_pwstctrl_mpu:
	.word	PM_PWSTCTRL_MPU_P
scratchpad_base:
	.word	SCRATCHPAD_BASE_P
sram_base:
	.word	SRAM_BASE_P + 0x8000
control_stat:
	.word	CONTROL_STAT
control_mem_rta:
	.word	CONTROL_MEM_RTA_CTRL
l2dis_3630:
	.word	0

/*
 * Internal functions
 */

/*
 * This function implements the erratum ID i443 WA, applies to 34xx >= ES3.0
 * Copied to and run from SRAM in order to reconfigure the SDRC parameters.
 */
	.text
	.align	3
ENTRY(es3_sdrc_fix)
	ldr	r4, sdrc_syscfg		@ get config addr
	ldr	r5, [r4]		@ get value
	tst	r5, #0x100		@ is part access blocked
	it	eq
	biceq	r5, r5, #0x100		@ clear bit if set
	str	r5, [r4]		@ write back change
	ldr	r4, sdrc_mr_0		@ get config addr
	ldr	r5, [r4]		@ get value
	str	r5, [r4]		@ write back change
	ldr	r4, sdrc_emr2_0		@ get config addr
	ldr	r5, [r4]		@ get value
	str	r5, [r4]		@ write back change
	ldr	r4, sdrc_manual_0	@ get config addr
	mov	r5, #0x2		@ autorefresh command
	str	r5, [r4]		@ kick off refreshes
	ldr	r4, sdrc_mr_1		@ get config addr
	ldr	r5, [r4]		@ get value
	str	r5, [r4]		@ write back change
	ldr	r4, sdrc_emr2_1		@ get config addr
	ldr	r5, [r4]		@ get value
	str	r5, [r4]		@ write back change
	ldr	r4, sdrc_manual_1	@ get config addr
	mov	r5, #0x2		@ autorefresh command
	str	r5, [r4]		@ kick off refreshes
	bx	lr

/*
 * Local variables
 */
	.align
sdrc_syscfg:
	.word	SDRC_SYSCONFIG_P
sdrc_mr_0:
	.word	SDRC_MR_0_P
sdrc_emr2_0:
	.word	SDRC_EMR2_0_P
sdrc_manual_0:
	.word	SDRC_MANUAL_0_P
sdrc_mr_1:
	.word	SDRC_MR_1_P
sdrc_emr2_1:
	.word	SDRC_EMR2_1_P
sdrc_manual_1:
	.word	SDRC_MANUAL_1_P
ENDPROC(es3_sdrc_fix)
ENTRY(es3_sdrc_fix_sz)
	.word	. - es3_sdrc_fix