/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Low level suspend code for AM43XX SoCs
 *
 * Copyright (C) 2013-2018 Texas Instruments Incorporated - http://www.ti.com/
 *	Dave Gerlach, Vaibhav Bedia
 */

#include <generated/ti-emif-asm-offsets.h>
#include <generated/ti-pm-asm-offsets.h>
#include <linux/linkage.h>
#include <linux/ti-emif-sram.h>

#include <asm/assembler.h>
#include <asm/hardware/cache-l2x0.h>
#include <asm/memory.h>

#include "cm33xx.h"
#include "common.h"
#include "iomap.h"
#include "omap-secure.h"
#include "omap44xx.h"
#include "prm33xx.h"
#include "prcm43xx.h"

#define AM33XX_CM_CLKCTRL_MODULESTATE_DISABLED		0x00030000
#define AM33XX_CM_CLKCTRL_MODULEMODE_DISABLE		0x0003
#define AM33XX_CM_CLKCTRL_MODULEMODE_ENABLE		0x0002

#define AM43XX_EMIF_POWEROFF_ENABLE			0x1
#define AM43XX_EMIF_POWEROFF_DISABLE			0x0

#define AM43XX_CM_CLKSTCTRL_CLKTRCTRL_SW_SLEEP		0x1
#define AM43XX_CM_CLKSTCTRL_CLKTRCTRL_HW_AUTO		0x3

#define AM43XX_CM_BASE					0x44DF0000

#define AM43XX_CM_REGADDR(inst, reg)			\
	AM33XX_L4_WK_IO_ADDRESS(AM43XX_CM_BASE + (inst) + (reg))

#define AM43XX_CM_MPU_CLKSTCTRL AM43XX_CM_REGADDR(AM43XX_CM_MPU_INST, \
					AM43XX_CM_MPU_MPU_CDOFFS)
#define AM43XX_CM_MPU_MPU_CLKCTRL AM43XX_CM_REGADDR(AM43XX_CM_MPU_INST, \
					AM43XX_CM_MPU_MPU_CLKCTRL_OFFSET)
#define AM43XX_CM_PER_EMIF_CLKCTRL AM43XX_CM_REGADDR(AM43XX_CM_PER_INST, \
					AM43XX_CM_PER_EMIF_CLKCTRL_OFFSET)
#define AM43XX_PRM_EMIF_CTRL_OFFSET			0x0030

	.arm
	.align 3

ENTRY(am43xx_do_wfi)
	stmfd	sp!, {r4 - r11, lr}	@ save registers on stack

#ifdef CONFIG_CACHE_L2X0
	/* Retrieve l2 cache virt address BEFORE we shut off EMIF */
	ldr	r1, get_l2cache_base
	blx	r1
	mov	r8, r0
#endif

	/*
	 * Flush all data from the L1 and L2 data cache before disabling
	 * SCTLR.C bit.
	 */
	ldr	r1, kernel_flush
	blx	r1

	/*
	 * Clear the SCTLR.C bit to prevent further data cache
	 * allocation. Clearing SCTLR.C would make all the data accesses
	 * strongly ordered and would not hit the cache.
	 */
	mrc	p15, 0, r0, c1, c0, 0
	bic	r0, r0, #(1 << 2)	@ Disable the C bit
	mcr	p15, 0, r0, c1, c0, 0
	isb
	dsb

	/*
	 * Invalidate L1 and L2 data cache.
	 */
	ldr	r1, kernel_flush
	blx	r1

#ifdef CONFIG_CACHE_L2X0
	/*
	 * Clean and invalidate the L2 cache.
	 */
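	/*
	 * With CONFIG_PL310_ERRATA_727915 enabled, the PL310 debug
	 * control register is programmed through the secure monitor
	 * around the clean and invalidate by way operation below.
	 */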
#ifdef CONFIG_PL310_ERRATA_727915
	mov	r0, #0x03
	mov	r12, #OMAP4_MON_L2X0_DBG_CTRL_INDEX
	dsb
	smc	#0
	dsb
#endif
	mov	r0, r8
	adr	r4, am43xx_pm_ro_sram_data
	ldr	r3, [r4, #AMX3_PM_RO_SRAM_DATA_VIRT_OFFSET]

	mov	r2, r0
	ldr	r0, [r2, #L2X0_AUX_CTRL]
	str	r0, [r3, #AMX3_PM_L2_AUX_CTRL_VAL_OFFSET]
	ldr	r0, [r2, #L310_PREFETCH_CTRL]
	str	r0, [r3, #AMX3_PM_L2_PREFETCH_CTRL_VAL_OFFSET]

	ldr	r0, l2_val
	str	r0, [r2, #L2X0_CLEAN_INV_WAY]
wait:
	ldr	r0, [r2, #L2X0_CLEAN_INV_WAY]
	ldr	r1, l2_val
	ands	r0, r0, r1
	bne	wait
#ifdef CONFIG_PL310_ERRATA_727915
	mov	r0, #0x00
	mov	r12, #OMAP4_MON_L2X0_DBG_CTRL_INDEX
	dsb
	smc	#0
	dsb
#endif
l2x_sync:
	mov	r0, r8
	mov	r2, r0
	mov	r0, #0x0
	str	r0, [r2, #L2X0_CACHE_SYNC]
sync:
	ldr	r0, [r2, #L2X0_CACHE_SYNC]
	ands	r0, r0, #0x1
	bne	sync
#endif

	adr	r9, am43xx_emif_sram_table

	ldr	r3, [r9, #EMIF_PM_ENTER_SR_OFFSET]
	blx	r3

	ldr	r3, [r9, #EMIF_PM_SAVE_CONTEXT_OFFSET]
	blx	r3

	/* Disable EMIF */
	ldr	r1, am43xx_virt_emif_clkctrl
	ldr	r2, [r1]
	bic	r2, r2, #AM33XX_CM_CLKCTRL_MODULEMODE_DISABLE
	str	r2, [r1]

wait_emif_disable:
	ldr	r2, [r1]
	mov	r3, #AM33XX_CM_CLKCTRL_MODULESTATE_DISABLED
	cmp	r2, r3
	bne	wait_emif_disable

	/*
	 * For the MPU WFI to be registered as an interrupt
	 * to WKUP_M3, MPU_CLKCTRL.MODULEMODE needs to be set
	 * to DISABLED
	 */
	ldr	r1, am43xx_virt_mpu_clkctrl
	ldr	r2, [r1]
	bic	r2, r2, #AM33XX_CM_CLKCTRL_MODULEMODE_DISABLE
	str	r2, [r1]

	/*
	 * Put MPU CLKDM to SW_SLEEP
	 */
	ldr	r1, am43xx_virt_mpu_clkstctrl
	mov	r2, #AM43XX_CM_CLKSTCTRL_CLKTRCTRL_SW_SLEEP
	str	r2, [r1]

	/*
	 * Execute a barrier instruction to ensure that all cache,
	 * TLB and branch predictor maintenance operations issued
	 * have completed.
	 */
	dsb
	dmb

	/*
	 * Execute a WFI instruction and wait until the
	 * STANDBYWFI output is asserted to indicate that the
	 * CPU is in idle and low power state. The CPU can speculatively
	 * prefetch instructions, so add NOPs after the WFI. Sixteen
	 * NOPs as per the Cortex-A9 pipeline.
	 */
	wfi

	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop

	/* We come here in case of an abort due to a late interrupt */
	ldr	r1, am43xx_virt_mpu_clkstctrl
	mov	r2, #AM43XX_CM_CLKSTCTRL_CLKTRCTRL_HW_AUTO
	str	r2, [r1]

	/* Set MPU_CLKCTRL.MODULEMODE back to ENABLE */
	ldr	r1, am43xx_virt_mpu_clkctrl
	mov	r2, #AM33XX_CM_CLKCTRL_MODULEMODE_ENABLE
	str	r2, [r1]

	/* Re-enable EMIF */
	ldr	r1, am43xx_virt_emif_clkctrl
	mov	r2, #AM33XX_CM_CLKCTRL_MODULEMODE_ENABLE
	str	r2, [r1]
wait_emif_enable:
	ldr	r3, [r1]
	cmp	r2, r3
	bne	wait_emif_enable

	/*
	 * Set SCTLR.C bit to allow data cache allocation
	 */
	mrc	p15, 0, r0, c1, c0, 0
	orr	r0, r0, #(1 << 2)	@ Enable the C bit
	mcr	p15, 0, r0, c1, c0, 0
	isb

	ldr	r1, [r9, #EMIF_PM_ABORT_SR_OFFSET]
	blx	r1

	/* Let the suspend code know about the abort */
	mov	r0, #1
	ldmfd	sp!, {r4 - r11, pc}	@ restore regs and return
ENDPROC(am43xx_do_wfi)

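/*
 * Offset used by the platform PM code to locate the deep sleep resume
 * handler after this block has been copied into SRAM.
 */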
	.align
ENTRY(am43xx_resume_offset)
	.word . - am43xx_do_wfi

ENTRY(am43xx_resume_from_deep_sleep)
	/* Set MPU CLKSTCTRL to HW AUTO so that CPUidle works properly */
	ldr	r1, am43xx_virt_mpu_clkstctrl
	mov	r2, #AM43XX_CM_CLKSTCTRL_CLKTRCTRL_HW_AUTO
	str	r2, [r1]

	/* For AM43xx, use EMIF power down until context is restored */
	ldr	r2, am43xx_phys_emif_poweroff
	mov	r1, #AM43XX_EMIF_POWEROFF_ENABLE
	str	r1, [r2, #0x0]

	/* Re-enable EMIF */
	ldr	r1, am43xx_phys_emif_clkctrl
	mov	r2, #AM33XX_CM_CLKCTRL_MODULEMODE_ENABLE
	str	r2, [r1]
wait_emif_enable1:
	ldr	r3, [r1]
	cmp	r2, r3
	bne	wait_emif_enable1

	adr	r9, am43xx_emif_sram_table

	ldr	r1, [r9, #EMIF_PM_RESTORE_CONTEXT_OFFSET]
	blx	r1

	ldr	r1, [r9, #EMIF_PM_EXIT_SR_OFFSET]
	blx	r1

	ldr	r2, am43xx_phys_emif_poweroff
	mov	r1, #AM43XX_EMIF_POWEROFF_DISABLE
	str	r1, [r2, #0x0]

#ifdef CONFIG_CACHE_L2X0
	ldr	r2, l2_cache_base
	ldr	r0, [r2, #L2X0_CTRL]
	and	r0, #0x0f
	cmp	r0, #1
	beq	skip_l2en		@ Skip if already enabled

	adr	r4, am43xx_pm_ro_sram_data
	ldr	r3, [r4, #AMX3_PM_RO_SRAM_DATA_PHYS_OFFSET]
	ldr	r0, [r3, #AMX3_PM_L2_PREFETCH_CTRL_VAL_OFFSET]

	ldr	r12, l2_smc1
	dsb
	smc	#0
	dsb
set_aux_ctrl:
	ldr	r0, [r3, #AMX3_PM_L2_AUX_CTRL_VAL_OFFSET]
	ldr	r12, l2_smc2
	dsb
	smc	#0
	dsb

	/* L2 invalidate on resume */
	ldr	r0, l2_val
	ldr	r2, l2_cache_base
	str	r0, [r2, #L2X0_INV_WAY]
wait2:
	ldr	r0, [r2, #L2X0_INV_WAY]
	ldr	r1, l2_val
	ands	r0, r0, r1
	bne	wait2
#ifdef CONFIG_PL310_ERRATA_727915
	mov	r0, #0x00
	mov	r12, #OMAP4_MON_L2X0_DBG_CTRL_INDEX
	dsb
	smc	#0
	dsb
#endif
l2x_sync2:
	ldr	r2, l2_cache_base
	mov	r0, #0x0
	str	r0, [r2, #L2X0_CACHE_SYNC]
sync2:
	ldr	r0, [r2, #L2X0_CACHE_SYNC]
	ands	r0, r0, #0x1
	bne	sync2

	mov	r0, #0x1
	ldr	r12, l2_smc3
	dsb
	smc	#0
	dsb
#endif
skip_l2en:
	/* We are back. Branch to the common CPU resume routine */
	mov	r0, #0
	ldr	pc, resume_addr
ENDPROC(am43xx_resume_from_deep_sleep)

/*
 * Local variables
 */
	.align
resume_addr:
	.word	cpu_resume - PAGE_OFFSET + 0x80000000
kernel_flush:
	.word	v7_flush_dcache_all
ddr_start:
	.word	PAGE_OFFSET

am43xx_phys_emif_poweroff:
	.word	(AM43XX_CM_BASE + AM43XX_PRM_DEVICE_INST + \
		 AM43XX_PRM_EMIF_CTRL_OFFSET)
am43xx_virt_mpu_clkstctrl:
	.word	(AM43XX_CM_MPU_CLKSTCTRL)
am43xx_virt_mpu_clkctrl:
	.word	(AM43XX_CM_MPU_MPU_CLKCTRL)
am43xx_virt_emif_clkctrl:
	.word	(AM43XX_CM_PER_EMIF_CLKCTRL)
am43xx_phys_emif_clkctrl:
	.word	(AM43XX_CM_BASE + AM43XX_CM_PER_INST + \
		 AM43XX_CM_PER_EMIF_CLKCTRL_OFFSET)

#ifdef CONFIG_CACHE_L2X0
/* L2 cache related defines for AM437x */
get_l2cache_base:
	.word	omap4_get_l2cache_base
l2_cache_base:
	.word	OMAP44XX_L2CACHE_BASE
l2_smc1:
	.word	OMAP4_MON_L2X0_PREFETCH_INDEX
l2_smc2:
	.word	OMAP4_MON_L2X0_AUXCTRL_INDEX
l2_smc3:
	.word	OMAP4_MON_L2X0_CTRL_INDEX
l2_val:
	.word	0xffff
#endif

.align 3
/* DDR related defines */
ENTRY(am43xx_emif_sram_table)
	.space EMIF_PM_FUNCTIONS_SIZE

ENTRY(am43xx_pm_sram)
	.word am43xx_do_wfi
	.word am43xx_do_wfi_sz
	.word am43xx_resume_offset
	.word am43xx_emif_sram_table
	.word am43xx_pm_ro_sram_data

.align 3

ENTRY(am43xx_pm_ro_sram_data)
	.space AMX3_PM_RO_SRAM_DATA_SIZE

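/*
 * Total size of everything from am43xx_do_wfi to this point (code, EMIF
 * function table and read-only data), used by the platform PM code when
 * copying this block into SRAM.
 */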
ENTRY(am43xx_do_wfi_sz)
	.word . - am43xx_do_wfi