/*
 * linux/arch/arm/mach-omap2/sleep.S
 *
 * (C) Copyright 2007
 * Texas Instruments
 * Karthik Dasu <karthik-dp@ti.com>
 *
 * (C) Copyright 2004
 * Texas Instruments, <www.ti.com>
 * Richard Woodruff <r-woodruff2@ti.com>
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 2 of
 * the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston,
 * MA 02111-1307 USA
 */
#include <linux/linkage.h>
#include <asm/assembler.h>
#include <mach/io.h>
#include <plat/control.h>

#include "prm.h"
#include "sdrc.h"

#define PM_PREPWSTST_CORE_V	OMAP34XX_PRM_REGADDR(CORE_MOD, \
				OMAP3430_PM_PREPWSTST)
#define PM_PREPWSTST_MPU_V	OMAP34XX_PRM_REGADDR(MPU_MOD, \
				OMAP3430_PM_PREPWSTST)
#define PM_PWSTCTRL_MPU_P	OMAP3430_PRM_BASE + MPU_MOD + PM_PWSTCTRL
#define SCRATCHPAD_MEM_OFFS	0x310 /* Move this as correct place is
				       * available */
#define SCRATCHPAD_BASE_P	(OMAP343X_CTRL_BASE + OMAP343X_CONTROL_MEM_WKUP\
				+ SCRATCHPAD_MEM_OFFS)
#define SDRC_POWER_V		OMAP34XX_SDRC_REGADDR(SDRC_POWER)

	.text
/* Function call to get the restore pointer for resume from OFF */
ENTRY(get_restore_pointer)
	stmfd	sp!, {lr}	@ save registers on stack
	adr	r0, restore
	ldmfd	sp!, {pc}	@ restore regs and return
ENTRY(get_restore_pointer_sz)
	.word	. - get_restore_pointer_sz
/*
 * Forces OMAP into idle state
 *
 * omap34xx_suspend() - This bit of code just executes the WFI
 * for normal idles.
 *
 * Note: This code gets copied to internal SRAM at boot. When the OMAP
 * wakes up it continues execution at the point it went to sleep.
 */
ENTRY(omap34xx_cpu_suspend)
	stmfd	sp!, {r0-r12, lr}	@ save registers on stack
loop:
	/*b	loop*/	@ Enable to debug by stepping through code
	/* r0 contains restore pointer in sdram */
	/* r1 contains information about saving context */
	ldr	r4, sdrc_power		@ read the SDRC_POWER register
	ldr	r5, [r4]		@ read the contents of SDRC_POWER
	orr	r5, r5, #0x40		@ enable self refresh on idle req
	str	r5, [r4]		@ write back to SDRC_POWER register

	cmp	r1, #0x0
	/* If context save is required, do that and execute wfi */
	bne	save_context_wfi
	/* Data memory barrier and Data sync barrier */
	mov	r1, #0
	mcr	p15, 0, r1, c7, c10, 4
	mcr	p15, 0, r1, c7, c10, 5

	wfi				@ wait for interrupt

	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	bl	i_dll_wait

	ldmfd	sp!, {r0-r12, pc}	@ restore regs and return
restore:
	/* b restore*/	@ Enable to debug restore code
	/* Check what was the reason for mpu reset and store the reason in r9*/
	/* 1 - Only L1 and logic lost */
	/* 2 - Only L2 lost - In this case, we won't be here */
	/* 3 - Both L1 and L2 lost */
	ldr	r1, pm_pwstctrl_mpu
	ldr	r2, [r1]
	and	r2, r2, #0x3
	cmp	r2, #0x0	@ Check if target power state was OFF or RET
	moveq	r9, #0x3	@ MPU OFF => L1 and L2 lost
	movne	r9, #0x1	@ Only L1 and L2 lost => avoid L2 invalidation
	bne	logic_l1_restore
	/* Execute smi to invalidate L2 cache */
	mov	r12, #0x1	@ set up to invalidate L2
smi:	.word 0xE1600070	@ Call SMI monitor (smieq)
logic_l1_restore:
	mov	r1, #0
	/* Invalidate all instruction caches to PoU
	 * and flush branch target cache */
	mcr	p15, 0, r1, c7, c5, 0

	ldr	r4, scratchpad_base
	ldr	r3, [r4,#0xBC]
	ldmia	r3!, {r4-r6}
	mov	sp, r4
	msr	spsr_cxsf, r5
	mov	lr, r6

	ldmia	r3!, {r4-r9}
	/* Coprocessor access Control Register */
	mcr	p15, 0, r4, c1, c0, 2

	/* TTBR0 */
	MCR	p15, 0, r5, c2, c0, 0
	/* TTBR1 */
	MCR	p15, 0, r6, c2, c0, 1
	/* Translation table base control register */
	MCR	p15, 0, r7, c2, c0, 2
	/* domain access Control Register */
	MCR	p15, 0, r8, c3, c0, 0
	/* data fault status Register */
	MCR	p15, 0, r9, c5, c0, 0

	ldmia	r3!,{r4-r8}
	/* instruction fault status Register */
	MCR	p15, 0, r4, c5, c0, 1
	/* Data Auxiliary Fault Status Register */
	MCR	p15, 0, r5, c5, c1, 0
	/* Instruction Auxiliary Fault Status Register*/
	MCR	p15, 0, r6, c5, c1, 1
	/* Data Fault Address Register */
	MCR	p15, 0, r7, c6, c0, 0
	/* Instruction Fault Address Register*/
	MCR	p15, 0, r8, c6, c0, 2
	ldmia	r3!,{r4-r7}

	/* user r/w thread and process ID */
	MCR	p15, 0, r4, c13, c0, 2
	/* user ro thread and process ID */
	MCR	p15, 0, r5, c13, c0, 3
	/* Privileged only thread and process ID */
	MCR	p15, 0, r6, c13, c0, 4
	/* cache size selection */
	MCR	p15, 2, r7, c0, c0, 0
	ldmia	r3!,{r4-r8}
	/* Data TLB lockdown registers */
	MCR	p15, 0, r4, c10, c0, 0
	/* Instruction TLB lockdown registers */
	MCR	p15, 0, r5, c10, c0, 1
	/* Secure or Nonsecure Vector Base Address */
	MCR	p15, 0, r6, c12, c0, 0
	/* FCSE PID */
	MCR	p15, 0, r7, c13, c0, 0
	/* Context PID */
	MCR	p15, 0, r8, c13, c0, 1

	ldmia	r3!,{r4-r5}
	/* primary memory remap register */
	MCR	p15, 0, r4, c10, c2, 0
	/* normal memory remap register */
	MCR	p15, 0, r5, c10, c2, 1

	/* Restore cpsr */
	ldmia	r3!,{r4}	/* load CPSR from SDRAM */
	msr	cpsr, r4	/* store cpsr */

	/* Enabling MMU here */
	mrc	p15, 0, r7, c2, c0, 2	/* Read TTBRControl */
	/* Extract N (0:2) bits and decide whether to use TTBR0 or TTBR1*/
	and	r7, #0x7
	cmp	r7, #0x0
	beq	usettbr0
ttbr_error:
	/* More work needs to be done to support N[0:2] value other than 0
	 * So looping here so that the error can be detected
	 */
	b	ttbr_error
usettbr0:
	mrc	p15, 0, r2, c2, c0, 0
	ldr	r5, ttbrbit_mask
	and	r2, r5
	mov	r4, pc
	ldr	r5, table_index_mask
	and	r4, r5	/* r4 = 31 to 20 bits of pc */
	/* Extract the value to be written to table entry */
	ldr	r1, table_entry
	add	r1, r1, r4	/* r1 has value to be written to table entry*/
	/* Getting the address of table entry to modify */
	lsr	r4, #18
	add	r2, r4	/* r2 has the location which needs to be modified */
	/* Storing previous entry of location being modified */
	ldr	r5, scratchpad_base
	ldr	r4, [r2]
	str	r4, [r5, #0xC0]
	/* Modify the table entry */
	str	r1, [r2]
	/* Storing address of entry being modified
	 * - will be restored after enabling MMU */
	ldr	r5, scratchpad_base
	str	r2, [r5, #0xC4]

	mov	r0, #0
	mcr	p15, 0, r0, c7, c5, 4	@ Flush prefetch buffer
	mcr	p15, 0, r0, c7, c5, 6	@ Invalidate branch predictor array
	mcr	p15, 0, r0, c8, c5, 0	@ Invalidate instruction TLB
	mcr	p15, 0, r0, c8, c6, 0	@ Invalidate data TLB
	/* Restore control register but don't enable caches here*/
	/* Caches will be enabled after restoring MMU table entry */
	ldmia	r3!, {r4}
	/* Store previous value of control register in scratchpad */
	str	r4, [r5, #0xC8]
	ldr	r2, cache_pred_disable_mask
	and	r4, r2
	mcr	p15, 0, r4, c1, c0, 0

	ldmfd	sp!, {r0-r12, pc}	@ restore regs and return
save_context_wfi:
	/*b	save_context_wfi*/	@ enable to debug save code
	mov	r8, r0	/* Store SDRAM address in r8 */
	/* Check what that target sleep state is:stored in r1*/
	/* 1 - Only L1 and logic lost */
	/* 2 - Only L2 lost */
	/* 3 - Both L1 and L2 lost */
	cmp	r1, #0x2	/* Only L2 lost */
	beq	clean_l2
	cmp	r1, #0x1	/* L2 retained */
	/* r9 stores whether to clean L2 or not*/
	moveq	r9, #0x0	/* Don't Clean L2 */
	movne	r9, #0x1	/* Clean L2 */
l1_logic_lost:
	/* Store sp and spsr to SDRAM */
	mov	r4, sp
	mrs	r5, spsr
	mov	r6, lr
	stmia	r8!, {r4-r6}
	/* Save all ARM registers */
	/* Coprocessor access control register */
	mrc	p15, 0, r6, c1, c0, 2
	stmia	r8!, {r6}
	/* TTBR0, TTBR1 and Translation table base control */
	mrc	p15, 0, r4, c2, c0, 0
	mrc	p15, 0, r5, c2, c0, 1
	mrc	p15, 0, r6, c2, c0, 2
	stmia	r8!, {r4-r6}
	/* Domain access control register, data fault status register,
	and instruction fault status register */
	mrc	p15, 0, r4, c3, c0, 0
	mrc	p15, 0, r5, c5, c0, 0
	mrc	p15, 0, r6, c5, c0, 1
	stmia	r8!, {r4-r6}
	/* Data aux fault status register, instruction aux fault status,
	data fault address register and instruction fault address register*/
	mrc	p15, 0, r4, c5, c1, 0
	mrc	p15, 0, r5, c5, c1, 1
	mrc	p15, 0, r6, c6, c0, 0
	mrc	p15, 0, r7, c6, c0, 2
	stmia	r8!, {r4-r7}
	/* user r/w thread and process ID, user r/o thread and process ID,
	priv only thread and process ID, cache size selection */
	mrc	p15, 0, r4, c13, c0, 2
	mrc	p15, 0, r5, c13, c0, 3
	mrc	p15, 0, r6, c13, c0, 4
	mrc	p15, 2, r7, c0, c0, 0
	stmia	r8!, {r4-r7}
	/* Data TLB lockdown, instruction TLB lockdown registers */
	mrc	p15, 0, r5, c10, c0, 0
	mrc	p15, 0, r6, c10, c0, 1
	stmia	r8!, {r5-r6}
	/* Secure or non secure vector base address, FCSE PID, Context PID*/
	mrc	p15, 0, r4, c12, c0, 0
	mrc	p15, 0, r5, c13, c0, 0
	mrc	p15, 0, r6, c13, c0, 1
	stmia	r8!, {r4-r6}
	/* Primary remap, normal remap registers */
	mrc	p15, 0, r4, c10, c2, 0
	mrc	p15, 0, r5, c10, c2, 1
	stmia	r8!,{r4-r5}

	/* Store current cpsr*/
	mrs	r2, cpsr
	stmia	r8!, {r2}

	mrc	p15, 0, r4, c1, c0, 0
	/* save control register */
	stmia	r8!, {r4}
clean_caches:
	/* Clean Data or unified cache to POU*/
	/* How to invalidate only L1 cache???? - #FIX_ME# */
	/* mcr	p15, 0, r11, c7, c11, 1 */
	cmp	r9, #1 /* Check whether L2 inval is required or not*/
	bne	skip_l2_inval
clean_l2:
	/* read clidr */
	mrc	p15, 1, r0, c0, c0, 1
	/* extract loc from clidr */
	ands	r3, r0, #0x7000000
	/* left align loc bit field */
	mov	r3, r3, lsr #23
	/* if loc is 0, then no need to clean */
	beq	finished
	/* start clean at cache level 0 */
	mov	r10, #0
loop1:
	/* work out 3x current cache level */
	add	r2, r10, r10, lsr #1
	/* extract cache type bits from clidr*/
	mov	r1, r0, lsr r2
	/* mask of the bits for current cache only */
	and	r1, r1, #7
	/* see what cache we have at this level */
	cmp	r1, #2
	/* skip if no cache, or just i-cache */
	blt	skip
	/* select current cache level in cssr */
	mcr	p15, 2, r10, c0, c0, 0
	/* isb to sync the new cssr&csidr */
	isb
	/* read the new csidr */
	mrc	p15, 1, r1, c0, c0, 0
	/* extract the length of the cache lines */
	and	r2, r1, #7
	/* add 4 (line length offset) */
	add	r2, r2, #4
	ldr	r4, assoc_mask
	/* find maximum number on the way size */
	ands	r4, r4, r1, lsr #3
	/* find bit position of way size increment */
	clz	r5, r4
	ldr	r7, numset_mask
	/* extract max number of the index size*/
	ands	r7, r7, r1, lsr #13
loop2:
	mov	r9, r4
	/* create working copy of max way size*/
loop3:
	/* factor way and cache number into r11 */
	orr	r11, r10, r9, lsl r5
	/* factor index number into r11 */
	orr	r11, r11, r7, lsl r2
	/* clean & invalidate by set/way */
	mcr	p15, 0, r11, c7, c10, 2
	/* decrement the way*/
	subs	r9, r9, #1
	bge	loop3
	/* decrement the index */
	subs	r7, r7, #1
	bge	loop2
skip:
	add	r10, r10, #2
	/* increment cache number */
	cmp	r3, r10
	bgt	loop1
finished:
	/* switch back to cache level 0 */
	mov	r10, #0
	/* select current cache level in cssr */
	mcr	p15, 2, r10, c0, c0, 0
	isb
skip_l2_inval:
	/* Data memory barrier and Data sync barrier */
	mov	r1, #0
	mcr	p15, 0, r1, c7, c10, 4
	mcr	p15, 0, r1, c7, c10, 5

	wfi				@ wait for interrupt
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	bl	i_dll_wait
	/* restore regs and return */
	ldmfd	sp!, {r0-r12, pc}

i_dll_wait:
	ldr	r4, clk_stabilize_delay

i_dll_delay:
	subs	r4, r4, #0x1
	bne	i_dll_delay
	ldr	r4, sdrc_power
	ldr	r5, [r4]
	bic	r5, r5, #0x40
	str	r5, [r4]
	bx	lr
pm_prepwstst_core:
	.word	PM_PREPWSTST_CORE_V
pm_prepwstst_mpu:
	.word	PM_PREPWSTST_MPU_V
pm_pwstctrl_mpu:
	.word	PM_PWSTCTRL_MPU_P
scratchpad_base:
	.word	SCRATCHPAD_BASE_P
sdrc_power:
	.word	SDRC_POWER_V
clk_stabilize_delay:
	.word	0x000001FF
assoc_mask:
	.word	0x3ff
numset_mask:
	.word	0x7fff
ttbrbit_mask:
	.word	0xFFFFC000
table_index_mask:
	.word	0xFFF00000
table_entry:
	.word	0x00000C02
cache_pred_disable_mask:
	.word	0xFFFFE7FB
ENTRY(omap34xx_cpu_suspend_sz)
	.word	. - omap34xx_cpu_suspend