xref: /openbmc/linux/arch/arm/mach-omap2/sleep34xx.S (revision 27d59a4a)
18bd22949SKevin Hilman/*
28bd22949SKevin Hilman * linux/arch/arm/mach-omap2/sleep.S
38bd22949SKevin Hilman *
48bd22949SKevin Hilman * (C) Copyright 2007
58bd22949SKevin Hilman * Texas Instruments
68bd22949SKevin Hilman * Karthik Dasu <karthik-dp@ti.com>
78bd22949SKevin Hilman *
88bd22949SKevin Hilman * (C) Copyright 2004
98bd22949SKevin Hilman * Texas Instruments, <www.ti.com>
108bd22949SKevin Hilman * Richard Woodruff <r-woodruff2@ti.com>
118bd22949SKevin Hilman *
128bd22949SKevin Hilman * This program is free software; you can redistribute it and/or
138bd22949SKevin Hilman * modify it under the terms of the GNU General Public License as
148bd22949SKevin Hilman * published by the Free Software Foundation; either version 2 of
158bd22949SKevin Hilman * the License, or (at your option) any later version.
168bd22949SKevin Hilman *
178bd22949SKevin Hilman * This program is distributed in the hope that it will be useful,
188bd22949SKevin Hilman * but WITHOUT ANY WARRANTY; without even the implied warranty of
198bd22949SKevin Hilman * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
208bd22949SKevin Hilman * GNU General Public License for more details.
218bd22949SKevin Hilman *
228bd22949SKevin Hilman * You should have received a copy of the GNU General Public License
238bd22949SKevin Hilman * along with this program; if not, write to the Free Software
248bd22949SKevin Hilman * Foundation, Inc., 59 Temple Place, Suite 330, Boston,
258bd22949SKevin Hilman * MA 02111-1307 USA
268bd22949SKevin Hilman */
278bd22949SKevin Hilman#include <linux/linkage.h>
288bd22949SKevin Hilman#include <asm/assembler.h>
298bd22949SKevin Hilman#include <mach/io.h>
30ce491cf8STony Lindgren#include <plat/control.h>
318bd22949SKevin Hilman
328bd22949SKevin Hilman#include "prm.h"
338bd22949SKevin Hilman#include "sdrc.h"
348bd22949SKevin Hilman
/*
 * Register addresses used by the sleep/wake code below.
 * Naming convention (NOTE(review): inferred from the macros used —
 * confirm against the plat/mach headers): a _V suffix is built via
 * the *_REGADDR() helpers, a _P suffix is a raw base+offset, i.e.
 * a physical address usable while the MMU is off (wake-up path).
 */
358bd22949SKevin Hilman#define PM_PREPWSTST_CORE_V	OMAP34XX_PRM_REGADDR(CORE_MOD, \
368bd22949SKevin Hilman				OMAP3430_PM_PREPWSTST)
378bd22949SKevin Hilman#define PM_PREPWSTST_MPU_V	OMAP34XX_PRM_REGADDR(MPU_MOD, \
388bd22949SKevin Hilman				OMAP3430_PM_PREPWSTST)
3961255ab9SRajendra Nayak#define PM_PWSTCTRL_MPU_P	OMAP3430_PRM_BASE + MPU_MOD + PM_PWSTCTRL
/* Physical base of on-chip SRAM (this code gets copied there) */
4027d59a4aSTero Kristo#define SRAM_BASE_P		0x40200000
/* CONTROL_STATUS register — read to distinguish GP from HS devices */
4127d59a4aSTero Kristo#define CONTROL_STAT		0x480022F0
428bd22949SKevin Hilman#define SCRATCHPAD_MEM_OFFS	0x310 /* Move this as correct place is
438bd22949SKevin Hilman				       * available */
4461255ab9SRajendra Nayak#define SCRATCHPAD_BASE_P	(OMAP343X_CTRL_BASE + OMAP343X_CONTROL_MEM_WKUP\
4561255ab9SRajendra Nayak						+ SCRATCHPAD_MEM_OFFS)
468bd22949SKevin Hilman#define SDRC_POWER_V		OMAP34XX_SDRC_REGADDR(SDRC_POWER)
478bd22949SKevin Hilman
488bd22949SKevin Hilman	.text
498bd22949SKevin Hilman/* Function call to get the restore pointer for resume from OFF */
508bd22949SKevin HilmanENTRY(get_restore_pointer)
518bd22949SKevin Hilman        stmfd   sp!, {lr}     @ save registers on stack
528bd22949SKevin Hilman	adr	r0, restore	@ r0 = run-time address of the 'restore' entry
538bd22949SKevin Hilman        ldmfd   sp!, {pc}     @ restore regs and return
/*
 * Size of get_restore_pointer in bytes, for copying it to SRAM.
 * Must be measured from get_restore_pointer itself: the original
 * ". - get_restore_pointer_sz" subtracted this label from the current
 * location and therefore always assembled to 0.  This now matches the
 * save_secure_ram_context_sz / omap34xx_cpu_suspend_sz idiom below.
 */
548bd22949SKevin HilmanENTRY(get_restore_pointer_sz)
558bd22949SKevin Hilman        .word   . - get_restore_pointer
5627d59a4aSTero Kristo
5727d59a4aSTero Kristo/* Function to call rom code to save secure ram context */
/*
 * save_secure_ram_context - ask the secure ROM code (PPA service 25)
 * to save the secure RAM context into an SDRAM buffer.
 * In:  r0 = physical SDRAM destination address (stored into api_params[1])
 * Out: r0 is NOT preserved (only r1-r12 are saved/restored below)
 */
5827d59a4aSTero KristoENTRY(save_secure_ram_context)
5927d59a4aSTero Kristo	stmfd	sp!, {r1-r12, lr}	@ save registers on stack
6027d59a4aSTero Kristosave_secure_ram_debug:
6127d59a4aSTero Kristo	/* b save_secure_ram_debug */	@ enable to debug save code
6227d59a4aSTero Kristo	adr	r3, api_params		@ r3 points to parameters
6327d59a4aSTero Kristo	str	r0, [r3,#0x4]		@ r0 has sdram address
	/*
	 * Convert r3 to the physical address of api_params as seen from
	 * the SRAM copy of this code: keep the low 16 bits of the
	 * PC-relative address and rebase them onto SRAM_BASE_P.
	 * NOTE(review): assumes this routine executes from a copy placed
	 * in the first 64K of OMAP3 SRAM — confirm against the SRAM
	 * push/copy code.
	 */
6427d59a4aSTero Kristo	ldr	r12, high_mask
6527d59a4aSTero Kristo	and	r3, r3, r12
6627d59a4aSTero Kristo	ldr	r12, sram_phy_addr_mask
6727d59a4aSTero Kristo	orr	r3, r3, r12
6827d59a4aSTero Kristo	mov	r0, #25			@ set service ID for PPA
6927d59a4aSTero Kristo	mov	r12, r0			@ copy secure service ID in r12
7027d59a4aSTero Kristo	mov	r1, #0			@ set task id for ROM code in r1
	/* NOTE(review): the restore path below passes r2 = #4 for its PPA
	 * calls — confirm #7 is the intended flag value for service 25 */
7127d59a4aSTero Kristo	mov	r2, #7			@ set some flags in r2, r6
7227d59a4aSTero Kristo	mov	r6, #0xff
7327d59a4aSTero Kristo	mcr	p15, 0, r0, c7, c10, 4	@ data write barrier
7427d59a4aSTero Kristo	mcr	p15, 0, r0, c7, c10, 5	@ data memory barrier
7527d59a4aSTero Kristo	.word	0xE1600071		@ call SMI monitor (smi #1)
	/* padding after the secure monitor call, as at the other SMI sites */
7627d59a4aSTero Kristo	nop
7727d59a4aSTero Kristo	nop
7827d59a4aSTero Kristo	nop
7927d59a4aSTero Kristo	nop
8027d59a4aSTero Kristo	ldmfd	sp!, {r1-r12, pc}
/* Literals must stay adjacent so the PC-relative ldr's above still
 * reach them after the code is copied to SRAM */
8127d59a4aSTero Kristosram_phy_addr_mask:
8227d59a4aSTero Kristo	.word	SRAM_BASE_P
8327d59a4aSTero Kristohigh_mask:
8427d59a4aSTero Kristo	.word	0xffff
8527d59a4aSTero Kristoapi_params:
8627d59a4aSTero Kristo	.word	0x4, 0x0, 0x0, 0x1, 0x1
/* Byte size of this routine, for the copy-to-SRAM code */
8727d59a4aSTero KristoENTRY(save_secure_ram_context_sz)
8827d59a4aSTero Kristo	.word	. - save_secure_ram_context
8927d59a4aSTero Kristo
908bd22949SKevin Hilman/*
918bd22949SKevin Hilman * Forces OMAP into idle state
928bd22949SKevin Hilman *
938bd22949SKevin Hilman * omap34xx_suspend() - This bit of code just executes the WFI
948bd22949SKevin Hilman * for normal idles.
958bd22949SKevin Hilman *
968bd22949SKevin Hilman * Note: This code get's copied to internal SRAM at boot. When the OMAP
978bd22949SKevin Hilman *	 wakes up it continues execution at the point it went to sleep.
988bd22949SKevin Hilman */
998bd22949SKevin HilmanENTRY(omap34xx_cpu_suspend)
1008bd22949SKevin Hilman	stmfd	sp!, {r0-r12, lr}		@ save registers on stack
1018bd22949SKevin Hilmanloop:
1028bd22949SKevin Hilman	/*b	loop*/	@Enable to debug by stepping through code
1038bd22949SKevin Hilman	/* r0 contains restore pointer in sdram */
1048bd22949SKevin Hilman	/* r1 contains information about saving context */
	/* r1 == 0: plain idle (fall through to wfi below);
	 * r1 != 0: save CPU context first (save_context_wfi) */
1058bd22949SKevin Hilman	ldr     r4, sdrc_power          @ read the SDRC_POWER register
1068bd22949SKevin Hilman	ldr     r5, [r4]                @ read the contents of SDRC_POWER
1078bd22949SKevin Hilman	orr     r5, r5, #0x40           @ enable self refresh on idle req
1088bd22949SKevin Hilman	str     r5, [r4]                @ write back to SDRC_POWER register
1098bd22949SKevin Hilman
1108bd22949SKevin Hilman	cmp	r1, #0x0
1118bd22949SKevin Hilman	/* If context save is required, do that and execute wfi */
1128bd22949SKevin Hilman	bne	save_context_wfi
1138bd22949SKevin Hilman	/* Data memory barrier and Data sync barrier */
1148bd22949SKevin Hilman	mov	r1, #0
1158bd22949SKevin Hilman	mcr	p15, 0, r1, c7, c10, 4
1168bd22949SKevin Hilman	mcr	p15, 0, r1, c7, c10, 5
1178bd22949SKevin Hilman
1188bd22949SKevin Hilman	wfi				@ wait for interrupt
1198bd22949SKevin Hilman
	/* NOTE(review): the nops appear to pad execution after WFI before
	 * touching the SDRC again — confirm against Cortex-A8/OMAP3 errata */
1208bd22949SKevin Hilman	nop
1218bd22949SKevin Hilman	nop
1228bd22949SKevin Hilman	nop
1238bd22949SKevin Hilman	nop
1248bd22949SKevin Hilman	nop
1258bd22949SKevin Hilman	nop
1268bd22949SKevin Hilman	nop
1278bd22949SKevin Hilman	nop
1288bd22949SKevin Hilman	nop
1298bd22949SKevin Hilman	nop
	/* delay, then take SDRC back out of self-refresh (clears bit 0x40) */
1308bd22949SKevin Hilman	bl i_dll_wait
1318bd22949SKevin Hilman
1328bd22949SKevin Hilman	ldmfd	sp!, {r0-r12, pc}		@ restore regs and return
/*
 * Wake-up entry point (address handed out by get_restore_pointer).
 * Runs with the MMU off after an MPU OFF/RET transition.
 */
1338bd22949SKevin Hilmanrestore:
1348bd22949SKevin Hilman	/* b restore*/  @ Enable to debug restore code
1358bd22949SKevin Hilman        /* Check what was the reason for mpu reset and store the reason in r9*/
1368bd22949SKevin Hilman        /* 1 - Only L1 and logic lost */
1378bd22949SKevin Hilman        /* 2 - Only L2 lost - In this case, we wont be here */
1388bd22949SKevin Hilman        /* 3 - Both L1 and L2 lost */
1398bd22949SKevin Hilman	ldr     r1, pm_pwstctrl_mpu
1408bd22949SKevin Hilman	ldr	r2, [r1]
1418bd22949SKevin Hilman	and     r2, r2, #0x3
1428bd22949SKevin Hilman	cmp     r2, #0x0	@ Check if target power state was OFF or RET
1438bd22949SKevin Hilman        moveq   r9, #0x3        @ MPU OFF => L1 and L2 lost
1448bd22949SKevin Hilman	movne	r9, #0x1	@ Only L1 and logic lost => avoid L2 invalidation
1458bd22949SKevin Hilman	bne	logic_l1_restore
	/* L2 was lost: it must be invalidated before use.  How depends on
	 * the device type read from CONTROL_STATUS.
	 * NOTE(review): bits [10:8] == 0x300 is treated as a GP device
	 * (direct SMI); otherwise secure ROM (PPA) services are used —
	 * confirm encoding against the OMAP34xx TRM. */
14627d59a4aSTero Kristo	ldr	r0, control_stat
14727d59a4aSTero Kristo	ldr	r1, [r0]
14827d59a4aSTero Kristo	and	r1, #0x700
14927d59a4aSTero Kristo	cmp	r1, #0x300
15027d59a4aSTero Kristo	beq	l2_inv_gp
	/* HS device: PPA service 40 = invalidate L2 */
15127d59a4aSTero Kristo	mov	r0, #40		@ set service ID for PPA
15227d59a4aSTero Kristo	mov	r12, r0		@ copy secure Service ID in r12
15327d59a4aSTero Kristo	mov	r1, #0		@ set task id for ROM code in r1
15427d59a4aSTero Kristo	mov	r2, #4		@ set some flags in r2, r6
15527d59a4aSTero Kristo	mov	r6, #0xff
15627d59a4aSTero Kristo	adr	r3, l2_inv_api_params	@ r3 points to dummy parameters
15727d59a4aSTero Kristo	mcr	p15, 0, r0, c7, c10, 4	@ data write barrier
15827d59a4aSTero Kristo	mcr	p15, 0, r0, c7, c10, 5	@ data memory barrier
15927d59a4aSTero Kristo	.word	0xE1600071		@ call SMI monitor (smi #1)
16027d59a4aSTero Kristo	/* Write to Aux control register to set some bits */
	/* HS device: PPA service 42 = write auxiliary control register
	 * (value 0x72 from write_aux_control_params) */
16127d59a4aSTero Kristo	mov	r0, #42		@ set service ID for PPA
16227d59a4aSTero Kristo	mov	r12, r0		@ copy secure Service ID in r12
16327d59a4aSTero Kristo	mov	r1, #0		@ set task id for ROM code in r1
16427d59a4aSTero Kristo	mov	r2, #4		@ set some flags in r2, r6
16527d59a4aSTero Kristo	mov	r6, #0xff
16627d59a4aSTero Kristo	adr	r3, write_aux_control_params	@ r3 points to parameters
16727d59a4aSTero Kristo	mcr	p15, 0, r0, c7, c10, 4	@ data write barrier
16827d59a4aSTero Kristo	mcr	p15, 0, r0, c7, c10, 5	@ data memory barrier
16927d59a4aSTero Kristo	.word	0xE1600071		@ call SMI monitor (smi #1)
17027d59a4aSTero Kristo
17127d59a4aSTero Kristo	b	logic_l1_restore
17227d59a4aSTero Kristol2_inv_api_params:
17327d59a4aSTero Kristo	.word   0x1, 0x00
17427d59a4aSTero Kristowrite_aux_control_params:
17527d59a4aSTero Kristo	.word   0x1, 0x72
17627d59a4aSTero Kristol2_inv_gp:
1778bd22949SKevin Hilman	/* Execute smi to invalidate L2 cache */
1788bd22949SKevin Hilman	mov r12, #0x1                         @ set up to invalidate L2
1798bd22949SKevin Hilmansmi:    .word 0xE1600070		@ Call SMI monitor (smieq)
18027d59a4aSTero Kristo	/* Write to Aux control register to set some bits */
18127d59a4aSTero Kristo	mov	r0, #0x72
18227d59a4aSTero Kristo	mov	r12, #0x3
18327d59a4aSTero Kristo	.word 0xE1600070	@ Call SMI monitor (smieq)
/*
 * Restore core/CP15 context after L1 and logic were lost.  Runs with
 * the MMU off.  r3 is loaded from the scratchpad (offset 0xBC) and
 * points at the context block written by l1_logic_lost below; values
 * are read back in exactly the order they were saved.
 */
1848bd22949SKevin Hilmanlogic_l1_restore:
1858bd22949SKevin Hilman	mov	r1, #0
1868bd22949SKevin Hilman	/* Invalidate all instruction caches to PoU
1878bd22949SKevin Hilman	 * and flush branch target cache */
1888bd22949SKevin Hilman	mcr	p15, 0, r1, c7, c5, 0
1898bd22949SKevin Hilman
1908bd22949SKevin Hilman	ldr	r4, scratchpad_base
1918bd22949SKevin Hilman	ldr	r3, [r4,#0xBC]
1928bd22949SKevin Hilman	ldmia	r3!, {r4-r6}
1938bd22949SKevin Hilman	mov	sp, r4
1948bd22949SKevin Hilman	msr	spsr_cxsf, r5
1958bd22949SKevin Hilman	mov	lr, r6
1968bd22949SKevin Hilman
1978bd22949SKevin Hilman	ldmia	r3!, {r4-r9}
1988bd22949SKevin Hilman	/* Coprocessor access Control Register */
1998bd22949SKevin Hilman	mcr p15, 0, r4, c1, c0, 2
2008bd22949SKevin Hilman
2018bd22949SKevin Hilman	/* TTBR0 */
2028bd22949SKevin Hilman	MCR p15, 0, r5, c2, c0, 0
2038bd22949SKevin Hilman	/* TTBR1 */
2048bd22949SKevin Hilman	MCR p15, 0, r6, c2, c0, 1
2058bd22949SKevin Hilman	/* Translation table base control register */
2068bd22949SKevin Hilman	MCR p15, 0, r7, c2, c0, 2
2078bd22949SKevin Hilman	/*domain access Control Register */
2088bd22949SKevin Hilman	MCR p15, 0, r8, c3, c0, 0
2098bd22949SKevin Hilman	/* data fault status Register */
2108bd22949SKevin Hilman	MCR p15, 0, r9, c5, c0, 0
2118bd22949SKevin Hilman
2128bd22949SKevin Hilman	ldmia  r3!,{r4-r8}
2138bd22949SKevin Hilman	/* instruction fault status Register */
2148bd22949SKevin Hilman	MCR p15, 0, r4, c5, c0, 1
2158bd22949SKevin Hilman	/*Data Auxiliary Fault Status Register */
2168bd22949SKevin Hilman	MCR p15, 0, r5, c5, c1, 0
2178bd22949SKevin Hilman	/*Instruction Auxiliary Fault Status Register*/
2188bd22949SKevin Hilman	MCR p15, 0, r6, c5, c1, 1
2198bd22949SKevin Hilman	/*Data Fault Address Register */
2208bd22949SKevin Hilman	MCR p15, 0, r7, c6, c0, 0
2218bd22949SKevin Hilman	/*Instruction Fault Address Register*/
2228bd22949SKevin Hilman	MCR p15, 0, r8, c6, c0, 2
2238bd22949SKevin Hilman	ldmia  r3!,{r4-r7}
2248bd22949SKevin Hilman
2258bd22949SKevin Hilman	/* user r/w thread and process ID */
2268bd22949SKevin Hilman	MCR p15, 0, r4, c13, c0, 2
2278bd22949SKevin Hilman	/* user ro thread and process ID */
2288bd22949SKevin Hilman	MCR p15, 0, r5, c13, c0, 3
2298bd22949SKevin Hilman	/*Privileged only thread and process ID */
2308bd22949SKevin Hilman	MCR p15, 0, r6, c13, c0, 4
2318bd22949SKevin Hilman	/* cache size selection */
2328bd22949SKevin Hilman	MCR p15, 2, r7, c0, c0, 0
2338bd22949SKevin Hilman	ldmia  r3!,{r4-r8}
2348bd22949SKevin Hilman	/* Data TLB lockdown registers */
2358bd22949SKevin Hilman	MCR p15, 0, r4, c10, c0, 0
2368bd22949SKevin Hilman	/* Instruction TLB lockdown registers */
2378bd22949SKevin Hilman	MCR p15, 0, r5, c10, c0, 1
2388bd22949SKevin Hilman	/* Secure or Nonsecure Vector Base Address */
2398bd22949SKevin Hilman	MCR p15, 0, r6, c12, c0, 0
2408bd22949SKevin Hilman	/* FCSE PID */
2418bd22949SKevin Hilman	MCR p15, 0, r7, c13, c0, 0
2428bd22949SKevin Hilman	/* Context PID */
2438bd22949SKevin Hilman	MCR p15, 0, r8, c13, c0, 1
2448bd22949SKevin Hilman
2458bd22949SKevin Hilman	ldmia  r3!,{r4-r5}
2468bd22949SKevin Hilman	/* primary memory remap register */
2478bd22949SKevin Hilman	MCR p15, 0, r4, c10, c2, 0
2488bd22949SKevin Hilman	/*normal memory remap register */
2498bd22949SKevin Hilman	MCR p15, 0, r5, c10, c2, 1
2508bd22949SKevin Hilman
2518bd22949SKevin Hilman	/* Restore cpsr */
2528bd22949SKevin Hilman	ldmia	r3!,{r4}	/*load CPSR from SDRAM*/
2538bd22949SKevin Hilman	msr	cpsr, r4	/*store cpsr */
2548bd22949SKevin Hilman
2558bd22949SKevin Hilman	/* Enabling MMU here */
2568bd22949SKevin Hilman	mrc	p15, 0, r7, c2, c0, 2 /* Read TTBRControl */
2578bd22949SKevin Hilman	/* Extract N (0:2) bits and decide whether to use TTBR0 or TTBR1*/
2588bd22949SKevin Hilman	and	r7, #0x7
2598bd22949SKevin Hilman	cmp	r7, #0x0
2608bd22949SKevin Hilman	beq	usettbr0
2618bd22949SKevin Hilmanttbr_error:
2628bd22949SKevin Hilman	/* More work needs to be done to support N[0:2] value other than 0
2638bd22949SKevin Hilman	* So looping here so that the error can be detected
2648bd22949SKevin Hilman	*/
2658bd22949SKevin Hilman	b	ttbr_error
/*
 * Patch a 1MB first-level section entry mapping the current (physical)
 * PC 1:1, so execution survives the moment the MMU is switched on.
 * The overwritten entry and its address are stashed in the scratchpad
 * (offsets 0xC0 / 0xC4) so it can be undone after the MMU is enabled.
 */
2668bd22949SKevin Hilmanusettbr0:
2678bd22949SKevin Hilman	mrc	p15, 0, r2, c2, c0, 0
2688bd22949SKevin Hilman	ldr	r5, ttbrbit_mask
2698bd22949SKevin Hilman	and	r2, r5
2708bd22949SKevin Hilman	mov	r4, pc
2718bd22949SKevin Hilman	ldr	r5, table_index_mask
2728bd22949SKevin Hilman	and	r4, r5 /* r4 = 31 to 20 bits of pc */
2738bd22949SKevin Hilman	/* Extract the value to be written to table entry */
2748bd22949SKevin Hilman	ldr	r1, table_entry
2758bd22949SKevin Hilman	add	r1, r1, r4 /* r1 has value to be written to table entry*/
2768bd22949SKevin Hilman	/* Getting the address of table entry to modify */
2778bd22949SKevin Hilman	lsr	r4, #18
2788bd22949SKevin Hilman	add	r2, r4 /* r2 has the location which needs to be modified */
2798bd22949SKevin Hilman	/* Storing previous entry of location being modified */
2808bd22949SKevin Hilman	ldr	r5, scratchpad_base
2818bd22949SKevin Hilman	ldr	r4, [r2]
2828bd22949SKevin Hilman	str	r4, [r5, #0xC0]
2838bd22949SKevin Hilman	/* Modify the table entry */
2848bd22949SKevin Hilman	str	r1, [r2]
2858bd22949SKevin Hilman	/* Storing address of entry being modified
2868bd22949SKevin Hilman	 * - will be restored after enabling MMU */
2878bd22949SKevin Hilman	ldr	r5, scratchpad_base
2888bd22949SKevin Hilman	str	r2, [r5, #0xC4]
2898bd22949SKevin Hilman
2908bd22949SKevin Hilman	mov	r0, #0
2918bd22949SKevin Hilman	mcr	p15, 0, r0, c7, c5, 4	@ Flush prefetch buffer
2928bd22949SKevin Hilman	mcr	p15, 0, r0, c7, c5, 6	@ Invalidate branch predictor array
2938bd22949SKevin Hilman	mcr	p15, 0, r0, c8, c5, 0	@ Invalidate instruction TLB
2948bd22949SKevin Hilman	mcr	p15, 0, r0, c8, c6, 0	@ Invalidate data TLB
2958bd22949SKevin Hilman	/* Restore control register  but dont enable caches here*/
2968bd22949SKevin Hilman	/* Caches will be enabled after restoring MMU table entry */
2978bd22949SKevin Hilman	ldmia	r3!, {r4}
2988bd22949SKevin Hilman	/* Store previous value of control register in scratchpad */
2998bd22949SKevin Hilman	str	r4, [r5, #0xC8]
3008bd22949SKevin Hilman	ldr	r2, cache_pred_disable_mask
3018bd22949SKevin Hilman	and	r4, r2
3028bd22949SKevin Hilman	mcr	p15, 0, r4, c1, c0, 0
3038bd22949SKevin Hilman
3048bd22949SKevin Hilman	ldmfd	sp!, {r0-r12, pc}		@ restore regs and return
/*
 * Save CPU context to the SDRAM buffer at r0 (kept in r8) before WFI.
 * The store order here must match the ldmia restore order in
 * logic_l1_restore above.
 */
3058bd22949SKevin Hilmansave_context_wfi:
3068bd22949SKevin Hilman	/*b	save_context_wfi*/	@ enable to debug save code
3078bd22949SKevin Hilman	mov	r8, r0 /* Store SDRAM address in r8 */
3088bd22949SKevin Hilman        /* Check what that target sleep state is:stored in r1*/
3098bd22949SKevin Hilman        /* 1 - Only L1 and logic lost */
3108bd22949SKevin Hilman        /* 2 - Only L2 lost */
3118bd22949SKevin Hilman        /* 3 - Both L1 and L2 lost */
3128bd22949SKevin Hilman	cmp	r1, #0x2 /* Only L2 lost */
3138bd22949SKevin Hilman	beq	clean_l2
3148bd22949SKevin Hilman	cmp	r1, #0x1 /* L2 retained */
3158bd22949SKevin Hilman	/* r9 stores whether to clean L2 or not*/
3168bd22949SKevin Hilman	moveq	r9, #0x0 /* Dont Clean L2 */
3178bd22949SKevin Hilman	movne	r9, #0x1 /* Clean L2 */
3188bd22949SKevin Hilmanl1_logic_lost:
3198bd22949SKevin Hilman	/* Store sp and spsr to SDRAM */
3208bd22949SKevin Hilman	mov	r4, sp
3218bd22949SKevin Hilman	mrs	r5, spsr
3228bd22949SKevin Hilman	mov	r6, lr
3238bd22949SKevin Hilman	stmia	r8!, {r4-r6}
3248bd22949SKevin Hilman	/* Save all ARM registers */
3258bd22949SKevin Hilman	/* Coprocessor access control register */
3268bd22949SKevin Hilman	mrc	p15, 0, r6, c1, c0, 2
3278bd22949SKevin Hilman	stmia	r8!, {r6}
3288bd22949SKevin Hilman	/* TTBR0, TTBR1 and Translation table base control */
3298bd22949SKevin Hilman	mrc	p15, 0, r4, c2, c0, 0
3308bd22949SKevin Hilman	mrc	p15, 0, r5, c2, c0, 1
3318bd22949SKevin Hilman	mrc	p15, 0, r6, c2, c0, 2
3328bd22949SKevin Hilman	stmia	r8!, {r4-r6}
3338bd22949SKevin Hilman	/* Domain access control register, data fault status register,
3348bd22949SKevin Hilman	and instruction fault status register */
3358bd22949SKevin Hilman	mrc	p15, 0, r4, c3, c0, 0
3368bd22949SKevin Hilman	mrc	p15, 0, r5, c5, c0, 0
3378bd22949SKevin Hilman	mrc	p15, 0, r6, c5, c0, 1
3388bd22949SKevin Hilman	stmia	r8!, {r4-r6}
3398bd22949SKevin Hilman	/* Data aux fault status register, instruction aux fault status,
3408bd22949SKevin Hilman	data fault address register and instruction fault address register*/
3418bd22949SKevin Hilman	mrc	p15, 0, r4, c5, c1, 0
3428bd22949SKevin Hilman	mrc	p15, 0, r5, c5, c1, 1
3438bd22949SKevin Hilman	mrc	p15, 0, r6, c6, c0, 0
3448bd22949SKevin Hilman	mrc	p15, 0, r7, c6, c0, 2
3458bd22949SKevin Hilman	stmia	r8!, {r4-r7}
3468bd22949SKevin Hilman	/* user r/w thread and process ID, user r/o thread and process ID,
3478bd22949SKevin Hilman	priv only thread and process ID, cache size selection */
3488bd22949SKevin Hilman	mrc	p15, 0, r4, c13, c0, 2
3498bd22949SKevin Hilman	mrc	p15, 0, r5, c13, c0, 3
3508bd22949SKevin Hilman	mrc	p15, 0, r6, c13, c0, 4
3518bd22949SKevin Hilman	mrc	p15, 2, r7, c0, c0, 0
3528bd22949SKevin Hilman	stmia	r8!, {r4-r7}
3538bd22949SKevin Hilman	/* Data TLB lockdown, instruction TLB lockdown registers */
3548bd22949SKevin Hilman	mrc	p15, 0, r5, c10, c0, 0
3558bd22949SKevin Hilman	mrc	p15, 0, r6, c10, c0, 1
3568bd22949SKevin Hilman	stmia	r8!, {r5-r6}
3578bd22949SKevin Hilman	/* Secure or non secure vector base address, FCSE PID, Context PID*/
3588bd22949SKevin Hilman	mrc	p15, 0, r4, c12, c0, 0
3598bd22949SKevin Hilman	mrc	p15, 0, r5, c13, c0, 0
3608bd22949SKevin Hilman	mrc	p15, 0, r6, c13, c0, 1
3618bd22949SKevin Hilman	stmia	r8!, {r4-r6}
3628bd22949SKevin Hilman	/* Primary remap, normal remap registers */
3638bd22949SKevin Hilman	mrc	p15, 0, r4, c10, c2, 0
3648bd22949SKevin Hilman	mrc	p15, 0, r5, c10, c2, 1
3658bd22949SKevin Hilman	stmia	r8!,{r4-r5}
3668bd22949SKevin Hilman
3678bd22949SKevin Hilman	/* Store current cpsr*/
3688bd22949SKevin Hilman	mrs	r2, cpsr
3698bd22949SKevin Hilman	stmia	r8!, {r2}
3708bd22949SKevin Hilman
3718bd22949SKevin Hilman	mrc	p15, 0, r4, c1, c0, 0
3728bd22949SKevin Hilman	/* save control register */
3738bd22949SKevin Hilman	stmia	r8!, {r4}
/*
 * Clean the data/unified caches by set/way (standard ARMv7
 * CLIDR/CCSIDR walk), then execute WFI to enter the low-power state.
 * r9 (set above) selects whether the walk is done at all.
 */
3748bd22949SKevin Hilmanclean_caches:
3758bd22949SKevin Hilman	/* Clean Data or unified cache to POU*/
3768bd22949SKevin Hilman	/* How to invalidate only L1 cache???? - #FIX_ME# */
3778bd22949SKevin Hilman	/* mcr	p15, 0, r11, c7, c11, 1 */
3788bd22949SKevin Hilman	cmp	r9, #1 /* Check whether L2 inval is required or not*/
3798bd22949SKevin Hilman	bne	skip_l2_inval
3808bd22949SKevin Hilmanclean_l2:
3818bd22949SKevin Hilman	/* read clidr */
3828bd22949SKevin Hilman	mrc     p15, 1, r0, c0, c0, 1
3838bd22949SKevin Hilman	/* extract loc from clidr */
3848bd22949SKevin Hilman	ands    r3, r0, #0x7000000
3858bd22949SKevin Hilman	/* left align loc bit field */
3868bd22949SKevin Hilman	mov     r3, r3, lsr #23
3878bd22949SKevin Hilman	/* if loc is 0, then no need to clean */
3888bd22949SKevin Hilman	beq     finished
3898bd22949SKevin Hilman	/* start clean at cache level 0 */
3908bd22949SKevin Hilman	mov     r10, #0
3918bd22949SKevin Hilmanloop1:
3928bd22949SKevin Hilman	/* work out 3x current cache level */
3938bd22949SKevin Hilman	add     r2, r10, r10, lsr #1
3948bd22949SKevin Hilman	/* extract cache type bits from clidr*/
3958bd22949SKevin Hilman	mov     r1, r0, lsr r2
3968bd22949SKevin Hilman	/* mask of the bits for current cache only */
3978bd22949SKevin Hilman	and     r1, r1, #7
3988bd22949SKevin Hilman	/* see what cache we have at this level */
3998bd22949SKevin Hilman	cmp     r1, #2
4008bd22949SKevin Hilman	/* skip if no cache, or just i-cache */
4018bd22949SKevin Hilman	blt     skip
4028bd22949SKevin Hilman	/* select current cache level in cssr */
4038bd22949SKevin Hilman	mcr     p15, 2, r10, c0, c0, 0
4048bd22949SKevin Hilman	/* isb to sych the new cssr&csidr */
4058bd22949SKevin Hilman	isb
4068bd22949SKevin Hilman	/* read the new csidr */
4078bd22949SKevin Hilman	mrc     p15, 1, r1, c0, c0, 0
4088bd22949SKevin Hilman	/* extract the length of the cache lines */
4098bd22949SKevin Hilman	and     r2, r1, #7
4108bd22949SKevin Hilman	/* add 4 (line length offset) */
4118bd22949SKevin Hilman	add     r2, r2, #4
4128bd22949SKevin Hilman	ldr     r4, assoc_mask
4138bd22949SKevin Hilman	/* find maximum number on the way size */
4148bd22949SKevin Hilman	ands    r4, r4, r1, lsr #3
4158bd22949SKevin Hilman	/* find bit position of way size increment */
4168bd22949SKevin Hilman	clz     r5, r4
4178bd22949SKevin Hilman	ldr     r7, numset_mask
4188bd22949SKevin Hilman	/* extract max number of the index size*/
4198bd22949SKevin Hilman	ands    r7, r7, r1, lsr #13
4208bd22949SKevin Hilmanloop2:
	/* r9 is reused as the way counter here — its clean-L2 flag role
	 * (tested at clean_caches) is finished by this point */
4218bd22949SKevin Hilman	mov     r9, r4
4228bd22949SKevin Hilman	/* create working copy of max way size*/
4238bd22949SKevin Hilmanloop3:
4248bd22949SKevin Hilman	/* factor way and cache number into r11 */
4258bd22949SKevin Hilman	orr     r11, r10, r9, lsl r5
4268bd22949SKevin Hilman	/* factor index number into r11 */
4278bd22949SKevin Hilman	orr     r11, r11, r7, lsl r2
4288bd22949SKevin Hilman	/* clean line by set/way (DCCSW, c7/c10/2 — clean only) */
4298bd22949SKevin Hilman	mcr     p15, 0, r11, c7, c10, 2
4308bd22949SKevin Hilman	/* decrement the way*/
4318bd22949SKevin Hilman	subs    r9, r9, #1
4328bd22949SKevin Hilman	bge     loop3
4338bd22949SKevin Hilman	/*decrement the index */
4348bd22949SKevin Hilman	subs    r7, r7, #1
4358bd22949SKevin Hilman	bge     loop2
4368bd22949SKevin Hilmanskip:
4378bd22949SKevin Hilman	add     r10, r10, #2
4388bd22949SKevin Hilman	/* increment cache number */
4398bd22949SKevin Hilman	cmp     r3, r10
4408bd22949SKevin Hilman	bgt     loop1
4418bd22949SKevin Hilmanfinished:
4428bd22949SKevin Hilman	/*switch back to cache level 0 */
4438bd22949SKevin Hilman	mov     r10, #0
4448bd22949SKevin Hilman	/* select current cache level in cssr */
4458bd22949SKevin Hilman	mcr     p15, 2, r10, c0, c0, 0
4468bd22949SKevin Hilman	isb
4478bd22949SKevin Hilmanskip_l2_inval:
4488bd22949SKevin Hilman	/* Data memory barrier and Data sync barrier */
4498bd22949SKevin Hilman	mov     r1, #0
4508bd22949SKevin Hilman	mcr     p15, 0, r1, c7, c10, 4
4518bd22949SKevin Hilman	mcr     p15, 0, r1, c7, c10, 5
4528bd22949SKevin Hilman
4538bd22949SKevin Hilman	wfi                             @ wait for interrupt
	/* NOTE(review): nop padding after WFI, as on the plain-idle path */
4548bd22949SKevin Hilman	nop
4558bd22949SKevin Hilman	nop
4568bd22949SKevin Hilman	nop
4578bd22949SKevin Hilman	nop
4588bd22949SKevin Hilman	nop
4598bd22949SKevin Hilman	nop
4608bd22949SKevin Hilman	nop
4618bd22949SKevin Hilman	nop
4628bd22949SKevin Hilman	nop
4638bd22949SKevin Hilman	nop
	/* delay, then take SDRC back out of self-refresh (clears bit 0x40) */
4648bd22949SKevin Hilman	bl i_dll_wait
4658bd22949SKevin Hilman	/* restore regs and return */
4668bd22949SKevin Hilman	ldmfd   sp!, {r0-r12, pc}
4678bd22949SKevin Hilman
/*
 * Busy-wait for clk_stabilize_delay (0x1FF) iterations, then clear the
 * self-refresh-on-idle bit (0x40) in SDRC_POWER that was set before WFI.
 * NOTE(review): the delay presumably lets the SDRC DLL relock after
 * wakeup — confirm against the OMAP34xx SDRC documentation.
 * Clobbers: r4, r5, flags.
 */
4688bd22949SKevin Hilmani_dll_wait:
4698bd22949SKevin Hilman	ldr     r4, clk_stabilize_delay
4708bd22949SKevin Hilman
4718bd22949SKevin Hilmani_dll_delay:
4728bd22949SKevin Hilman	subs    r4, r4, #0x1
4738bd22949SKevin Hilman	bne     i_dll_delay
4748bd22949SKevin Hilman	ldr     r4, sdrc_power
4758bd22949SKevin Hilman	ldr     r5, [r4]
4768bd22949SKevin Hilman	bic     r5, r5, #0x40
4778bd22949SKevin Hilman	str     r5, [r4]
4788bd22949SKevin Hilman	bx	lr
/*
 * Literal pool for omap34xx_cpu_suspend.  Kept inside the function's
 * span (before the _sz word) so the PC-relative ldr's above still
 * reach these values after the whole block is copied to SRAM.
 */
4798bd22949SKevin Hilmanpm_prepwstst_core:
4808bd22949SKevin Hilman	.word	PM_PREPWSTST_CORE_V
4818bd22949SKevin Hilmanpm_prepwstst_mpu:
4828bd22949SKevin Hilman	.word	PM_PREPWSTST_MPU_V
4838bd22949SKevin Hilmanpm_pwstctrl_mpu:
4848bd22949SKevin Hilman	.word	PM_PWSTCTRL_MPU_P
4858bd22949SKevin Hilmanscratchpad_base:
4868bd22949SKevin Hilman	.word	SCRATCHPAD_BASE_P
4878bd22949SKevin Hilmansdrc_power:
4888bd22949SKevin Hilman	.word SDRC_POWER_V
4898bd22949SKevin Hilmanclk_stabilize_delay:
4908bd22949SKevin Hilman	.word 0x000001FF
4918bd22949SKevin Hilmanassoc_mask:
4928bd22949SKevin Hilman	.word	0x3ff
4938bd22949SKevin Hilmannumset_mask:
4948bd22949SKevin Hilman	.word	0x7fff
4958bd22949SKevin Hilmanttbrbit_mask:
4968bd22949SKevin Hilman	.word	0xFFFFC000
4978bd22949SKevin Hilmantable_index_mask:
4988bd22949SKevin Hilman	.word	0xFFF00000
4998bd22949SKevin Hilmantable_entry:
5008bd22949SKevin Hilman	.word	0x00000C02
5018bd22949SKevin Hilmancache_pred_disable_mask:
5028bd22949SKevin Hilman	.word	0xFFFFE7FB
50327d59a4aSTero Kristocontrol_stat:
50427d59a4aSTero Kristo	.word	CONTROL_STAT
/* Byte size of the suspend code + literals, for the copy-to-SRAM code */
5058bd22949SKevin HilmanENTRY(omap34xx_cpu_suspend_sz)
5068bd22949SKevin Hilman	.word	. - omap34xx_cpu_suspend
507