/*
 * linux/arch/arm/mach-omap2/sleep34xx.S
 *
 * (C) Copyright 2007
 * Texas Instruments
 * Karthik Dasu <karthik-dp@ti.com>
 *
 * (C) Copyright 2004
 * Texas Instruments, <www.ti.com>
 * Richard Woodruff <r-woodruff2@ti.com>
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 2 of
 * the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston,
 * MA 02111-1307 USA
 */
#include <linux/linkage.h>
#include <asm/assembler.h>
#include <mach/io.h>
#include <plat/control.h>

#include "cm.h"
#include "prm.h"
#include "sdrc.h"

#define PM_PREPWSTST_CORE_V	OMAP34XX_PRM_REGADDR(CORE_MOD, \
				OMAP3430_PM_PREPWSTST)
#define PM_PREPWSTST_CORE_P	0x48306AE8
#define PM_PREPWSTST_MPU_V	OMAP34XX_PRM_REGADDR(MPU_MOD, \
				OMAP3430_PM_PREPWSTST)
#define PM_PWSTCTRL_MPU_P	OMAP3430_PRM_BASE + MPU_MOD + PM_PWSTCTRL
#define CM_IDLEST1_CORE_V	OMAP34XX_CM_REGADDR(CORE_MOD, CM_IDLEST1)
#define SRAM_BASE_P		0x40200000
#define CONTROL_STAT		0x480022F0
#define SCRATCHPAD_MEM_OFFS	0x310 /* Move this once a correct place
				       * is available */
#define SCRATCHPAD_BASE_P	(OMAP343X_CTRL_BASE + OMAP343X_CONTROL_MEM_WKUP\
						+ SCRATCHPAD_MEM_OFFS)
#define SDRC_POWER_V		OMAP34XX_SDRC_REGADDR(SDRC_POWER)
#define SDRC_SYSCONFIG_P	(OMAP343X_SDRC_BASE + SDRC_SYSCONFIG)
#define SDRC_MR_0_P		(OMAP343X_SDRC_BASE + SDRC_MR_0)
#define SDRC_EMR2_0_P		(OMAP343X_SDRC_BASE + SDRC_EMR2_0)
#define SDRC_MANUAL_0_P		(OMAP343X_SDRC_BASE + SDRC_MANUAL_0)
#define SDRC_MR_1_P		(OMAP343X_SDRC_BASE + SDRC_MR_1)
#define SDRC_EMR2_1_P		(OMAP343X_SDRC_BASE + SDRC_EMR2_1)
#define SDRC_MANUAL_1_P		(OMAP343X_SDRC_BASE + SDRC_MANUAL_1)
#define SDRC_DLLA_STATUS_V	OMAP34XX_SDRC_REGADDR(SDRC_DLLA_STATUS)
#define SDRC_DLLA_CTRL_V	OMAP34XX_SDRC_REGADDR(SDRC_DLLA_CTRL)

	.text
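/*
 * The helpers below return (in r0) the address of the corresponding
 * restore entry point used when resuming from OFF.  The *_sz entries that
 * follow each routine in this file are ".word . - <routine>", i.e. the
 * routine's size in bytes, which the PM code can use when relocating or
 * copying these routines (for example into SRAM or the scratchpad).
 */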
/* Function call to get the restore pointer for resume from OFF */
ENTRY(get_restore_pointer)
	stmfd	sp!, {lr}	@ save registers on stack
	adr	r0, restore
	ldmfd	sp!, {pc}	@ restore regs and return
ENTRY(get_restore_pointer_sz)
	.word	. - get_restore_pointer

	.text
/* Function call to get the restore pointer for ES3 to resume from OFF */
ENTRY(get_es3_restore_pointer)
	stmfd	sp!, {lr}	@ save registers on stack
	adr	r0, restore_es3
	ldmfd	sp!, {pc}	@ restore regs and return
ENTRY(get_es3_restore_pointer_sz)
	.word	. - get_es3_restore_pointer

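/*
 * es3_sdrc_fix: used on ES3.x parts when resuming from OFF (see
 * restore_es3 below, which copies this routine into SRAM and runs it from
 * there).  It conditionally clears bit 8 of SDRC_SYSCONFIG, rewrites the
 * MR/EMR2 registers of both SDRC chip-selects with their current values
 * and issues a manual autorefresh command to each, presumably as an SDRC
 * auto-refresh workaround needed after OFF.
 */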
ENTRY(es3_sdrc_fix)
	ldr	r4, sdrc_syscfg		@ get config addr
	ldr	r5, [r4]		@ get value
	tst	r5, #0x100		@ is part access blocked
	it	eq
	biceq	r5, r5, #0x100		@ clear bit if set
	str	r5, [r4]		@ write back change
	ldr	r4, sdrc_mr_0		@ get config addr
	ldr	r5, [r4]		@ get value
	str	r5, [r4]		@ write back change
	ldr	r4, sdrc_emr2_0		@ get config addr
	ldr	r5, [r4]		@ get value
	str	r5, [r4]		@ write back change
	ldr	r4, sdrc_manual_0	@ get config addr
	mov	r5, #0x2		@ autorefresh command
	str	r5, [r4]		@ kick off refreshes
	ldr	r4, sdrc_mr_1		@ get config addr
	ldr	r5, [r4]		@ get value
	str	r5, [r4]		@ write back change
	ldr	r4, sdrc_emr2_1		@ get config addr
	ldr	r5, [r4]		@ get value
	str	r5, [r4]		@ write back change
	ldr	r4, sdrc_manual_1	@ get config addr
	mov	r5, #0x2		@ autorefresh command
	str	r5, [r4]		@ kick off refreshes
	bx	lr
sdrc_syscfg:
	.word	SDRC_SYSCONFIG_P
sdrc_mr_0:
	.word	SDRC_MR_0_P
sdrc_emr2_0:
	.word	SDRC_EMR2_0_P
sdrc_manual_0:
	.word	SDRC_MANUAL_0_P
sdrc_mr_1:
	.word	SDRC_MR_1_P
sdrc_emr2_1:
	.word	SDRC_EMR2_1_P
sdrc_manual_1:
	.word	SDRC_MANUAL_1_P
ENTRY(es3_sdrc_fix_sz)
	.word	. - es3_sdrc_fix

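/*
 * Note on the secure service calls in this file: r12 carries the secure
 * service ID, r1 a task ID for the ROM code, r2/r6 flag values and r3 the
 * address of a small parameter block (see the code comments below).  The
 * ".word 0xE1600071" / ".word 0xE1600070" constants are the raw encodings
 * of the ARM SMC #1 / SMC #0 (formerly SMI) instructions, emitted as data
 * presumably because older assemblers do not accept the mnemonic.
 */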
/* Function to call rom code to save secure ram context */
ENTRY(save_secure_ram_context)
	stmfd	sp!, {r1-r12, lr}	@ save registers on stack
save_secure_ram_debug:
	/* b save_secure_ram_debug */	@ enable to debug save code
	adr	r3, api_params		@ r3 points to parameters
	str	r0, [r3,#0x4]		@ r0 has sdram address
	ldr	r12, high_mask
	and	r3, r3, r12
	ldr	r12, sram_phy_addr_mask
	orr	r3, r3, r12
	mov	r0, #25			@ set service ID for PPA
	mov	r12, r0			@ copy secure service ID in r12
	mov	r1, #0			@ set task id for ROM code in r1
	mov	r2, #4			@ set some flags in r2, r6
	mov	r6, #0xff
	mcr	p15, 0, r0, c7, c10, 4	@ data write barrier
	mcr	p15, 0, r0, c7, c10, 5	@ data memory barrier
	.word	0xE1600071		@ call SMI monitor (smi #1)
	nop
	nop
	nop
	nop
	ldmfd	sp!, {r1-r12, pc}
sram_phy_addr_mask:
	.word	SRAM_BASE_P
high_mask:
	.word	0xffff
api_params:
	.word	0x4, 0x0, 0x0, 0x1, 0x1
ENTRY(save_secure_ram_context_sz)
	.word	. - save_secure_ram_context

/*
 * Forces OMAP into idle state
 *
 * omap34xx_cpu_suspend() - This bit of code just executes the WFI
 * for normal idles.
 *
 * Note: This code gets copied to internal SRAM at boot. When the OMAP
 *	 wakes up it continues execution at the point it went to sleep.
 */
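/*
 * Note: since this code is copied to and run from internal SRAM, it
 * reaches all of its data through PC-relative literal loads (the .word
 * pools further down in this file) rather than through kernel symbols.
 */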
ENTRY(omap34xx_cpu_suspend)
	stmfd	sp!, {r0-r12, lr}		@ save registers on stack
loop:
	/*b	loop*/	@ Enable to debug by stepping through code
	/* r0 contains restore pointer in sdram */
	/* r1 contains information about saving context */
	ldr	r4, sdrc_power		@ read the SDRC_POWER register
	ldr	r5, [r4]		@ read the contents of SDRC_POWER
	orr	r5, r5, #0x40		@ enable self refresh on idle req
	str	r5, [r4]		@ write back to SDRC_POWER register

	cmp	r1, #0x0
	/* If context save is required, do that and execute wfi */
	bne	save_context_wfi
	/* Data memory barrier and Data sync barrier */
	mov	r1, #0
	mcr	p15, 0, r1, c7, c10, 4
	mcr	p15, 0, r1, c7, c10, 5

	wfi				@ wait for interrupt

	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	bl	wait_sdrc_ok

	ldmfd	sp!, {r0-r12, pc}		@ restore regs and return
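/*
 * restore_es3: wake-up entry point for ES3.x parts resuming from OFF.
 * If the previous power state of the CORE domain was OFF, copy
 * es3_sdrc_fix into SRAM word by word (the "ror #2" turns the word-aligned
 * byte size into a word count) and execute it there, then fall through to
 * the common restore path.
 */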
restore_es3:
	/*b restore_es3*/		@ Enable to debug restore code
	ldr	r5, pm_prepwstst_core_p
	ldr	r4, [r5]
	and	r4, r4, #0x3
	cmp	r4, #0x0	@ Check if previous power state of CORE is OFF
	bne	restore
	adr	r0, es3_sdrc_fix
	ldr	r1, sram_base
	ldr	r2, es3_sdrc_fix_sz
	mov	r2, r2, ror #2
copy_to_sram:
	ldmia	r0!, {r3}	@ val = *src
	stmia	r1!, {r3}	@ *dst = val
	subs	r2, r2, #0x1	@ num_words--
	bne	copy_to_sram
	ldr	r1, sram_base
	blx	r1
restore:
	/* b restore*/	@ Enable to debug restore code
	/* Check what was the reason for MPU reset and store the reason in r9 */
	/* 1 - Only L1 and logic lost */
	/* 2 - Only L2 lost - In this case, we won't be here */
	/* 3 - Both L1 and L2 lost */
	ldr	r1, pm_pwstctrl_mpu
	ldr	r2, [r1]
	and	r2, r2, #0x3
	cmp	r2, #0x0	@ Check if target power state was OFF or RET
	moveq	r9, #0x3	@ MPU OFF => L1 and L2 lost
	movne	r9, #0x1	@ Only L1 and logic lost => avoid L2 invalidation
	bne	logic_l1_restore
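	/*
	 * We only get here when the MPU hit OFF, so L2 has been lost and
	 * must be invalidated.  The device type field of CONTROL_STATUS
	 * selects the mechanism: 0x300 takes the direct l2_inv_gp path
	 * (GP devices), anything else goes through the secure PPA
	 * services (IDs 40 and 42) via the SMI monitor.
	 */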
	ldr	r0, control_stat
	ldr	r1, [r0]
	and	r1, #0x700
	cmp	r1, #0x300
	beq	l2_inv_gp
	mov	r0, #40		@ set service ID for PPA
	mov	r12, r0		@ copy secure Service ID in r12
	mov	r1, #0		@ set task id for ROM code in r1
	mov	r2, #4		@ set some flags in r2, r6
	mov	r6, #0xff
	adr	r3, l2_inv_api_params	@ r3 points to dummy parameters
	mcr	p15, 0, r0, c7, c10, 4	@ data write barrier
	mcr	p15, 0, r0, c7, c10, 5	@ data memory barrier
	.word	0xE1600071		@ call SMI monitor (smi #1)
	/* Write to Aux control register to set some bits */
	mov	r0, #42		@ set service ID for PPA
	mov	r12, r0		@ copy secure Service ID in r12
	mov	r1, #0		@ set task id for ROM code in r1
	mov	r2, #4		@ set some flags in r2, r6
	mov	r6, #0xff
	adr	r3, write_aux_control_params	@ r3 points to parameters
	mcr	p15, 0, r0, c7, c10, 4	@ data write barrier
	mcr	p15, 0, r0, c7, c10, 5	@ data memory barrier
	.word	0xE1600071		@ call SMI monitor (smi #1)

	b	logic_l1_restore
l2_inv_api_params:
	.word	0x1, 0x00
write_aux_control_params:
	.word	0x1, 0x72
l2_inv_gp:
	/* Execute smi to invalidate L2 cache */
	mov	r12, #0x1		@ set up to invalidate L2
smi:	.word	0xE1600070		@ Call SMI monitor (smieq)
	/* Write to Aux control register to set some bits */
	mov	r0, #0x72
	mov	r12, #0x3
	.word	0xE1600070	@ Call SMI monitor (smieq)
logic_l1_restore:
	mov	r1, #0
	/* Invalidate all instruction caches to PoU
	 * and flush branch target cache */
	mcr	p15, 0, r1, c7, c5, 0

	ldr	r4, scratchpad_base
	ldr	r3, [r4,#0xBC]
	ldmia	r3!, {r4-r6}
	mov	sp, r4
	msr	spsr_cxsf, r5
	mov	lr, r6

	ldmia	r3!, {r4-r9}
	/* Coprocessor access Control Register */
	mcr	p15, 0, r4, c1, c0, 2

	/* TTBR0 */
	MCR	p15, 0, r5, c2, c0, 0
	/* TTBR1 */
	MCR	p15, 0, r6, c2, c0, 1
	/* Translation table base control register */
	MCR	p15, 0, r7, c2, c0, 2
	/* Domain access Control Register */
	MCR	p15, 0, r8, c3, c0, 0
	/* Data fault status Register */
	MCR	p15, 0, r9, c5, c0, 0

	ldmia	r3!, {r4-r8}
	/* Instruction fault status Register */
	MCR	p15, 0, r4, c5, c0, 1
	/* Data Auxiliary Fault Status Register */
	MCR	p15, 0, r5, c5, c1, 0
	/* Instruction Auxiliary Fault Status Register */
	MCR	p15, 0, r6, c5, c1, 1
	/* Data Fault Address Register */
	MCR	p15, 0, r7, c6, c0, 0
	/* Instruction Fault Address Register */
	MCR	p15, 0, r8, c6, c0, 2
	ldmia	r3!, {r4-r7}

	/* user r/w thread and process ID */
	MCR	p15, 0, r4, c13, c0, 2
	/* user r/o thread and process ID */
	MCR	p15, 0, r5, c13, c0, 3
	/* Privileged only thread and process ID */
	MCR	p15, 0, r6, c13, c0, 4
	/* Cache size selection */
	MCR	p15, 2, r7, c0, c0, 0
	ldmia	r3!, {r4-r8}
	/* Data TLB lockdown registers */
	MCR	p15, 0, r4, c10, c0, 0
	/* Instruction TLB lockdown registers */
	MCR	p15, 0, r5, c10, c0, 1
	/* Secure or Nonsecure Vector Base Address */
	MCR	p15, 0, r6, c12, c0, 0
	/* FCSE PID */
	MCR	p15, 0, r7, c13, c0, 0
	/* Context PID */
	MCR	p15, 0, r8, c13, c0, 1

	ldmia	r3!, {r4-r5}
	/* Primary memory remap register */
	MCR	p15, 0, r4, c10, c2, 0
	/* Normal memory remap register */
	MCR	p15, 0, r5, c10, c2, 1

	/* Restore cpsr */
	ldmia	r3!, {r4}	/* load CPSR from SDRAM */
	msr	cpsr, r4	/* store cpsr */

	/* Enabling MMU here */
	mrc	p15, 0, r7, c2, c0, 2	/* Read TTBRControl */
	/* Extract N (0:2) bits and decide whether to use TTBR0 or TTBR1 */
	and	r7, #0x7
	cmp	r7, #0x0
	beq	usettbr0
ttbr_error:
	/* More work needs to be done to support N[0:2] values other than 0.
	 * Loop here so that the error can be detected.
	 */
	b	ttbr_error
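/*
 * usettbr0: the MMU is about to be re-enabled while this code runs from a
 * physical address, so a 1:1 section mapping covering the current PC is
 * written into the translation table pointed to by TTBR0.  The original
 * entry and its address are stashed in the scratchpad (offsets 0xC0/0xC4),
 * and the saved control register value is parked at 0xC8, so that the
 * caches and the original mapping can be restored once the MMU is back on.
 */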
usettbr0:
	mrc	p15, 0, r2, c2, c0, 0
	ldr	r5, ttbrbit_mask
	and	r2, r5
	mov	r4, pc
	ldr	r5, table_index_mask
	and	r4, r5 /* r4 = bits 31 to 20 of pc */
	/* Extract the value to be written to table entry */
	ldr	r1, table_entry
	add	r1, r1, r4 /* r1 has value to be written to table entry */
	/* Getting the address of table entry to modify */
	lsr	r4, #18
	add	r2, r4 /* r2 has the location which needs to be modified */
	/* Storing previous entry of location being modified */
	ldr	r5, scratchpad_base
	ldr	r4, [r2]
	str	r4, [r5, #0xC0]
	/* Modify the table entry */
	str	r1, [r2]
	/* Storing address of entry being modified
	 * - will be restored after enabling MMU */
	ldr	r5, scratchpad_base
	str	r2, [r5, #0xC4]

	mov	r0, #0
	mcr	p15, 0, r0, c7, c5, 4	@ Flush prefetch buffer
	mcr	p15, 0, r0, c7, c5, 6	@ Invalidate branch predictor array
	mcr	p15, 0, r0, c8, c5, 0	@ Invalidate instruction TLB
	mcr	p15, 0, r0, c8, c6, 0	@ Invalidate data TLB
	/* Restore control register but don't enable caches here */
	/* Caches will be enabled after restoring MMU table entry */
	ldmia	r3!, {r4}
	/* Store previous value of control register in scratchpad */
	str	r4, [r5, #0xC8]
	ldr	r2, cache_pred_disable_mask
	and	r4, r2
	mcr	p15, 0, r4, c1, c0, 0

	ldmfd	sp!, {r0-r12, pc}		@ restore regs and return
save_context_wfi:
	/*b	save_context_wfi*/	@ enable to debug save code
	mov	r8, r0 /* Store SDRAM address in r8 */
	/* Check what the target sleep state is: stored in r1 */
	/* 1 - Only L1 and logic lost */
	/* 2 - Only L2 lost */
	/* 3 - Both L1 and L2 lost */
	cmp	r1, #0x2 /* Only L2 lost */
	beq	clean_l2
	cmp	r1, #0x1 /* L2 retained */
	/* r9 stores whether to clean L2 or not */
	moveq	r9, #0x0 /* Don't clean L2 */
	movne	r9, #0x1 /* Clean L2 */
l1_logic_lost:
	/* Store sp and spsr to SDRAM */
	mov	r4, sp
	mrs	r5, spsr
	mov	r6, lr
	stmia	r8!, {r4-r6}
	/* Save all ARM registers */
	/* Coprocessor access control register */
	mrc	p15, 0, r6, c1, c0, 2
	stmia	r8!, {r6}
	/* TTBR0, TTBR1 and Translation table base control */
	mrc	p15, 0, r4, c2, c0, 0
	mrc	p15, 0, r5, c2, c0, 1
	mrc	p15, 0, r6, c2, c0, 2
	stmia	r8!, {r4-r6}
	/* Domain access control register, data fault status register,
	 * and instruction fault status register */
	mrc	p15, 0, r4, c3, c0, 0
	mrc	p15, 0, r5, c5, c0, 0
	mrc	p15, 0, r6, c5, c0, 1
	stmia	r8!, {r4-r6}
	/* Data aux fault status register, instruction aux fault status,
	 * data fault address register and instruction fault address register */
	mrc	p15, 0, r4, c5, c1, 0
	mrc	p15, 0, r5, c5, c1, 1
	mrc	p15, 0, r6, c6, c0, 0
	mrc	p15, 0, r7, c6, c0, 2
	stmia	r8!, {r4-r7}
	/* user r/w thread and process ID, user r/o thread and process ID,
	 * priv only thread and process ID, cache size selection */
	mrc	p15, 0, r4, c13, c0, 2
	mrc	p15, 0, r5, c13, c0, 3
	mrc	p15, 0, r6, c13, c0, 4
	mrc	p15, 2, r7, c0, c0, 0
	stmia	r8!, {r4-r7}
	/* Data TLB lockdown, instruction TLB lockdown registers */
	mrc	p15, 0, r5, c10, c0, 0
	mrc	p15, 0, r6, c10, c0, 1
	stmia	r8!, {r5-r6}
	/* Secure or non secure vector base address, FCSE PID, Context PID */
	mrc	p15, 0, r4, c12, c0, 0
	mrc	p15, 0, r5, c13, c0, 0
	mrc	p15, 0, r6, c13, c0, 1
	stmia	r8!, {r4-r6}
	/* Primary remap, normal remap registers */
	mrc	p15, 0, r4, c10, c2, 0
	mrc	p15, 0, r5, c10, c2, 1
	stmia	r8!, {r4-r5}

	/* Store current cpsr */
	mrs	r2, cpsr
	stmia	r8!, {r2}

	mrc	p15, 0, r4, c1, c0, 0
	/* save control register */
	stmia	r8!, {r4}
clean_caches:
	/* Clean Data or unified cache to PoU */
	/* How to invalidate only L1 cache???? - #FIX_ME# */
	/* mcr	p15, 0, r11, c7, c11, 1 */
	cmp	r9, #1 /* Check whether L2 inval is required or not */
	bne	skip_l2_inval
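/*
 * clean_l2: the usual ARMv7 CLIDR/CCSIDR-driven set/way walk (the same
 * scheme as the v7 cache maintenance code): iterate over every data or
 * unified cache level up to the Level of Coherency and clean each
 * set/way.  Note that only a clean (DCCSW) is performed here, not a
 * clean+invalidate.
 */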
clean_l2:
	/* read clidr */
	mrc	p15, 1, r0, c0, c0, 1
	/* extract loc from clidr */
	ands	r3, r0, #0x7000000
	/* left align loc bit field */
	mov	r3, r3, lsr #23
	/* if loc is 0, then no need to clean */
	beq	finished
	/* start clean at cache level 0 */
	mov	r10, #0
loop1:
	/* work out 3x current cache level */
	add	r2, r10, r10, lsr #1
	/* extract cache type bits from clidr */
	mov	r1, r0, lsr r2
	/* mask of the bits for current cache only */
	and	r1, r1, #7
	/* see what cache we have at this level */
	cmp	r1, #2
	/* skip if no cache, or just i-cache */
	blt	skip
	/* select current cache level in cssr */
	mcr	p15, 2, r10, c0, c0, 0
	/* isb to sync the new cssr & csidr */
	isb
	/* read the new csidr */
	mrc	p15, 1, r1, c0, c0, 0
	/* extract the length of the cache lines */
	and	r2, r1, #7
	/* add 4 (line length offset) */
	add	r2, r2, #4
	ldr	r4, assoc_mask
	/* find the maximum way number (associativity - 1) */
	ands	r4, r4, r1, lsr #3
	/* find bit position of way size increment */
	clz	r5, r4
	ldr	r7, numset_mask
	/* extract the maximum set index (number of sets - 1) */
	ands	r7, r7, r1, lsr #13
loop2:
	mov	r9, r4
	/* create working copy of max way size */
loop3:
	/* factor way and cache number into r11 */
	orr	r11, r10, r9, lsl r5
	/* factor index number into r11 */
	orr	r11, r11, r7, lsl r2
	/* clean by set/way */
	mcr	p15, 0, r11, c7, c10, 2
	/* decrement the way */
	subs	r9, r9, #1
	bge	loop3
	/* decrement the index */
	subs	r7, r7, #1
	bge	loop2
skip:
	add	r10, r10, #2
	/* increment cache number */
	cmp	r3, r10
	bgt	loop1
finished:
	/* switch back to cache level 0 */
	mov	r10, #0
	/* select current cache level in cssr */
	mcr	p15, 2, r10, c0, c0, 0
	isb
skip_l2_inval:
	/* Data memory barrier and Data sync barrier */
	mov	r1, #0
	mcr	p15, 0, r1, c7, c10, 4
	mcr	p15, 0, r1, c7, c10, 5

	wfi				@ wait for interrupt
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	bl	wait_sdrc_ok
	/* restore regs and return */
	ldmfd	sp!, {r0-r12, pc}

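/*
 * wait_sdrc_ok: called from both WFI wake-up paths above before anything
 * touches SDRAM again.  Spin until CM_IDLEST1_CORE shows the SDRC as
 * accessible, clear the "self refresh on idle request" bit that was set
 * in SDRC_POWER before WFI, then check the SDRC DLL and, if required,
 * wait for DLLA_STATUS to report lock before returning.
 */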
/* Make sure SDRC accesses are ok */
wait_sdrc_ok:
	ldr	r4, cm_idlest1_core
	ldr	r5, [r4]
	and	r5, r5, #0x2
	cmp	r5, #0
	bne	wait_sdrc_ok
	ldr	r4, sdrc_power
	ldr	r5, [r4]
	bic	r5, r5, #0x40
	str	r5, [r4]
wait_dll_lock:
	/* Is dll in lock mode? */
	ldr	r4, sdrc_dlla_ctrl
	ldr	r5, [r4]
	tst	r5, #0x4
	bxne	lr
	/* wait till dll locks */
	ldr	r4, sdrc_dlla_status
	ldr	r5, [r4]
	and	r5, r5, #0x4
	cmp	r5, #0x4
	bne	wait_dll_lock
	bx	lr

cm_idlest1_core:
	.word	CM_IDLEST1_CORE_V
sdrc_dlla_status:
	.word	SDRC_DLLA_STATUS_V
sdrc_dlla_ctrl:
	.word	SDRC_DLLA_CTRL_V
pm_prepwstst_core:
	.word	PM_PREPWSTST_CORE_V
pm_prepwstst_core_p:
	.word	PM_PREPWSTST_CORE_P
pm_prepwstst_mpu:
	.word	PM_PREPWSTST_MPU_V
pm_pwstctrl_mpu:
	.word	PM_PWSTCTRL_MPU_P
scratchpad_base:
	.word	SCRATCHPAD_BASE_P
sram_base:
	.word	SRAM_BASE_P + 0x8000
sdrc_power:
	.word	SDRC_POWER_V
clk_stabilize_delay:
	.word	0x000001FF
assoc_mask:
	.word	0x3ff
numset_mask:
	.word	0x7fff
ttbrbit_mask:
	.word	0xFFFFC000
table_index_mask:
	.word	0xFFF00000
table_entry:
	.word	0x00000C02
cache_pred_disable_mask:
	.word	0xFFFFE7FB
control_stat:
	.word	CONTROL_STAT
ENTRY(omap34xx_cpu_suspend_sz)
	.word	. - omap34xx_cpu_suspend