/*
 * linux/arch/arm/mach-omap2/sleep34xx.S
 *
 * (C) Copyright 2007
 * Texas Instruments
 * Karthik Dasu <karthik-dp@ti.com>
 *
 * (C) Copyright 2004
 * Texas Instruments, <www.ti.com>
 * Richard Woodruff <r-woodruff2@ti.com>
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 2 of
 * the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston,
 * MA 02111-1307 USA
 */
#include <linux/linkage.h>
#include <asm/assembler.h>
#include <mach/io.h>
#include <plat/control.h>

#include "cm.h"
#include "prm.h"
#include "sdrc.h"

#define SDRC_SCRATCHPAD_SEM_V	0xfa00291c

#define PM_PREPWSTST_CORE_V	OMAP34XX_PRM_REGADDR(CORE_MOD, \
				OMAP3430_PM_PREPWSTST)
#define PM_PREPWSTST_CORE_P	0x48306AE8
#define PM_PREPWSTST_MPU_V	OMAP34XX_PRM_REGADDR(MPU_MOD, \
				OMAP3430_PM_PREPWSTST)
#define PM_PWSTCTRL_MPU_P	OMAP3430_PRM_BASE + MPU_MOD + OMAP2_PM_PWSTCTRL
#define CM_IDLEST1_CORE_V	OMAP34XX_CM_REGADDR(CORE_MOD, CM_IDLEST1)
#define SRAM_BASE_P		0x40200000
#define CONTROL_STAT		0x480022F0
#define SCRATCHPAD_MEM_OFFS	0x310 /* Move this once a proper
				       * location is available */
#define SCRATCHPAD_BASE_P	(OMAP343X_CTRL_BASE + OMAP343X_CONTROL_MEM_WKUP\
						+ SCRATCHPAD_MEM_OFFS)
#define SDRC_POWER_V		OMAP34XX_SDRC_REGADDR(SDRC_POWER)
#define SDRC_SYSCONFIG_P	(OMAP343X_SDRC_BASE + SDRC_SYSCONFIG)
#define SDRC_MR_0_P		(OMAP343X_SDRC_BASE + SDRC_MR_0)
#define SDRC_EMR2_0_P		(OMAP343X_SDRC_BASE + SDRC_EMR2_0)
#define SDRC_MANUAL_0_P		(OMAP343X_SDRC_BASE + SDRC_MANUAL_0)
#define SDRC_MR_1_P		(OMAP343X_SDRC_BASE + SDRC_MR_1)
#define SDRC_EMR2_1_P		(OMAP343X_SDRC_BASE + SDRC_EMR2_1)
#define SDRC_MANUAL_1_P		(OMAP343X_SDRC_BASE + SDRC_MANUAL_1)
#define SDRC_DLLA_STATUS_V	OMAP34XX_SDRC_REGADDR(SDRC_DLLA_STATUS)
#define SDRC_DLLA_CTRL_V	OMAP34XX_SDRC_REGADDR(SDRC_DLLA_CTRL)
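
/*
 * Address naming convention suggested by the definitions above: names
 * ending in _V are virtual addresses built from the OMAP34xx register
 * address macros, while names ending in _P are raw physical addresses.
 * The physical variants are used by the code paths that run while the
 * MMU is still off (the resume-from-OFF restore path and the ES3 SDRC
 * fix copied to SRAM).
 */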

	.text
/* Function to acquire the semaphore in scratchpad */
ENTRY(lock_scratchpad_sem)
	stmfd	sp!, {lr}	@ save registers on stack
wait_sem:
	mov	r0, #1
	ldr	r1, sdrc_scratchpad_sem
wait_loop:
	ldr	r2, [r1]	@ load the lock value
	cmp	r2, r0		@ is the lock free ?
	beq	wait_loop	@ not free...
	swp	r2, r0, [r1]	@ semaphore free so lock it and proceed
	cmp	r2, r0		@ did we succeed ?
	beq	wait_sem	@ no - try again
	ldmfd	sp!, {pc}	@ restore regs and return
sdrc_scratchpad_sem:
	.word	SDRC_SCRATCHPAD_SEM_V
ENTRY(lock_scratchpad_sem_sz)
	.word	. - lock_scratchpad_sem
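
/*
 * For readability, the locking loop above behaves roughly like the
 * following C (illustration only; xchg() stands in for the SWP-based
 * atomic exchange actually used):
 *
 *	for (;;) {
 *		while (*sem == 1)
 *			;			// spin while the lock is held
 *		if (xchg(sem, 1) == 0)		// atomically take the lock
 *			break;			// old value 0 => we own it
 *	}
 */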

	.text
/* Function to release the scratchpad semaphore */
ENTRY(unlock_scratchpad_sem)
	stmfd	sp!, {lr}	@ save registers on stack
	ldr	r3, sdrc_scratchpad_sem
	mov	r2, #0
	str	r2, [r3]
	ldmfd	sp!, {pc}	@ restore regs and return
ENTRY(unlock_scratchpad_sem_sz)
	.word	. - unlock_scratchpad_sem

	.text
/* Function call to get the restore pointer for resume from OFF */
ENTRY(get_restore_pointer)
	stmfd	sp!, {lr}	@ save registers on stack
	adr	r0, restore
	ldmfd	sp!, {pc}	@ restore regs and return
ENTRY(get_restore_pointer_sz)
	.word	. - get_restore_pointer

	.text
/* Function call to get the restore pointer for ES3 to resume from OFF */
ENTRY(get_es3_restore_pointer)
	stmfd	sp!, {lr}	@ save registers on stack
	adr	r0, restore_es3
	ldmfd	sp!, {pc}	@ restore regs and return
ENTRY(get_es3_restore_pointer_sz)
	.word	. - get_es3_restore_pointer

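/*
 * es3_sdrc_fix: ES3.x SDRC workaround, copied to internal SRAM and called
 * from restore_es3 below when the CORE domain was previously OFF.  It
 * re-enables SDRC register access if it was blocked, rewrites the mode and
 * extended-mode registers of both chip selects and kicks off a manual
 * autorefresh on each, bringing the SDRAM safely out of self-refresh.
 */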
ENTRY(es3_sdrc_fix)
	ldr	r4, sdrc_syscfg		@ get config addr
	ldr	r5, [r4]		@ get value
	tst	r5, #0x100		@ is part access blocked
	it	eq
	biceq	r5, r5, #0x100		@ clear bit if set
	str	r5, [r4]		@ write back change
	ldr	r4, sdrc_mr_0		@ get config addr
	ldr	r5, [r4]		@ get value
	str	r5, [r4]		@ write back change
	ldr	r4, sdrc_emr2_0		@ get config addr
	ldr	r5, [r4]		@ get value
	str	r5, [r4]		@ write back change
	ldr	r4, sdrc_manual_0	@ get config addr
	mov	r5, #0x2		@ autorefresh command
	str	r5, [r4]		@ kick off refreshes
	ldr	r4, sdrc_mr_1		@ get config addr
	ldr	r5, [r4]		@ get value
	str	r5, [r4]		@ write back change
	ldr	r4, sdrc_emr2_1		@ get config addr
	ldr	r5, [r4]		@ get value
	str	r5, [r4]		@ write back change
	ldr	r4, sdrc_manual_1	@ get config addr
	mov	r5, #0x2		@ autorefresh command
	str	r5, [r4]		@ kick off refreshes
	bx	lr
sdrc_syscfg:
	.word	SDRC_SYSCONFIG_P
sdrc_mr_0:
	.word	SDRC_MR_0_P
sdrc_emr2_0:
	.word	SDRC_EMR2_0_P
sdrc_manual_0:
	.word	SDRC_MANUAL_0_P
sdrc_mr_1:
	.word	SDRC_MR_1_P
sdrc_emr2_1:
	.word	SDRC_EMR2_1_P
sdrc_manual_1:
	.word	SDRC_MANUAL_1_P
ENTRY(es3_sdrc_fix_sz)
	.word	. - es3_sdrc_fix

/* Function to call ROM code to save the secure RAM context */
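/*
 * The secure world is entered using the convention seen throughout this
 * file: r12 carries the secure service ID, r1/r2/r6 carry task ID and
 * flags, r3 points at the parameter list (here api_params, whose second
 * word is filled with the SDRAM address passed in r0), and the SMC
 * instruction is emitted as a raw opcode (0xE1600071 == smc #1,
 * 0xE1600070 == smc #0), presumably so the file assembles even without
 * security-extension support in the assembler.
 */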
ENTRY(save_secure_ram_context)
	stmfd	sp!, {r1-r12, lr}	@ save registers on stack
save_secure_ram_debug:
	/* b save_secure_ram_debug */	@ enable to debug save code
	adr	r3, api_params		@ r3 points to parameters
	str	r0, [r3, #0x4]		@ r0 has sdram address
	ldr	r12, high_mask
	and	r3, r3, r12
	ldr	r12, sram_phy_addr_mask
	orr	r3, r3, r12
	mov	r0, #25			@ set service ID for PPA
	mov	r12, r0			@ copy secure service ID in r12
	mov	r1, #0			@ set task id for ROM code in r1
	mov	r2, #4			@ set some flags in r2, r6
	mov	r6, #0xff
	mcr	p15, 0, r0, c7, c10, 4	@ data write barrier
	mcr	p15, 0, r0, c7, c10, 5	@ data memory barrier
	.word	0xE1600071		@ call SMI monitor (smi #1)
	nop
	nop
	nop
	nop
	ldmfd	sp!, {r1-r12, pc}
sram_phy_addr_mask:
	.word	SRAM_BASE_P
high_mask:
	.word	0xffff
api_params:
	.word	0x4, 0x0, 0x0, 0x1, 0x1
ENTRY(save_secure_ram_context_sz)
	.word	. - save_secure_ram_context

/*
 * Forces OMAP into idle state
 *
 * omap34xx_cpu_suspend() - This bit of code just executes the WFI
 * for normal idles.
 *
 * Note: This code gets copied to internal SRAM at boot.  When the OMAP
 *	 wakes up it continues execution at the point it went to sleep.
 */
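/*
 * Entry conditions (as also noted in the comments below): r0 holds the
 * address of the context save area in SDRAM (the restore pointer), and r1
 * indicates whether context must be saved - 0 for a plain WFI idle,
 * otherwise the target state (1: only L1 and logic lost, 2: only L2 lost,
 * 3: both L1 and L2 lost).
 */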
ENTRY(omap34xx_cpu_suspend)
	stmfd	sp!, {r0-r12, lr}		@ save registers on stack
loop:
	/*b	loop*/	@ Enable to debug by stepping through code
	/* r0 contains restore pointer in sdram */
	/* r1 contains information about saving context */
	ldr	r4, sdrc_power		@ read the SDRC_POWER register
	ldr	r5, [r4]		@ read the contents of SDRC_POWER
	orr	r5, r5, #0x40		@ enable self refresh on idle req
	str	r5, [r4]		@ write back to SDRC_POWER register

	cmp	r1, #0x0
	/* If context save is required, do that and execute wfi */
	bne	save_context_wfi
	/* Data memory barrier and Data sync barrier */
	mov	r1, #0
	mcr	p15, 0, r1, c7, c10, 4
	mcr	p15, 0, r1, c7, c10, 5

	wfi				@ wait for interrupt

	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	bl	wait_sdrc_ok

	ldmfd	sp!, {r0-r12, pc}		@ restore regs and return
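
/*
 * Resume-from-OFF entry points.  The pointers returned by
 * get_restore_pointer/get_es3_restore_pointer above land here.  The MMU
 * and caches are still off at this point, so everything up to the MMU
 * enable further down relies on physical addresses (the *_P constants).
 * restore_es3 additionally runs the SDRC fix from SRAM when the CORE
 * domain was previously OFF.
 */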
restore_es3:
	/*b restore_es3*/		@ Enable to debug restore code
	ldr	r5, pm_prepwstst_core_p
	ldr	r4, [r5]
	and	r4, r4, #0x3
	cmp	r4, #0x0	@ Check if previous power state of CORE is OFF
	bne	restore
	adr	r0, es3_sdrc_fix
	ldr	r1, sram_base
	ldr	r2, es3_sdrc_fix_sz
	mov	r2, r2, ror #2
copy_to_sram:
	ldmia	r0!, {r3}	@ val = *src
	stmia	r1!, {r3}	@ *dst = val
	subs	r2, r2, #0x1	@ num_words--
	bne	copy_to_sram
	ldr	r1, sram_base
	blx	r1
restore:
	/* b restore */	@ Enable to debug restore code
	/* Check what was the reason for MPU reset and store the reason in r9 */
	/* 1 - Only L1 and logic lost */
	/* 2 - Only L2 lost - In this case, we won't be here */
	/* 3 - Both L1 and L2 lost */
	ldr	r1, pm_pwstctrl_mpu
	ldr	r2, [r1]
	and	r2, r2, #0x3
	cmp	r2, #0x0	@ Check if target power state was OFF or RET
	moveq	r9, #0x3	@ MPU OFF => L1 and L2 lost
	movne	r9, #0x1	@ Only L1 and logic lost => avoid L2 invalidation
	bne	logic_l1_restore
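	/*
	 * Check the device type field of CONTROL_STATUS (masked with 0x700
	 * below): 0x300 identifies a general purpose (GP) device, which
	 * invalidates L2 with the plain SMI call at l2_inv_gp; non-GP
	 * (HS/EMU) devices go through the PPA secure services used just
	 * below instead.
	 */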
	ldr	r0, control_stat
	ldr	r1, [r0]
	and	r1, #0x700
	cmp	r1, #0x300
	beq	l2_inv_gp
	mov	r0, #40		@ set service ID for PPA
	mov	r12, r0		@ copy secure Service ID in r12
	mov	r1, #0		@ set task id for ROM code in r1
	mov	r2, #4		@ set some flags in r2, r6
	mov	r6, #0xff
	adr	r3, l2_inv_api_params	@ r3 points to dummy parameters
	mcr	p15, 0, r0, c7, c10, 4	@ data write barrier
	mcr	p15, 0, r0, c7, c10, 5	@ data memory barrier
	.word	0xE1600071		@ call SMI monitor (smi #1)
	/* Write to Aux control register to set some bits */
	mov	r0, #42		@ set service ID for PPA
	mov	r12, r0		@ copy secure Service ID in r12
	mov	r1, #0		@ set task id for ROM code in r1
	mov	r2, #4		@ set some flags in r2, r6
	mov	r6, #0xff
	ldr	r4, scratchpad_base
	ldr	r3, [r4, #0xBC]	@ r3 points to parameters
	mcr	p15, 0, r0, c7, c10, 4	@ data write barrier
	mcr	p15, 0, r0, c7, c10, 5	@ data memory barrier
	.word	0xE1600071		@ call SMI monitor (smi #1)

#ifdef CONFIG_OMAP3_L2_AUX_SECURE_SAVE_RESTORE
	/* Restore L2 aux control register */
	@ set service ID for PPA
	mov	r0, #CONFIG_OMAP3_L2_AUX_SECURE_SERVICE_SET_ID
	mov	r12, r0		@ copy service ID in r12
	mov	r1, #0		@ set task ID for ROM code in r1
	mov	r2, #4		@ set some flags in r2, r6
	mov	r6, #0xff
	ldr	r4, scratchpad_base
	ldr	r3, [r4, #0xBC]
	adds	r3, r3, #8	@ r3 points to parameters
	mcr	p15, 0, r0, c7, c10, 4	@ data write barrier
	mcr	p15, 0, r0, c7, c10, 5	@ data memory barrier
	.word	0xE1600071		@ call SMI monitor (smi #1)
#endif
	b	logic_l1_restore
l2_inv_api_params:
	.word	0x1, 0x00
l2_inv_gp:
	/* Execute smi to invalidate L2 cache */
	mov	r12, #0x1		@ set up to invalidate L2
smi:	.word	0xE1600070		@ Call SMI monitor (smieq)
	/* Write to Aux control register to set some bits */
	ldr	r4, scratchpad_base
	ldr	r3, [r4, #0xBC]
	ldr	r0, [r3, #4]
	mov	r12, #0x3
	.word	0xE1600070	@ Call SMI monitor (smieq)
	ldr	r4, scratchpad_base
	ldr	r3, [r4, #0xBC]
	ldr	r0, [r3, #12]
	mov	r12, #0x2
	.word	0xE1600070	@ Call SMI monitor (smieq)
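/*
 * Restore the logic and L1 context: the CP15 registers below are reloaded
 * from the SDRAM context area (reached via the pointer held at scratchpad
 * offset 0xBC), in exactly the order in which save_context_wfi further
 * down stored them.
 */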
logic_l1_restore:
	mov	r1, #0
	/* Invalidate all instruction caches to PoU
	 * and flush branch target cache */
	mcr	p15, 0, r1, c7, c5, 0

	ldr	r4, scratchpad_base
	ldr	r3, [r4, #0xBC]
	adds	r3, r3, #16
	ldmia	r3!, {r4-r6}
	mov	sp, r4
	msr	spsr_cxsf, r5
	mov	lr, r6

	ldmia	r3!, {r4-r9}
	/* Coprocessor access control register */
	mcr	p15, 0, r4, c1, c0, 2

	/* TTBR0 */
	mcr	p15, 0, r5, c2, c0, 0
	/* TTBR1 */
	mcr	p15, 0, r6, c2, c0, 1
	/* Translation table base control register */
	mcr	p15, 0, r7, c2, c0, 2
	/* Domain access control register */
	mcr	p15, 0, r8, c3, c0, 0
	/* Data fault status register */
	mcr	p15, 0, r9, c5, c0, 0

	ldmia	r3!, {r4-r8}
	/* Instruction fault status register */
	mcr	p15, 0, r4, c5, c0, 1
	/* Data auxiliary fault status register */
	mcr	p15, 0, r5, c5, c1, 0
	/* Instruction auxiliary fault status register */
	mcr	p15, 0, r6, c5, c1, 1
	/* Data fault address register */
	mcr	p15, 0, r7, c6, c0, 0
	/* Instruction fault address register */
	mcr	p15, 0, r8, c6, c0, 2
	ldmia	r3!, {r4-r7}

	/* User r/w thread and process ID */
	mcr	p15, 0, r4, c13, c0, 2
	/* User r/o thread and process ID */
	mcr	p15, 0, r5, c13, c0, 3
	/* Privileged only thread and process ID */
	mcr	p15, 0, r6, c13, c0, 4
	/* Cache size selection */
	mcr	p15, 2, r7, c0, c0, 0
	ldmia	r3!, {r4-r8}
	/* Data TLB lockdown registers */
	mcr	p15, 0, r4, c10, c0, 0
	/* Instruction TLB lockdown registers */
	mcr	p15, 0, r5, c10, c0, 1
	/* Secure or non-secure vector base address */
	mcr	p15, 0, r6, c12, c0, 0
	/* FCSE PID */
	mcr	p15, 0, r7, c13, c0, 0
	/* Context PID */
	mcr	p15, 0, r8, c13, c0, 1

	ldmia	r3!, {r4-r5}
	/* Primary memory remap register */
	mcr	p15, 0, r4, c10, c2, 0
	/* Normal memory remap register */
	mcr	p15, 0, r5, c10, c2, 1

	/* Restore cpsr */
	ldmia	r3!, {r4}	/* load CPSR from SDRAM */
	msr	cpsr, r4	/* restore cpsr */

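	/*
	 * The MMU is about to be re-enabled while we are still running from
	 * physical addresses.  To survive the switch, the first-level page
	 * table entry covering the current PC is temporarily replaced with a
	 * 1MB identity-mapped section (built from table_entry below); the
	 * original entry, its address and the original control register
	 * value are parked in the scratchpad at offsets 0xC0/0xC4/0xC8 so
	 * they can be put back once the MMU and caches are on again.
	 */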
	/* Enabling MMU here */
	mrc	p15, 0, r7, c2, c0, 2	/* Read TTBR control */
	/* Extract the N (0:2) bits and decide whether to use TTBR0 or TTBR1 */
	and	r7, #0x7
	cmp	r7, #0x0
	beq	usettbr0
ttbr_error:
	/* More work needs to be done to support an N[0:2] value other than 0.
	 * Loop here so that the error can be detected.
	 */
	b	ttbr_error
usettbr0:
	mrc	p15, 0, r2, c2, c0, 0
	ldr	r5, ttbrbit_mask
	and	r2, r5
	mov	r4, pc
	ldr	r5, table_index_mask
	and	r4, r5 /* r4 = bits 31 to 20 of pc */
	/* Extract the value to be written to the table entry */
	ldr	r1, table_entry
	add	r1, r1, r4 /* r1 has the value to be written to the table entry */
	/* Get the address of the table entry to modify */
	lsr	r4, #18
	add	r2, r4 /* r2 has the location which needs to be modified */
	/* Store the previous entry of the location being modified */
	ldr	r5, scratchpad_base
	ldr	r4, [r2]
	str	r4, [r5, #0xC0]
	/* Modify the table entry */
	str	r1, [r2]
	/* Store the address of the entry being modified
	 * - will be restored after enabling MMU */
	ldr	r5, scratchpad_base
	str	r2, [r5, #0xC4]

	mov	r0, #0
	mcr	p15, 0, r0, c7, c5, 4	@ Flush prefetch buffer
	mcr	p15, 0, r0, c7, c5, 6	@ Invalidate branch predictor array
	mcr	p15, 0, r0, c8, c5, 0	@ Invalidate instruction TLB
	mcr	p15, 0, r0, c8, c6, 0	@ Invalidate data TLB
	/* Restore the control register but don't enable caches here.
	 * Caches will be enabled after restoring the MMU table entry. */
	ldmia	r3!, {r4}
	/* Store the previous value of the control register in the scratchpad */
	str	r4, [r5, #0xC8]
	ldr	r2, cache_pred_disable_mask
	and	r4, r2
	mcr	p15, 0, r4, c1, c0, 0

	ldmfd	sp!, {r0-r12, pc}		@ restore regs and return
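
/*
 * save_context_wfi: write the restore parameters and the CP15 register
 * context to the SDRAM area passed in r0, in the order expected by
 * logic_l1_restore above, then clean the caches as required for the
 * target state and execute WFI.
 */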
save_context_wfi:
	/*b	save_context_wfi*/	@ enable to debug save code
	mov	r8, r0 /* Store SDRAM address in r8 */
	mrc	p15, 0, r5, c1, c0, 1	@ Read Auxiliary Control Register
	mov	r4, #0x1		@ Number of parameters for restore call
	stmia	r8!, {r4-r5}		@ Push parameters for restore call
	mrc	p15, 1, r5, c9, c0, 2	@ Read L2 AUX ctrl register
	stmia	r8!, {r4-r5}		@ Push parameters for restore call
	/* Check what the target sleep state is (stored in r1): */
	/* 1 - Only L1 and logic lost */
	/* 2 - Only L2 lost */
	/* 3 - Both L1 and L2 lost */
	cmp	r1, #0x2 /* Only L2 lost */
	beq	clean_l2
	cmp	r1, #0x1 /* L2 retained */
	/* r9 stores whether to clean L2 or not */
	moveq	r9, #0x0 /* Don't clean L2 */
	movne	r9, #0x1 /* Clean L2 */
l1_logic_lost:
	/* Store sp and spsr to SDRAM */
	mov	r4, sp
	mrs	r5, spsr
	mov	r6, lr
	stmia	r8!, {r4-r6}
	/* Save all ARM registers */
	/* Coprocessor access control register */
	mrc	p15, 0, r6, c1, c0, 2
	stmia	r8!, {r6}
	/* TTBR0, TTBR1 and Translation table base control */
	mrc	p15, 0, r4, c2, c0, 0
	mrc	p15, 0, r5, c2, c0, 1
	mrc	p15, 0, r6, c2, c0, 2
	stmia	r8!, {r4-r6}
	/* Domain access control register, data fault status register,
	and instruction fault status register */
	mrc	p15, 0, r4, c3, c0, 0
	mrc	p15, 0, r5, c5, c0, 0
	mrc	p15, 0, r6, c5, c0, 1
	stmia	r8!, {r4-r6}
	/* Data aux fault status register, instruction aux fault status,
	data fault address register and instruction fault address register */
	mrc	p15, 0, r4, c5, c1, 0
	mrc	p15, 0, r5, c5, c1, 1
	mrc	p15, 0, r6, c6, c0, 0
	mrc	p15, 0, r7, c6, c0, 2
	stmia	r8!, {r4-r7}
	/* user r/w thread and process ID, user r/o thread and process ID,
	priv only thread and process ID, cache size selection */
	mrc	p15, 0, r4, c13, c0, 2
	mrc	p15, 0, r5, c13, c0, 3
	mrc	p15, 0, r6, c13, c0, 4
	mrc	p15, 2, r7, c0, c0, 0
	stmia	r8!, {r4-r7}
	/* Data TLB lockdown, instruction TLB lockdown registers */
	mrc	p15, 0, r5, c10, c0, 0
	mrc	p15, 0, r6, c10, c0, 1
	stmia	r8!, {r5-r6}
	/* Secure or non-secure vector base address, FCSE PID, Context PID */
	mrc	p15, 0, r4, c12, c0, 0
	mrc	p15, 0, r5, c13, c0, 0
	mrc	p15, 0, r6, c13, c0, 1
	stmia	r8!, {r4-r6}
	/* Primary remap, normal remap registers */
	mrc	p15, 0, r4, c10, c2, 0
	mrc	p15, 0, r5, c10, c2, 1
	stmia	r8!, {r4-r5}

	/* Store current cpsr */
	mrs	r2, cpsr
	stmia	r8!, {r2}

	mrc	p15, 0, r4, c1, c0, 0
	/* save control register */
	stmia	r8!, {r4}
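/*
 * The loop below walks the cache levels reported in CLIDR up to LoC and
 * cleans each data/unified level set by set and way by way, using the
 * geometry read back from CCSIDR; the structure mirrors
 * v7_flush_dcache_all in arch/arm/mm/cache-v7.S.
 */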
clean_caches:
	/* Clean data or unified cache to PoU */
	/* How to invalidate only L1 cache???? - #FIX_ME# */
	/* mcr	p15, 0, r11, c7, c11, 1 */
	cmp	r9, #1 /* Check whether L2 inval is required or not */
	bne	skip_l2_inval
clean_l2:
	/* read clidr */
	mrc	p15, 1, r0, c0, c0, 1
	/* extract loc from clidr */
	ands	r3, r0, #0x7000000
	/* left align loc bit field */
	mov	r3, r3, lsr #23
	/* if loc is 0, then no need to clean */
	beq	finished
	/* start clean at cache level 0 */
	mov	r10, #0
loop1:
	/* work out 3x current cache level */
	add	r2, r10, r10, lsr #1
	/* extract cache type bits from clidr */
	mov	r1, r0, lsr r2
	/* mask off the bits for the current cache only */
	and	r1, r1, #7
	/* see what cache we have at this level */
	cmp	r1, #2
	/* skip if no cache, or just i-cache */
	blt	skip
	/* select current cache level in cssr */
	mcr	p15, 2, r10, c0, c0, 0
	/* isb to sync the new cssr & csidr */
	isb
	/* read the new csidr */
	mrc	p15, 1, r1, c0, c0, 0
	/* extract the length of the cache lines */
	and	r2, r1, #7
	/* add 4 (line length offset) */
	add	r2, r2, #4
	ldr	r4, assoc_mask
	/* find maximum number on the way size */
	ands	r4, r4, r1, lsr #3
	/* find bit position of way size increment */
	clz	r5, r4
	ldr	r7, numset_mask
	/* extract max number of the index size */
	ands	r7, r7, r1, lsr #13
loop2:
	/* create working copy of max way size */
	mov	r9, r4
loop3:
	/* factor way and cache number into r11 */
	orr	r11, r10, r9, lsl r5
	/* factor index number into r11 */
	orr	r11, r11, r7, lsl r2
	/* clean & invalidate by set/way */
	mcr	p15, 0, r11, c7, c10, 2
	/* decrement the way */
	subs	r9, r9, #1
	bge	loop3
	/* decrement the index */
	subs	r7, r7, #1
	bge	loop2
skip:
	/* increment cache number */
	add	r10, r10, #2
	cmp	r3, r10
	bgt	loop1
finished:
	/* switch back to cache level 0 */
	mov	r10, #0
	/* select current cache level in cssr */
	mcr	p15, 2, r10, c0, c0, 0
	isb
skip_l2_inval:
	/* Data memory barrier and Data sync barrier */
	mov	r1, #0
	mcr	p15, 0, r1, c7, c10, 4
	mcr	p15, 0, r1, c7, c10, 5

	wfi				@ wait for interrupt
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	bl	wait_sdrc_ok
	/* restore regs and return */
	ldmfd	sp!, {r0-r12, pc}

/* Make sure SDRC accesses are ok */
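/*
 * wait_sdrc_ok: spin until CM_IDLEST1_CORE shows the SDRC as accessible
 * again, clear the "self refresh on idle request" bit that was set on the
 * way into suspend, and then wait, if needed, for the SDRC DLLA to report
 * lock before returning, so that SDRAM accesses made by the caller are
 * safe.
 */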
wait_sdrc_ok:
	ldr	r4, cm_idlest1_core
	ldr	r5, [r4]
	and	r5, r5, #0x2
	cmp	r5, #0
	bne	wait_sdrc_ok
	ldr	r4, sdrc_power
	ldr	r5, [r4]
	bic	r5, r5, #0x40
	str	r5, [r4]
wait_dll_lock:
	/* Is dll in lock mode? */
	ldr	r4, sdrc_dlla_ctrl
	ldr	r5, [r4]
	tst	r5, #0x4
	bxne	lr
	/* wait till dll locks */
	ldr	r4, sdrc_dlla_status
	ldr	r5, [r4]
	and	r5, r5, #0x4
	cmp	r5, #0x4
	bne	wait_dll_lock
	bx	lr

cm_idlest1_core:
	.word	CM_IDLEST1_CORE_V
sdrc_dlla_status:
	.word	SDRC_DLLA_STATUS_V
sdrc_dlla_ctrl:
	.word	SDRC_DLLA_CTRL_V
pm_prepwstst_core:
	.word	PM_PREPWSTST_CORE_V
pm_prepwstst_core_p:
	.word	PM_PREPWSTST_CORE_P
pm_prepwstst_mpu:
	.word	PM_PREPWSTST_MPU_V
pm_pwstctrl_mpu:
	.word	PM_PWSTCTRL_MPU_P
scratchpad_base:
	.word	SCRATCHPAD_BASE_P
sram_base:
	.word	SRAM_BASE_P + 0x8000
sdrc_power:
	.word	SDRC_POWER_V
clk_stabilize_delay:
	.word	0x000001FF
assoc_mask:
	.word	0x3ff
numset_mask:
	.word	0x7fff
ttbrbit_mask:
	.word	0xFFFFC000
table_index_mask:
	.word	0xFFF00000
table_entry:
	.word	0x00000C02
cache_pred_disable_mask:
	.word	0xFFFFE7FB
control_stat:
	.word	CONTROL_STAT
ENTRY(omap34xx_cpu_suspend_sz)
	.word	. - omap34xx_cpu_suspend