xref: /openbmc/linux/arch/arm/mach-omap2/sleep34xx.S (revision a09d2831)
1/*
2 * linux/arch/arm/mach-omap2/sleep.S
3 *
4 * (C) Copyright 2007
5 * Texas Instruments
6 * Karthik Dasu <karthik-dp@ti.com>
7 *
8 * (C) Copyright 2004
9 * Texas Instruments, <www.ti.com>
10 * Richard Woodruff <r-woodruff2@ti.com>
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License as
14 * published by the Free Software Foundation; either version 2 of
15 * the License, or (at your option) any later version.
16 *
17 * This program is distributed in the hope that it will be useful,
18 * but WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
20 * GNU General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, write to the Free Software
24 * Foundation, Inc., 59 Temple Place, Suite 330, Boston,
25 * MA 02111-1307 USA
26 */
27#include <linux/linkage.h>
28#include <asm/assembler.h>
29#include <mach/io.h>
30#include <plat/control.h>
31
32#include "cm.h"
33#include "prm.h"
34#include "sdrc.h"
35
36#define PM_PREPWSTST_CORE_V	OMAP34XX_PRM_REGADDR(CORE_MOD, \
37				OMAP3430_PM_PREPWSTST)
38#define PM_PREPWSTST_CORE_P	0x48306AE8
39#define PM_PREPWSTST_MPU_V	OMAP34XX_PRM_REGADDR(MPU_MOD, \
40				OMAP3430_PM_PREPWSTST)
41#define PM_PWSTCTRL_MPU_P	OMAP3430_PRM_BASE + MPU_MOD + PM_PWSTCTRL
42#define CM_IDLEST1_CORE_V	OMAP34XX_CM_REGADDR(CORE_MOD, CM_IDLEST1)
43#define SRAM_BASE_P		0x40200000
44#define CONTROL_STAT		0x480022F0
45#define SCRATCHPAD_MEM_OFFS	0x310 /* Move this once the correct place
46				       * is available */
47#define SCRATCHPAD_BASE_P	(OMAP343X_CTRL_BASE + OMAP343X_CONTROL_MEM_WKUP\
48						+ SCRATCHPAD_MEM_OFFS)
49#define SDRC_POWER_V		OMAP34XX_SDRC_REGADDR(SDRC_POWER)
50#define SDRC_SYSCONFIG_P	(OMAP343X_SDRC_BASE + SDRC_SYSCONFIG)
51#define SDRC_MR_0_P		(OMAP343X_SDRC_BASE + SDRC_MR_0)
52#define SDRC_EMR2_0_P		(OMAP343X_SDRC_BASE + SDRC_EMR2_0)
53#define SDRC_MANUAL_0_P		(OMAP343X_SDRC_BASE + SDRC_MANUAL_0)
54#define SDRC_MR_1_P		(OMAP343X_SDRC_BASE + SDRC_MR_1)
55#define SDRC_EMR2_1_P		(OMAP343X_SDRC_BASE + SDRC_EMR2_1)
56#define SDRC_MANUAL_1_P		(OMAP343X_SDRC_BASE + SDRC_MANUAL_1)
57#define SDRC_DLLA_STATUS_V	OMAP34XX_SDRC_REGADDR(SDRC_DLLA_STATUS)
58#define SDRC_DLLA_CTRL_V	OMAP34XX_SDRC_REGADDR(SDRC_DLLA_CTRL)
59
60	.text
61/* Function call to get the restore pointer for resume from OFF */
/*
 * Returns in r0 the run-time address of the 'restore' entry point
 * below, for the PM code to use as the wakeup-from-OFF vector.
 * Clobbers: r0.
 */
62ENTRY(get_restore_pointer)
63        stmfd   sp!, {lr}     @ save registers on stack
64	adr	r0, restore	@ r0 = address of the 'restore' label
65        ldmfd   sp!, {pc}     @ restore regs and return
66ENTRY(get_restore_pointer_sz)
67        .word   . - get_restore_pointer	@ size of the routine above, in bytes
68
69	.text
70/* Function call to get the restore pointer for ES3 to resume from OFF */
/*
 * Same as get_restore_pointer, but returns the ES3-specific entry
 * point 'restore_es3' (which applies the SDRC fix before the common
 * restore).  Clobbers: r0.
 */
71ENTRY(get_es3_restore_pointer)
72	stmfd	sp!, {lr}	@ save registers on stack
73	adr	r0, restore_es3	@ r0 = address of the 'restore_es3' label
74	ldmfd	sp!, {pc}	@ restore regs and return
75ENTRY(get_es3_restore_pointer_sz)
76	.word	. - get_es3_restore_pointer	@ size of the routine above, in bytes
77
/*
 * es3_sdrc_fix: SDRC wakeup workaround for OMAP3 ES3.x parts.
 * restore_es3 copies this routine into SRAM and calls it there when
 * the previous CORE power state was OFF.  It unblocks SDRC register
 * access if blocked, rewrites the mode (MR) and extended-mode (EMR2)
 * registers of both chip-selects with their current values, and
 * issues a manual autorefresh command (0x2) to each chip-select.
 * Clobbers: r4, r5.  All addresses below are physical (_P) since this
 * runs with the MMU off.
 */
78ENTRY(es3_sdrc_fix)
79	ldr	r4, sdrc_syscfg		@ get config addr
80	ldr	r5, [r4]		@ get value
81	tst	r5, #0x100		@ is part access blocked
82	it	eq
83	biceq	r5, r5, #0x100		@ clear bit if set
84	str	r5, [r4]		@ write back change
85	ldr	r4, sdrc_mr_0		@ get config addr
86	ldr	r5, [r4]		@ get value
87	str	r5, [r4]		@ write back change
88	ldr	r4, sdrc_emr2_0		@ get config addr
89	ldr	r5, [r4]		@ get value
90	str	r5, [r4]		@ write back change
91	ldr	r4, sdrc_manual_0	@ get config addr
92	mov	r5, #0x2		@ autorefresh command
93	str	r5, [r4]		@ kick off refreshes
94	ldr	r4, sdrc_mr_1		@ get config addr
95	ldr	r5, [r4]		@ get value
96	str	r5, [r4]		@ write back change
97	ldr	r4, sdrc_emr2_1		@ get config addr
98	ldr	r5, [r4]		@ get value
99	str	r5, [r4]		@ write back change
100	ldr	r4, sdrc_manual_1	@ get config addr
101	mov	r5, #0x2		@ autorefresh command
102	str	r5, [r4]		@ kick off refreshes
103	bx	lr
/* Literal pool: physical SDRC register addresses (usable with MMU off) */
104sdrc_syscfg:
105	.word	SDRC_SYSCONFIG_P
106sdrc_mr_0:
107	.word	SDRC_MR_0_P
108sdrc_emr2_0:
109	.word	SDRC_EMR2_0_P
110sdrc_manual_0:
111	.word	SDRC_MANUAL_0_P
112sdrc_mr_1:
113	.word	SDRC_MR_1_P
114sdrc_emr2_1:
115	.word	SDRC_EMR2_1_P
116sdrc_manual_1:
117	.word	SDRC_MANUAL_1_P
118ENTRY(es3_sdrc_fix_sz)
119	.word	. - es3_sdrc_fix	@ size in bytes, incl. the literals above
120
121/* Function to call rom code to save secure ram context */
/*
 * In:  r0 = SDRAM address where the secure RAM context is to be saved
 *           (stored into the second word of api_params).
 * Invokes secure service 25 via the SMI monitor (smi #1), with
 * r12 = service ID, r1 = ROM task ID, r2/r6 = flags and r3 = the
 * physical address of the parameter list.
 */
122ENTRY(save_secure_ram_context)
123	stmfd	sp!, {r1-r12, lr}	@ save registers on stack
124save_secure_ram_debug:
125	/* b save_secure_ram_debug */	@ enable to debug save code
126	adr	r3, api_params		@ r3 points to parameters
127	str	r0, [r3,#0x4]		@ r0 has sdram address
128	ldr	r12, high_mask
129	and	r3, r3, r12		@ keep low 16 bits of api_params address
130	ldr	r12, sram_phy_addr_mask
131	orr	r3, r3, r12		@ rebase onto the SRAM physical base
					@ NOTE(review): assumes this code runs
					@ from SRAM within 64K of SRAM_BASE_P
132	mov	r0, #25			@ set service ID for PPA
133	mov	r12, r0			@ copy secure service ID in r12
134	mov	r1, #0			@ set task id for ROM code in r1
135	mov	r2, #4			@ set some flags in r2, r6
136	mov	r6, #0xff
137	mcr	p15, 0, r0, c7, c10, 4	@ data write barrier
138	mcr	p15, 0, r0, c7, c10, 5	@ data memory barrier
139	.word	0xE1600071		@ call SMI monitor (smi #1)
140	nop
141	nop
142	nop
143	nop
144	ldmfd	sp!, {r1-r12, pc}
145sram_phy_addr_mask:
146	.word	SRAM_BASE_P
147high_mask:
148	.word	0xffff
149api_params:
150	.word	0x4, 0x0, 0x0, 0x1, 0x1
151ENTRY(save_secure_ram_context_sz)
152	.word	. - save_secure_ram_context
153
154/*
155 * Forces OMAP into idle state
156 *
157 * omap34xx_suspend() - This bit of code just executes the WFI
158 * for normal idles.
159 *
160 * Note: This code gets copied to internal SRAM at boot. When the OMAP
161 *	 wakes up it continues execution at the point it went to sleep.
162 */
/*
 * In:  r0 = restore pointer in SDRAM (consumed by the save path)
 *      r1 = context-save selector: 0 = plain WFI idle, non-zero =
 *           save context first (see save_context_wfi for the 1/2/3
 *           encoding)
 */
163ENTRY(omap34xx_cpu_suspend)
164	stmfd	sp!, {r0-r12, lr}		@ save registers on stack
165loop:
166	/*b	loop*/	@Enable to debug by stepping through code
167	/* r0 contains restore pointer in sdram */
168	/* r1 contains information about saving context */
169	ldr     r4, sdrc_power          @ read the SDRC_POWER register
170	ldr     r5, [r4]                @ read the contents of SDRC_POWER
171	orr     r5, r5, #0x40           @ enable self refresh on idle req
172	str     r5, [r4]                @ write back to SDRC_POWER register
173
174	cmp	r1, #0x0		@ context save requested?
175	/* If context save is required, do that and execute wfi */
176	bne	save_context_wfi
177	/* Data memory barrier and Data sync barrier */
178	mov	r1, #0
179	mcr	p15, 0, r1, c7, c10, 4	@ data sync barrier
180	mcr	p15, 0, r1, c7, c10, 5	@ data memory barrier
181
182	wfi				@ wait for interrupt
183
184	nop
185	nop
186	nop
187	nop
188	nop
189	nop
190	nop
191	nop
192	nop
193	nop
194	bl wait_sdrc_ok			@ ensure SDRAM is accessible again
195
196	ldmfd	sp!, {r0-r12, pc}		@ restore regs and return
197restore_es3:
	/*
	 * ES3.x wakeup entry point.  If the previous CORE power state
	 * (PM_PREPWSTST_CORE[1:0], read through its physical address)
	 * was OFF, copy es3_sdrc_fix into SRAM and run it there before
	 * falling into the common 'restore' path below.
	 */
198	/*b restore_es3*/		@ Enable to debug restore code
199	ldr	r5, pm_prepwstst_core_p
200	ldr	r4, [r5]
201	and	r4, r4, #0x3
202	cmp	r4, #0x0	@ Check if previous power state of CORE is OFF
203	bne	restore
204	adr	r0, es3_sdrc_fix
205	ldr	r1, sram_base
206	ldr	r2, es3_sdrc_fix_sz
207	mov	r2, r2, ror #2	@ bytes -> words (size is word-aligned,
				@ so ror #2 behaves like lsr #2 here)
208copy_to_sram:
209	ldmia	r0!, {r3}	@ val = *src
210	stmia	r1!, {r3}	@ *dst = val
211	subs	r2, r2, #0x1	@ num_words--
212	bne	copy_to_sram
213	ldr	r1, sram_base
214	blx	r1		@ run es3_sdrc_fix from SRAM
215restore:
	/*
	 * Common wakeup entry: decide how much state was lost (r9) and,
	 * on the secure path, invalidate L2 / set the aux control
	 * register via PPA services before restoring CPU context.
	 */
216	/* b restore*/  @ Enable to debug restore code
217        /* Check what was the reason for mpu reset and store the reason in r9*/
218        /* 1 - Only L1 and logic lost */
219        /* 2 - Only L2 lost - In this case, we wont be here */
220        /* 3 - Both L1 and L2 lost */
221	ldr     r1, pm_pwstctrl_mpu
222	ldr	r2, [r1]
223	and     r2, r2, #0x3
224	cmp     r2, #0x0	@ Check if target power state was OFF or RET
225        moveq   r9, #0x3        @ MPU OFF => L1 and L2 lost
226	movne	r9, #0x1	@ Only L1 and logic lost => avoid L2 invalidation
227	bne	logic_l1_restore
228	ldr	r0, control_stat
229	ldr	r1, [r0]
230	and	r1, #0x700	@ CONTROL_STATUS bits [10:8]
231	cmp	r1, #0x300	@ NOTE(review): 0x300 selects the l2_inv_gp
				@ path - presumably the GP device type; confirm
232	beq	l2_inv_gp
233	mov	r0, #40		@ set service ID for PPA
234	mov	r12, r0		@ copy secure Service ID in r12
235	mov	r1, #0		@ set task id for ROM code in r1
236	mov	r2, #4		@ set some flags in r2, r6
237	mov	r6, #0xff
238	adr	r3, l2_inv_api_params	@ r3 points to dummy parameters
239	mcr	p15, 0, r0, c7, c10, 4	@ data write barrier
240	mcr	p15, 0, r0, c7, c10, 5	@ data memory barrier
241	.word	0xE1600071		@ call SMI monitor (smi #1)
242	/* Write to Aux control register to set some bits */
243	mov	r0, #42		@ set service ID for PPA
244	mov	r12, r0		@ copy secure Service ID in r12
245	mov	r1, #0		@ set task id for ROM code in r1
246	mov	r2, #4		@ set some flags in r2, r6
247	mov	r6, #0xff
248	adr	r3, write_aux_control_params	@ r3 points to parameters
249	mcr	p15, 0, r0, c7, c10, 4	@ data write barrier
250	mcr	p15, 0, r0, c7, c10, 5	@ data memory barrier
251	.word	0xE1600071		@ call SMI monitor (smi #1)
252
253	b	logic_l1_restore
254l2_inv_api_params:
255	.word   0x1, 0x00
256write_aux_control_params:
257	.word   0x1, 0x72
258l2_inv_gp:
259	/* Execute smi to invalidate L2 cache */
260	mov r12, #0x1                         @ set up to invalide L2
261smi:    .word 0xE1600070		@ Call SMI monitor (smieq)
262	/* Write to Aux control register to set some bits */
263	mov	r0, #0x72
264	mov	r12, #0x3
265	.word 0xE1600070	@ Call SMI monitor (smieq)
266logic_l1_restore:
	/*
	 * Restore the CPU context saved by save_context_wfi.  r3 walks
	 * the SDRAM save area whose address was left at
	 * scratchpad_base + 0xBC (NOTE(review): offset must match the
	 * scratchpad writer elsewhere).  Restores sp/spsr/lr, the cp15
	 * registers in the exact order they were saved, then cpsr, and
	 * finally re-enables the MMU after patching a section entry
	 * covering this code (previous entry/address/control-register
	 * values are stashed at scratchpad +0xC0/+0xC4/+0xC8 so they
	 * can be undone once the MMU is back on).
	 */
267	mov	r1, #0
268	/* Invalidate all instruction caches to PoU
269	 * and flush branch target cache */
270	mcr	p15, 0, r1, c7, c5, 0
271
272	ldr	r4, scratchpad_base
273	ldr	r3, [r4,#0xBC]	@ r3 = base of the saved-context area
274	ldmia	r3!, {r4-r6}
275	mov	sp, r4
276	msr	spsr_cxsf, r5
277	mov	lr, r6
278
279	ldmia	r3!, {r4-r9}
280	/* Coprocessor access Control Register */
281	mcr p15, 0, r4, c1, c0, 2
282
283	/* TTBR0 */
284	MCR p15, 0, r5, c2, c0, 0
285	/* TTBR1 */
286	MCR p15, 0, r6, c2, c0, 1
287	/* Translation table base control register */
288	MCR p15, 0, r7, c2, c0, 2
289	/*domain access Control Register */
290	MCR p15, 0, r8, c3, c0, 0
291	/* data fault status Register */
292	MCR p15, 0, r9, c5, c0, 0
293
294	ldmia  r3!,{r4-r8}
295	/* instruction fault status Register */
296	MCR p15, 0, r4, c5, c0, 1
297	/*Data Auxiliary Fault Status Register */
298	MCR p15, 0, r5, c5, c1, 0
299	/*Instruction Auxiliary Fault Status Register*/
300	MCR p15, 0, r6, c5, c1, 1
301	/*Data Fault Address Register */
302	MCR p15, 0, r7, c6, c0, 0
303	/*Instruction Fault Address Register*/
304	MCR p15, 0, r8, c6, c0, 2
305	ldmia  r3!,{r4-r7}
306
307	/* user r/w thread and process ID */
308	MCR p15, 0, r4, c13, c0, 2
309	/* user ro thread and process ID */
310	MCR p15, 0, r5, c13, c0, 3
311	/*Privileged only thread and process ID */
312	MCR p15, 0, r6, c13, c0, 4
313	/* cache size selection */
314	MCR p15, 2, r7, c0, c0, 0
315	ldmia  r3!,{r4-r8}
316	/* Data TLB lockdown registers */
317	MCR p15, 0, r4, c10, c0, 0
318	/* Instruction TLB lockdown registers */
319	MCR p15, 0, r5, c10, c0, 1
320	/* Secure or Nonsecure Vector Base Address */
321	MCR p15, 0, r6, c12, c0, 0
322	/* FCSE PID */
323	MCR p15, 0, r7, c13, c0, 0
324	/* Context PID */
325	MCR p15, 0, r8, c13, c0, 1
326
327	ldmia  r3!,{r4-r5}
328	/* primary memory remap register */
329	MCR p15, 0, r4, c10, c2, 0
330	/*normal memory remap register */
331	MCR p15, 0, r5, c10, c2, 1
332
333	/* Restore cpsr */
334	ldmia	r3!,{r4}	/*load CPSR from SDRAM*/
335	msr	cpsr, r4	/*store cpsr */
336
337	/* Enabling MMU here */
338	mrc	p15, 0, r7, c2, c0, 2 /* Read TTBRControl */
339	/* Extract N (0:2) bits and decide whether to use TTBR0 or TTBR1*/
340	and	r7, #0x7
341	cmp	r7, #0x0
342	beq	usettbr0
343ttbr_error:
344	/* More work needs to be done to support N[0:2] value other than 0
345	* So looping here so that the error can be detected
346	*/
347	b	ttbr_error
348usettbr0:
349	mrc	p15, 0, r2, c2, c0, 0
350	ldr	r5, ttbrbit_mask
351	and	r2, r5		@ r2 = translation table base (TTBR0 & ~0x3FFF)
352	mov	r4, pc
353	ldr	r5, table_index_mask
354	and	r4, r5 /* r4 = 31 to 20 bits of pc */
355	/* Extract the value to be written to table entry */
356	ldr	r1, table_entry
357	add	r1, r1, r4 /* r1 has value to be written to table entry*/
358	/* Getting the address of table entry to modify */
359	lsr	r4, #18		@ (pc & 0xFFF00000) >> 18 = byte offset of
				@ this section's first-level entry
360	add	r2, r4 /* r2 has the location which needs to be modified */
361	/* Storing previous entry of location being modified */
362	ldr	r5, scratchpad_base
363	ldr	r4, [r2]
364	str	r4, [r5, #0xC0]
365	/* Modify the table entry */
366	str	r1, [r2]
367	/* Storing address of entry being modified
368	 * - will be restored after enabling MMU */
369	ldr	r5, scratchpad_base
370	str	r2, [r5, #0xC4]
371
372	mov	r0, #0
373	mcr	p15, 0, r0, c7, c5, 4	@ Flush prefetch buffer
374	mcr	p15, 0, r0, c7, c5, 6	@ Invalidate branch predictor array
375	mcr	p15, 0, r0, c8, c5, 0	@ Invalidate instruction TLB
376	mcr	p15, 0, r0, c8, c6, 0	@ Invalidate data TLB
377	/* Restore control register  but dont enable caches here*/
378	/* Caches will be enabled after restoring MMU table entry */
379	ldmia	r3!, {r4}
380	/* Store previous value of control register in scratchpad */
381	str	r4, [r5, #0xC8]
382	ldr	r2, cache_pred_disable_mask
383	and	r4, r2		@ clear bits 12 (I), 11 (Z) and 2 (C)
384	mcr	p15, 0, r4, c1, c0, 0
385
386	ldmfd	sp!, {r0-r12, pc}		@ restore regs and return
387save_context_wfi:
	/*
	 * Save CPU context to SDRAM at the address passed in r0 (kept
	 * in r8), clean caches as needed for the target state in r1,
	 * then WFI.  The save order must match what logic_l1_restore
	 * reads back.
	 */
388	/*b	save_context_wfi*/	@ enable to debug save code
389	mov	r8, r0 /* Store SDRAM address in r8 */
390        /* Check what that target sleep state is:stored in r1*/
391        /* 1 - Only L1 and logic lost */
392        /* 2 - Only L2 lost */
393        /* 3 - Both L1 and L2 lost */
394	cmp	r1, #0x2 /* Only L2 lost */
395	beq	clean_l2	@ no L1/logic context to save
396	cmp	r1, #0x1 /* L2 retained */
397	/* r9 stores whether to clean L2 or not*/
398	moveq	r9, #0x0 /* Dont Clean L2 */
399	movne	r9, #0x1 /* Clean L2 */
400l1_logic_lost:
401	/* Store sp and spsr to SDRAM */
402	mov	r4, sp
403	mrs	r5, spsr
404	mov	r6, lr
405	stmia	r8!, {r4-r6}
406	/* Save all ARM registers */
407	/* Coprocessor access control register */
408	mrc	p15, 0, r6, c1, c0, 2
409	stmia	r8!, {r6}
410	/* TTBR0, TTBR1 and Translation table base control */
411	mrc	p15, 0, r4, c2, c0, 0
412	mrc	p15, 0, r5, c2, c0, 1
413	mrc	p15, 0, r6, c2, c0, 2
414	stmia	r8!, {r4-r6}
415	/* Domain access control register, data fault status register,
416	and instruction fault status register */
417	mrc	p15, 0, r4, c3, c0, 0
418	mrc	p15, 0, r5, c5, c0, 0
419	mrc	p15, 0, r6, c5, c0, 1
420	stmia	r8!, {r4-r6}
421	/* Data aux fault status register, instruction aux fault status,
422	data fault address register and instruction fault address register*/
423	mrc	p15, 0, r4, c5, c1, 0
424	mrc	p15, 0, r5, c5, c1, 1
425	mrc	p15, 0, r6, c6, c0, 0
426	mrc	p15, 0, r7, c6, c0, 2
427	stmia	r8!, {r4-r7}
428	/* user r/w thread and process ID, user r/o thread and process ID,
429	priv only thread and process ID, cache size selection */
430	mrc	p15, 0, r4, c13, c0, 2
431	mrc	p15, 0, r5, c13, c0, 3
432	mrc	p15, 0, r6, c13, c0, 4
433	mrc	p15, 2, r7, c0, c0, 0
434	stmia	r8!, {r4-r7}
435	/* Data TLB lockdown, instruction TLB lockdown registers */
436	mrc	p15, 0, r5, c10, c0, 0
437	mrc	p15, 0, r6, c10, c0, 1
438	stmia	r8!, {r5-r6}
439	/* Secure or non secure vector base address, FCSE PID, Context PID*/
440	mrc	p15, 0, r4, c12, c0, 0
441	mrc	p15, 0, r5, c13, c0, 0
442	mrc	p15, 0, r6, c13, c0, 1
443	stmia	r8!, {r4-r6}
444	/* Primary remap, normal remap registers */
445	mrc	p15, 0, r4, c10, c2, 0
446	mrc	p15, 0, r5, c10, c2, 1
447	stmia	r8!,{r4-r5}
448
449	/* Store current cpsr*/
450	mrs	r2, cpsr
451	stmia	r8!, {r2}
452
453	mrc	p15, 0, r4, c1, c0, 0
454	/* save control register */
455	stmia	r8!, {r4}
456clean_caches:
	/*
	 * Standard ARMv7 clean-by-set/way walk over every cache level
	 * up to LoC (from CLIDR).  Entered at clean_l2 directly when
	 * only L2 context is lost (r9 check skipped in that case).
	 * Falls through to the barriers + WFI at skip_l2_inval.
	 */
457	/* Clean Data or unified cache to POU*/
458	/* How to invalidate only L1 cache???? - #FIX_ME# */
459	/* mcr	p15, 0, r11, c7, c11, 1 */
460	cmp	r9, #1 /* Check whether L2 inval is required or not*/
461	bne	skip_l2_inval
462clean_l2:
463	/* read clidr */
464	mrc     p15, 1, r0, c0, c0, 1
465	/* extract loc from clidr */
466	ands    r3, r0, #0x7000000
467	/* left align loc bit field */
468	mov     r3, r3, lsr #23		@ r3 = 2 * LoC; the level counter
					@ r10 below also advances by 2
469	/* if loc is 0, then no need to clean */
470	beq     finished
471	/* start clean at cache level 0 */
472	mov     r10, #0
473loop1:
474	/* work out 3x current cache level */
475	add     r2, r10, r10, lsr #1
476	/* extract cache type bits from clidr*/
477	mov     r1, r0, lsr r2
478	/* mask of the bits for current cache only */
479	and     r1, r1, #7
480	/* see what cache we have at this level */
481	cmp     r1, #2
482	/* skip if no cache, or just i-cache */
483	blt     skip
484	/* select current cache level in cssr */
485	mcr     p15, 2, r10, c0, c0, 0
486	/* isb to sych the new cssr&csidr */
487	isb
488	/* read the new csidr */
489	mrc     p15, 1, r1, c0, c0, 0
490	/* extract the length of the cache lines */
491	and     r2, r1, #7
492	/* add 4 (line length offset) */
493	add     r2, r2, #4
494	ldr     r4, assoc_mask
495	/* find maximum number on the way size */
496	ands    r4, r4, r1, lsr #3
497	/* find bit position of way size increment */
498	clz     r5, r4
499	ldr     r7, numset_mask
500	/* extract max number of the index size*/
501	ands    r7, r7, r1, lsr #13
502loop2:
503	mov     r9, r4
504	/* create working copy of max way size*/
505loop3:
506	/* factor way and cache number into r11 */
507	orr     r11, r10, r9, lsl r5
508	/* factor index number into r11 */
509	orr     r11, r11, r7, lsl r2
510	/* clean by set/way (c7, c10, 2 = DCCSW: cleans, does not invalidate) */
511	mcr     p15, 0, r11, c7, c10, 2
512	/* decrement the way*/
513	subs    r9, r9, #1
514	bge     loop3
515	/*decrement the index */
516	subs    r7, r7, #1
517	bge     loop2
518skip:
519	add     r10, r10, #2
520	/* increment cache number */
521	cmp     r3, r10
522	bgt     loop1
523finished:
524	/*switch back to cache level 0 */
525	mov     r10, #0
526	/* select current cache level in cssr */
527	mcr     p15, 2, r10, c0, c0, 0
528	isb
529skip_l2_inval:
530	/* Data memory barrier and Data sync barrier */
531	mov     r1, #0
532	mcr     p15, 0, r1, c7, c10, 4
533	mcr     p15, 0, r1, c7, c10, 5
534
535	wfi                             @ wait for interrupt
536	nop
537	nop
538	nop
539	nop
540	nop
541	nop
542	nop
543	nop
544	nop
545	nop
546	bl wait_sdrc_ok
547	/* restore regs and return */
548	ldmfd   sp!, {r0-r12, pc}
549
550/* Make sure SDRC accesses are ok */
/*
 * wait_sdrc_ok: spin until CM_IDLEST1_CORE bit 1 clears, undo the
 * self-refresh-on-idle bit (0x40) that suspend entry set in
 * SDRC_POWER, then wait until the SDRC DLLA reports lock.
 * Clobbers: r4, r5.  Returns via bx lr.
 */
551wait_sdrc_ok:
552        ldr     r4, cm_idlest1_core
553        ldr     r5, [r4]
554        and     r5, r5, #0x2
555        cmp     r5, #0
556        bne     wait_sdrc_ok	@ busy-wait while bit 1 is still set
557        ldr     r4, sdrc_power
558        ldr     r5, [r4]
559        bic     r5, r5, #0x40	@ disable self refresh on idle req
560        str     r5, [r4]
561wait_dll_lock:
562        /* Is dll in lock mode? */
563        ldr     r4, sdrc_dlla_ctrl
564        ldr     r5, [r4]
565        tst     r5, #0x4
566        bxne    lr		@ DLLA_CTRL bit 2 set: skip the lock wait
567        /* wait till dll locks */
568        ldr     r4, sdrc_dlla_status
569        ldr     r5, [r4]
570        and     r5, r5, #0x4
571        cmp     r5, #0x4
572        bne     wait_dll_lock
573        bx      lr
574
/*
 * Literal pool for omap34xx_cpu_suspend: register addresses (_V =
 * virtual, _P = physical for use while the MMU is off) and bit masks.
 * NOTE(review): pm_prepwstst_core, pm_prepwstst_mpu and
 * clk_stabilize_delay are not referenced by any code visible in this
 * file.
 */
575cm_idlest1_core:
576	.word	CM_IDLEST1_CORE_V
577sdrc_dlla_status:
578	.word	SDRC_DLLA_STATUS_V
579sdrc_dlla_ctrl:
580	.word	SDRC_DLLA_CTRL_V
581pm_prepwstst_core:
582	.word	PM_PREPWSTST_CORE_V
583pm_prepwstst_core_p:
584	.word	PM_PREPWSTST_CORE_P
585pm_prepwstst_mpu:
586	.word	PM_PREPWSTST_MPU_V
587pm_pwstctrl_mpu:
588	.word	PM_PWSTCTRL_MPU_P
589scratchpad_base:
590	.word	SCRATCHPAD_BASE_P
591sram_base:
592	.word	SRAM_BASE_P + 0x8000
593sdrc_power:
594	.word SDRC_POWER_V
595clk_stabilize_delay:
596	.word 0x000001FF
597assoc_mask:
598	.word	0x3ff
599numset_mask:
600	.word	0x7fff
601ttbrbit_mask:
602	.word	0xFFFFC000
603table_index_mask:
604	.word	0xFFF00000
605table_entry:
606	.word	0x00000C02
607cache_pred_disable_mask:
608	.word	0xFFFFE7FB
609control_stat:
610	.word	CONTROL_STAT
611ENTRY(omap34xx_cpu_suspend_sz)
612	.word	. - omap34xx_cpu_suspend	@ size in bytes, incl. literals
613