/*
 * (C) Copyright 2007
 * Texas Instruments
 * Karthik Dasu <karthik-dp@ti.com>
 *
 * (C) Copyright 2004
 * Texas Instruments, <www.ti.com>
 * Richard Woodruff <r-woodruff2@ti.com>
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License as
 * published by the Free Software Foundation; either version 2 of
 * the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston,
 * MA 02111-1307 USA
 */
#include <linux/linkage.h>

#include <asm/assembler.h>

#include "omap34xx.h"
#include "iomap.h"
#include "cm3xxx.h"
#include "prm3xxx.h"
#include "sdrc.h"
#include "sram.h"
#include "control.h"

/*
 * Register access definitions
 */
#define SDRC_SCRATCHPAD_SEM_OFFS	0xc
#define SDRC_SCRATCHPAD_SEM_V	OMAP343X_SCRATCHPAD_REGADDR\
					(SDRC_SCRATCHPAD_SEM_OFFS)
#define PM_PREPWSTST_CORE_P	OMAP3430_PRM_BASE + CORE_MOD +\
					OMAP3430_PM_PREPWSTST
#define PM_PWSTCTRL_MPU_P	OMAP3430_PRM_BASE + MPU_MOD + OMAP2_PM_PWSTCTRL
#define CM_IDLEST1_CORE_V	OMAP34XX_CM_REGADDR(CORE_MOD, CM_IDLEST1)
#define CM_IDLEST_CKGEN_V	OMAP34XX_CM_REGADDR(PLL_MOD, CM_IDLEST)
#define SRAM_BASE_P		OMAP3_SRAM_PA
#define CONTROL_STAT		OMAP343X_CTRL_BASE + OMAP343X_CONTROL_STATUS
#define CONTROL_MEM_RTA_CTRL	(OMAP343X_CTRL_BASE +\
					OMAP36XX_CONTROL_MEM_RTA_CTRL)

/* Move this when a correct place is available */
#define SCRATCHPAD_MEM_OFFS	0x310
#define SCRATCHPAD_BASE_P	(OMAP343X_CTRL_BASE +\
					OMAP343X_CONTROL_MEM_WKUP +\
					SCRATCHPAD_MEM_OFFS)
#define SDRC_POWER_V		OMAP34XX_SDRC_REGADDR(SDRC_POWER)
#define SDRC_SYSCONFIG_P	(OMAP343X_SDRC_BASE + SDRC_SYSCONFIG)
#define SDRC_MR_0_P		(OMAP343X_SDRC_BASE + SDRC_MR_0)
#define SDRC_EMR2_0_P		(OMAP343X_SDRC_BASE + SDRC_EMR2_0)
#define SDRC_MANUAL_0_P		(OMAP343X_SDRC_BASE + SDRC_MANUAL_0)
#define SDRC_MR_1_P		(OMAP343X_SDRC_BASE + SDRC_MR_1)
#define SDRC_EMR2_1_P		(OMAP343X_SDRC_BASE + SDRC_EMR2_1)
#define SDRC_MANUAL_1_P		(OMAP343X_SDRC_BASE + SDRC_MANUAL_1)
#define SDRC_DLLA_STATUS_V	OMAP34XX_SDRC_REGADDR(SDRC_DLLA_STATUS)
#define SDRC_DLLA_CTRL_V	OMAP34XX_SDRC_REGADDR(SDRC_DLLA_CTRL)

/*
 * This file needs to be built unconditionally as ARM to interoperate
 * correctly with non-Thumb-2-capable firmware.
 */
	.arm

/*
 * API functions
 */

	.text
/*
 * L2 cache needs to be toggled for stable OFF mode functionality on 3630.
 * This function sets up a flag that will allow for this toggling to take
 * place on 3630. Hopefully some future version will not need this.
 */
ENTRY(enable_omap3630_toggle_l2_on_restore)
	stmfd	sp!, {lr}	@ save registers on stack
	/* Setup so that we will disable and enable l2 */
	mov	r1, #0x1
	adrl	r3, l2dis_3630_offset	@ may be too distant for plain adr
	ldr	r2, [r3]		@ value for offset
	str	r1, [r2, r3]		@ write to l2dis_3630
	ldmfd	sp!, {pc}	@ restore regs and return
ENDPROC(enable_omap3630_toggle_l2_on_restore)

/*
 * Function to call ROM code to save the secure RAM context. This gets
 * relocated to SRAM, so it can live entirely in the .data section.
 * Otherwise we would need to initialize api_params separately.
 */
	.data
	.align	3
ENTRY(save_secure_ram_context)
	stmfd	sp!, {r4 - r11, lr}	@ save registers on stack
	adr	r3, api_params		@ r3 points to parameters
	str	r0, [r3,#0x4]		@ r0 has sdram address
	ldr	r12, high_mask
	and	r3, r3, r12
	ldr	r12, sram_phy_addr_mask
	orr	r3, r3, r12
	mov	r0, #25			@ set service ID for PPA
	mov	r12, r0			@ copy secure service ID in r12
	mov	r1, #0			@ set task id for ROM code in r1
	mov	r2, #4			@ set some flags in r2, r6
	mov	r6, #0xff
	dsb				@ data write barrier
	dmb				@ data memory barrier
	smc	#1			@ call SMI monitor (smi #1)
	nop
	nop
	nop
	nop
	ldmfd	sp!, {r4 - r11, pc}
	.align
sram_phy_addr_mask:
	.word	SRAM_BASE_P
high_mask:
	.word	0xffff
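/*
 * Parameter block passed to the ROM code; the word at offset 0x4 is
 * patched with the SDRAM buffer address (r0) before the SMC call above.
 */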
api_params:
	.word	0x4, 0x0, 0x0, 0x1, 0x1
ENDPROC(save_secure_ram_context)
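/* Size of save_secure_ram_context, used when the function is copied to SRAM */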
ENTRY(save_secure_ram_context_sz)
	.word	. - save_secure_ram_context

	.text

/*
 * ======================
 * == Idle entry point ==
 * ======================
 */

/*
 * Forces OMAP into idle state
 *
 * omap34xx_cpu_suspend() - This bit of code saves the CPU context if needed
 * and executes the WFI instruction. Calling WFI effectively changes the
 * power domain states to the desired target power states.
 *
 * Notes:
 * - only the minimum set of functions gets copied to internal SRAM at boot
 *   and after wake-up from OFF mode, cf. omap_push_sram_idle. The function
 *   pointers in SDRAM or SRAM are called depending on the desired low power
 *   target state.
 * - when the OMAP wakes up it continues at different execution points
 *   depending on the low power mode (non-OFF vs OFF modes),
 *   cf. 'Resume path for xxx mode' comments.
 */
	.align	3
ENTRY(omap34xx_cpu_suspend)
	stmfd	sp!, {r4 - r11, lr}	@ save registers on stack

	/*
	 * r0 contains information about saving context:
	 *   0 - No context lost
	 *   1 - Only L1 and logic lost
	 *   2 - Only L2 lost (even if L1 is retained, we clean it along with L2)
	 *   3 - Both L1 and L2 lost and logic lost
	 */

	/*
	 * For OFF mode: save context and jump to WFI in SDRAM (omap3_do_wfi)
	 * For non-OFF modes: jump to the WFI code in SRAM (omap3_do_wfi_sram)
	 */
	ldr	r4, omap3_do_wfi_sram_addr
	ldr	r5, [r4]
	cmp	r0, #0x0		@ If no context save required,
	bxeq	r5			@  jump to the WFI code in SRAM


	/* Otherwise fall through to the context save code below */
save_context_wfi:
	/*
	 * Jump out to the kernel flush routine:
	 *  - reusing that code is better
	 *  - it executes in a cached space so is faster than refetching per block
	 *  - should be faster and will change with the kernel
	 *  - 'might' have to copy address, load and jump to it
	 * Flush all data from the L1 data cache before disabling the
	 * SCTLR.C bit.
	 */
	ldr	r1, kernel_flush
	mov	lr, pc			@ set the return address
	bx	r1			@ call the kernel flush routine

	/*
	 * Clear the SCTLR.C bit to prevent further data cache
	 * allocation. Clearing SCTLR.C makes all data accesses
	 * strongly ordered so they no longer hit the cache.
	 */
	mrc	p15, 0, r0, c1, c0, 0
	bic	r0, r0, #(1 << 2)	@ Disable the C bit
	mcr	p15, 0, r0, c1, c0, 0
	isb

	/*
	 * Invalidate the L1 data cache. Even though only an invalidate is
	 * necessary, the exported flush API is used here. Doing a clean
	 * on an already clean cache is almost a NOP.
	 */
	ldr	r1, kernel_flush
	blx	r1
	b	omap3_do_wfi
ENDPROC(omap34xx_cpu_suspend)
omap3_do_wfi_sram_addr:
	.word omap3_do_wfi_sram
kernel_flush:
	.word v7_flush_dcache_all

/* ===================================
 * == WFI instruction => Enter idle ==
 * ===================================
 */

/*
 * Do WFI instruction
 * Includes the resume path for non-OFF modes
 *
 * This code gets copied to internal SRAM and is accessible
 * from both SDRAM and SRAM:
 * - executed from SRAM for non-off modes (omap3_do_wfi_sram),
 * - executed from SDRAM for OFF mode (omap3_do_wfi).
 */
	.align	3
ENTRY(omap3_do_wfi)
	ldr	r4, sdrc_power		@ read the SDRC_POWER register
	ldr	r5, [r4]		@ read the contents of SDRC_POWER
	orr	r5, r5, #0x40		@ enable self refresh on idle req
	str	r5, [r4]		@ write back to SDRC_POWER register

	/* Data memory barrier and Data sync barrier */
	dsb
	dmb

/*
 * ===================================
 * == WFI instruction => Enter idle ==
 * ===================================
 */
	wfi				@ wait for interrupt

/*
 * ===================================
 * == Resume path for non-OFF modes ==
 * ===================================
 */
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop

/*
 * This function implements the erratum ID i581 WA:
 *  SDRC state restore before accessing the SDRAM
 *
 * Only used at return from non-OFF mode. For OFF
 * mode the ROM code configures the SDRC and
 * the DPLL before calling the restore code directly
 * from DDR.
 */

/* Make sure SDRC accesses are ok */
wait_sdrc_ok:

/* DPLL3 must be locked before accessing the SDRC. Maybe the HW ensures this */
	ldr	r4, cm_idlest_ckgen
wait_dpll3_lock:
	ldr	r5, [r4]
	tst	r5, #1
	beq	wait_dpll3_lock

	ldr	r4, cm_idlest1_core
wait_sdrc_ready:
	ldr	r5, [r4]
	tst	r5, #0x2
	bne	wait_sdrc_ready
	/* allow DLL powerdown upon hw idle req */
	ldr	r4, sdrc_power
	ldr	r5, [r4]
	bic	r5, r5, #0x40
	str	r5, [r4]

is_dll_in_lock_mode:
	/* Is dll in lock mode? */
	ldr	r4, sdrc_dlla_ctrl
	ldr	r5, [r4]
	tst	r5, #0x4
	bne	exit_nonoff_modes	@ Return if locked
	/* wait till dll locks */
wait_dll_lock_timed:
	ldr	r4, sdrc_dlla_status
	/* Wait 20uS for lock */
	mov	r6, #8
wait_dll_lock:
	subs	r6, r6, #0x1
	beq	kick_dll
	ldr	r5, [r4]
	and	r5, r5, #0x4
	cmp	r5, #0x4
	bne	wait_dll_lock
	b	exit_nonoff_modes	@ Return when locked

	/* disable/reenable DLL if not locked */
kick_dll:
	ldr	r4, sdrc_dlla_ctrl
	ldr	r5, [r4]
	mov	r6, r5
	bic	r6, #(1<<3)		@ disable dll
	str	r6, [r4]
	dsb
	orr	r6, r6, #(1<<3)		@ enable dll
	str	r6, [r4]
	dsb
	b	wait_dll_lock_timed

exit_nonoff_modes:
	/* Re-enable C-bit if needed */
	mrc	p15, 0, r0, c1, c0, 0
	tst	r0, #(1 << 2)		@ Check C bit enabled?
	orreq	r0, r0, #(1 << 2)	@ Enable the C bit if cleared
	mcreq	p15, 0, r0, c1, c0, 0
	isb

/*
 * ===================================
 * == Exit point from non-OFF modes ==
 * ===================================
 */
	ldmfd	sp!, {r4 - r11, pc}	@ restore regs and return
ENDPROC(omap3_do_wfi)
sdrc_power:
	.word	SDRC_POWER_V
cm_idlest1_core:
	.word	CM_IDLEST1_CORE_V
cm_idlest_ckgen:
	.word	CM_IDLEST_CKGEN_V
sdrc_dlla_status:
	.word	SDRC_DLLA_STATUS_V
sdrc_dlla_ctrl:
	.word	SDRC_DLLA_CTRL_V
ENTRY(omap3_do_wfi_sz)
	.word	. - omap3_do_wfi


/*
 * ==============================
 * == Resume path for OFF mode ==
 * ==============================
 */

/*
 * The restore_* functions are called by the ROM code
 *  when back from WFI in OFF mode.
 * Cf. the get_*restore_pointer functions.
 *
 *  restore_es3: applies to 34xx >= ES3.0
 *  restore_3630: applies to 36xx
 *  restore: common code for 3xxx
 *
 * Note: when back from CORE and MPU OFF mode we are running
 *  from SDRAM with the MMU, caches and branch prediction disabled.
 *  Also the SRAM content has been cleared.
 */
ENTRY(omap3_restore_es3)
	ldr	r5, pm_prepwstst_core_p
	ldr	r4, [r5]
	and	r4, r4, #0x3
	cmp	r4, #0x0	@ Check if previous power state of CORE is OFF
	bne	omap3_restore	@ If not, jump to the OMAP3 common code
	adr	r0, es3_sdrc_fix
	ldr	r1, sram_base
	ldr	r2, es3_sdrc_fix_sz
	mov	r2, r2, ror #2	@ convert size in bytes to words
copy_to_sram:
	ldmia	r0!, {r3}	@ val = *src
	stmia	r1!, {r3}	@ *dst = val
	subs	r2, r2, #0x1	@ num_words--
	bne	copy_to_sram
	ldr	r1, sram_base
	blx	r1		@ run es3_sdrc_fix from SRAM
	b	omap3_restore	@ Continue with the OMAP3 common code
ENDPROC(omap3_restore_es3)

ENTRY(omap3_restore_3630)
	ldr	r1, pm_prepwstst_core_p
	ldr	r2, [r1]
	and	r2, r2, #0x3
	cmp	r2, #0x0	@ Check if previous power state of CORE is OFF
	bne	omap3_restore	@ If not, jump to the OMAP3 common code
	/* Disable RTA before giving control */
	ldr	r1, control_mem_rta
	mov	r2, #OMAP36XX_RTA_DISABLE
	str	r2, [r1]
ENDPROC(omap3_restore_3630)

	/* Fall through to common code for the remaining logic */

ENTRY(omap3_restore)
	/*
	 * Read the pwstctrl register to check the reason for mpu reset.
	 * This tells us what was lost.
	 */
	ldr	r1, pm_pwstctrl_mpu
	ldr	r2, [r1]
	and	r2, r2, #0x3
	cmp	r2, #0x0	@ Check if target power state was OFF or RET
	bne	logic_l1_restore

	adr	r1, l2dis_3630_offset	@ address for offset
	ldr	r0, [r1]		@ value for offset
	ldr	r0, [r1, r0]		@ value at l2dis_3630
	cmp	r0, #0x1	@ should we disable L2 on 3630?
	bne	skipl2dis
	mrc	p15, 0, r0, c1, c0, 1
	bic	r0, r0, #2	@ disable L2 cache
	mcr	p15, 0, r0, c1, c0, 1
skipl2dis:
	ldr	r0, control_stat
	ldr	r1, [r0]
	and	r1, #0x700		@ device type bits of CONTROL_STATUS
	cmp	r1, #0x300		@ GP device?
	beq	l2_inv_gp
	adr	r0, l2_inv_api_params_offset
	ldr	r3, [r0]
	add	r3, r3, r0		@ r3 points to dummy parameters
	mov	r0, #40			@ set service ID for PPA
	mov	r12, r0			@ copy secure Service ID in r12
	mov	r1, #0			@ set task id for ROM code in r1
	mov	r2, #4			@ set some flags in r2, r6
	mov	r6, #0xff
	dsb				@ data write barrier
	dmb				@ data memory barrier
	smc	#1			@ call SMI monitor (smi #1)
	/* Write to Aux control register to set some bits */
	mov	r0, #42			@ set service ID for PPA
	mov	r12, r0			@ copy secure Service ID in r12
	mov	r1, #0			@ set task id for ROM code in r1
	mov	r2, #4			@ set some flags in r2, r6
	mov	r6, #0xff
	ldr	r4, scratchpad_base
	ldr	r3, [r4, #0xBC]		@ r3 points to parameters
	dsb				@ data write barrier
	dmb				@ data memory barrier
	smc	#1			@ call SMI monitor (smi #1)

#ifdef CONFIG_OMAP3_L2_AUX_SECURE_SAVE_RESTORE
	/* Restore L2 aux control register */
					@ set service ID for PPA
	mov	r0, #CONFIG_OMAP3_L2_AUX_SECURE_SERVICE_SET_ID
	mov	r12, r0			@ copy service ID in r12
	mov	r1, #0			@ set task ID for ROM code in r1
	mov	r2, #4			@ set some flags in r2, r6
	mov	r6, #0xff
	ldr	r4, scratchpad_base
	ldr	r3, [r4, #0xBC]
	adds	r3, r3, #8		@ r3 points to parameters
	dsb				@ data write barrier
	dmb				@ data memory barrier
	smc	#1			@ call SMI monitor (smi #1)
#endif
	b	logic_l1_restore

	.align
l2_inv_api_params_offset:
	.long	l2_inv_api_params - .
l2_inv_gp:
	/* Execute smi to invalidate L2 cache */
	mov	r12, #0x1		@ set up to invalidate L2
	smc	#0			@ Call SMI monitor (smi #0)
	/* Write to Aux control register to set some bits */
	ldr	r4, scratchpad_base
	ldr	r3, [r4,#0xBC]
	ldr	r0, [r3,#4]
	mov	r12, #0x3
	smc	#0			@ Call SMI monitor (smi #0)
	ldr	r4, scratchpad_base
	ldr	r3, [r4,#0xBC]
	ldr	r0, [r3,#12]
	mov	r12, #0x2
	smc	#0			@ Call SMI monitor (smi #0)
logic_l1_restore:
	adr	r0, l2dis_3630_offset	@ address for offset
	ldr	r1, [r0]		@ value for offset
	ldr	r1, [r0, r1]		@ value at l2dis_3630
	cmp	r1, #0x1		@ Test if L2 re-enable needed on 3630
	bne	skipl2reen
	mrc	p15, 0, r1, c1, c0, 1
	orr	r1, r1, #2		@ re-enable L2 cache
	mcr	p15, 0, r1, c1, c0, 1
skipl2reen:

	/* Now branch to the common CPU resume function */
	b	cpu_resume
ENDPROC(omap3_restore)

	.ltorg

/*
 * Local variables
 */
pm_prepwstst_core_p:
	.word	PM_PREPWSTST_CORE_P
pm_pwstctrl_mpu:
	.word	PM_PWSTCTRL_MPU_P
scratchpad_base:
	.word	SCRATCHPAD_BASE_P
sram_base:
	.word	SRAM_BASE_P + 0x8000	@ SRAM destination for the es3_sdrc_fix copy
control_stat:
	.word	CONTROL_STAT
control_mem_rta:
	.word	CONTROL_MEM_RTA_CTRL
l2dis_3630_offset:
	.long	l2dis_3630 - .

	.data
	.align	2
l2dis_3630:
	.word	0

	.data
	.align	2
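/* Dummy parameter block for the PPA service used in the L2 invalidate path above */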
l2_inv_api_params:
	.word	0x1, 0x00

/*
 * Internal functions
 */

/*
 * This function implements the erratum ID i443 WA, which applies to
 * 34xx >= ES3.0. It is copied to and run from SRAM in order to
 * reconfigure the SDRC parameters.
 */
	.text
	.align	3
ENTRY(es3_sdrc_fix)
	ldr	r4, sdrc_syscfg		@ get config addr
	ldr	r5, [r4]		@ get value
	tst	r5, #0x100		@ is part access blocked
	it	eq
	biceq	r5, r5, #0x100		@ clear bit if set
	str	r5, [r4]		@ write back change
	ldr	r4, sdrc_mr_0		@ get config addr
	ldr	r5, [r4]		@ get value
	str	r5, [r4]		@ write back change
	ldr	r4, sdrc_emr2_0		@ get config addr
	ldr	r5, [r4]		@ get value
	str	r5, [r4]		@ write back change
	ldr	r4, sdrc_manual_0	@ get config addr
	mov	r5, #0x2		@ autorefresh command
	str	r5, [r4]		@ kick off refreshes
	ldr	r4, sdrc_mr_1		@ get config addr
	ldr	r5, [r4]		@ get value
	str	r5, [r4]		@ write back change
	ldr	r4, sdrc_emr2_1		@ get config addr
	ldr	r5, [r4]		@ get value
	str	r5, [r4]		@ write back change
	ldr	r4, sdrc_manual_1	@ get config addr
	mov	r5, #0x2		@ autorefresh command
	str	r5, [r4]		@ kick off refreshes
	bx	lr

/*
 * Local variables
 */
	.align
sdrc_syscfg:
	.word	SDRC_SYSCONFIG_P
sdrc_mr_0:
	.word	SDRC_MR_0_P
sdrc_emr2_0:
	.word	SDRC_EMR2_0_P
sdrc_manual_0:
	.word	SDRC_MANUAL_0_P
sdrc_mr_1:
	.word	SDRC_MR_1_P
sdrc_emr2_1:
	.word	SDRC_EMR2_1_P
sdrc_manual_1:
	.word	SDRC_MANUAL_1_P
ENDPROC(es3_sdrc_fix)
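/* Size of es3_sdrc_fix, used by the copy loop in omap3_restore_es3 */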
ENTRY(es3_sdrc_fix_sz)
	.word	. - es3_sdrc_fix