/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Low level suspend code for AM33XX SoCs
 *
 * Copyright (C) 2012-2018 Texas Instruments Incorporated - http://www.ti.com/
 *	Dave Gerlach, Vaibhav Bedia
 */

#include <linux/linkage.h>
#include <linux/ti-emif-sram.h>
#include <asm/assembler.h>
#include <asm/memory.h>

#include "iomap.h"
#include "cm33xx.h"

#define AM33XX_CM_CLKCTRL_MODULESTATE_DISABLED		0x00030000
#define AM33XX_CM_CLKCTRL_MODULEMODE_DISABLE		0x0003
#define AM33XX_CM_CLKCTRL_MODULEMODE_ENABLE		0x0002

	.arm
	.align 3

ENTRY(am33xx_do_wfi)
	stmfd	sp!, {r4 - r11, lr}	@ save registers on stack

	/*
	 * Flush all data from the L1 and L2 data caches before
	 * disabling the SCTLR.C bit.
	 */
	ldr	r1, kernel_flush
	blx	r1

	/*
	 * Clear the SCTLR.C bit to prevent further data cache
	 * allocation. With SCTLR.C cleared, all data accesses are
	 * strongly ordered and no longer hit the cache.
	 */
	mrc	p15, 0, r0, c1, c0, 0
	bic	r0, r0, #(1 << 2)	@ Disable the C bit
	mcr	p15, 0, r0, c1, c0, 0
	isb

	/*
	 * Flush the L1 and L2 data caches again so that no valid or
	 * dirty lines remain now that further allocation is disabled.
	 */
	ldr	r1, kernel_flush
	blx	r1

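	/*
	 * Call into the EMIF PM function table that has been copied to
	 * SRAM: place the SDRAM in self-refresh and save the EMIF
	 * context before the EMIF module is disabled below.
	 */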
	adr	r9, am33xx_emif_sram_table

	ldr	r3, [r9, #EMIF_PM_ENTER_SR_OFFSET]
	blx	r3

	ldr	r3, [r9, #EMIF_PM_SAVE_CONTEXT_OFFSET]
	blx	r3

	/* Disable EMIF */
	ldr	r1, virt_emif_clkctrl
	ldr	r2, [r1]
	bic	r2, r2, #AM33XX_CM_CLKCTRL_MODULEMODE_DISABLE
	str	r2, [r1]

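	/* Wait until CM_PER_EMIF_CLKCTRL reports the module as disabled */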
	ldr	r1, virt_emif_clkctrl
wait_emif_disable:
	ldr	r2, [r1]
	mov	r3, #AM33XX_CM_CLKCTRL_MODULESTATE_DISABLED
	cmp	r2, r3
	bne	wait_emif_disable

	/*
	 * For the MPU WFI to be registered as an interrupt
	 * to WKUP_M3, MPU_CLKCTRL.MODULEMODE needs to be set
	 * to DISABLED
	 */
	ldr	r1, virt_mpu_clkctrl
	ldr	r2, [r1]
	bic	r2, r2, #AM33XX_CM_CLKCTRL_MODULEMODE_DISABLE
	str	r2, [r1]

	/*
	 * Execute an ISB instruction to ensure that all of the
	 * CP15 register changes have been committed.
	 */
	isb

	/*
	 * Execute a barrier instruction to ensure that all cache,
	 * TLB and branch predictor maintenance operations issued
	 * have completed.
	 */
	dsb
	dmb

	/*
	 * Execute a WFI instruction and wait until the
	 * STANDBYWFI output is asserted to indicate that the
	 * CPU is in an idle, low power state. The CPU can
	 * speculatively prefetch instructions, so add NOPs after
	 * the WFI: thirteen NOPs to cover the Cortex-A8 pipeline.
	 */
	wfi

	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop

	/* We come here in case of an abort due to a late interrupt */

	/* Set MPU_CLKCTRL.MODULEMODE back to ENABLE */
	ldr	r1, virt_mpu_clkctrl
	mov	r2, #AM33XX_CM_CLKCTRL_MODULEMODE_ENABLE
	str	r2, [r1]

	/* Re-enable EMIF */
	ldr	r1, virt_emif_clkctrl
	mov	r2, #AM33XX_CM_CLKCTRL_MODULEMODE_ENABLE
	str	r2, [r1]
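	/* Wait until CM_PER_EMIF_CLKCTRL reads back the ENABLE value written */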
wait_emif_enable:
	ldr	r3, [r1]
	cmp	r2, r3
	bne	wait_emif_enable

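	/* Back out of the self-refresh entry since the suspend attempt was aborted */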
	ldr	r1, [r9, #EMIF_PM_ABORT_SR_OFFSET]
	blx	r1

	/*
	 * Set SCTLR.C bit to allow data cache allocation
	 */
	mrc	p15, 0, r0, c1, c0, 0
	orr	r0, r0, #(1 << 2)	@ Enable the C bit
	mcr	p15, 0, r0, c1, c0, 0
	isb

	/* Let the suspend code know about the abort */
	mov	r0, #1
	ldmfd	sp!, {r4 - r11, pc}	@ restore regs and return
ENDPROC(am33xx_do_wfi)

	.align
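/*
 * Offset of the resume entry point from the start of am33xx_do_wfi;
 * used to locate am33xx_resume_from_deep_sleep within the copy of
 * this code that runs from SRAM.
 */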
ENTRY(am33xx_resume_offset)
	.word . - am33xx_do_wfi

ENTRY(am33xx_resume_from_deep_sleep)
	/* Re-enable EMIF */
	ldr	r0, phys_emif_clkctrl
	mov	r1, #AM33XX_CM_CLKCTRL_MODULEMODE_ENABLE
	str	r1, [r0]
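	/* Wait until the EMIF clock control register reads back the ENABLE value */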
wait_emif_enable1:
	ldr	r2, [r0]
	cmp	r1, r2
	bne	wait_emif_enable1

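	/*
	 * Restore the saved EMIF context and bring the SDRAM back out
	 * of self-refresh via the EMIF PM function table.
	 */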
	adr	r9, am33xx_emif_sram_table

	ldr	r1, [r9, #EMIF_PM_RESTORE_CONTEXT_OFFSET]
	blx	r1

	ldr	r1, [r9, #EMIF_PM_EXIT_SR_OFFSET]
	blx	r1

resume_to_ddr:
	/* We are back. Branch to the common CPU resume routine */
	mov	r0, #0
	ldr	pc, resume_addr
ENDPROC(am33xx_resume_from_deep_sleep)

/*
 * Local variables
 */
	.align
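/*
 * Physical address of cpu_resume: PAGE_OFFSET is replaced with the
 * start of DDR (0x80000000) because this is loaded into pc while the
 * MMU is still off.
 */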
resume_addr:
	.word	cpu_resume - PAGE_OFFSET + 0x80000000
kernel_flush:
	.word	v7_flush_dcache_all
virt_mpu_clkctrl:
	.word	AM33XX_CM_MPU_MPU_CLKCTRL
virt_emif_clkctrl:
	.word	AM33XX_CM_PER_EMIF_CLKCTRL
phys_emif_clkctrl:
	.word	(AM33XX_CM_BASE + AM33XX_CM_PER_MOD + \
		AM33XX_CM_PER_EMIF_CLKCTRL_OFFSET)

.align 3
/* DDR related defines */
am33xx_emif_sram_table:
	.space EMIF_PM_FUNCTIONS_SIZE

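/*
 * Table of symbols handed to the platform PM code so it can copy
 * am33xx_do_wfi into SRAM and locate the function table and data
 * areas within that copy.
 */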
ENTRY(am33xx_pm_sram)
	.word am33xx_do_wfi
	.word am33xx_do_wfi_sz
	.word am33xx_resume_offset
	.word am33xx_emif_sram_table
	.word am33xx_pm_ro_sram_data

.align 3
ENTRY(am33xx_pm_ro_sram_data)
	.space AMX3_PM_RO_SRAM_DATA_SIZE

ENTRY(am33xx_do_wfi_sz)
	.word	. - am33xx_do_wfi
213