/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Low level suspend code for AM33XX SoCs
 *
 * Copyright (C) 2012-2018 Texas Instruments Incorporated - http://www.ti.com/
 *	Dave Gerlach, Vaibhav Bedia
 */
8
#include <generated/ti-pm-asm-offsets.h>
#include <linux/linkage.h>
#include <linux/platform_data/pm33xx.h>
#include <linux/ti-emif-sram.h>
#include <asm/assembler.h>
#include <asm/memory.h>

#include "iomap.h"
#include "cm33xx.h"

/*
 * CM_xxx_CLKCTRL register values: MODULEMODE lives in bits [1:0],
 * the module idle status in bits [17:16].
 */
#define AM33XX_CM_CLKCTRL_MODULESTATE_DISABLED	0x00030000
#define AM33XX_CM_CLKCTRL_MODULEMODE_DISABLE	0x0003
#define AM33XX_CM_CLKCTRL_MODULEMODE_ENABLE	0x0002

/* replicated define because linux/bitops.h cannot be included in assembly */
#define BIT(nr)			(1 << (nr))

	.arm
	.align 3
28
/*
 * am33xx_do_wfi - execute the low-power WFI sequence.
 *
 * In:  r0 = wfi_flags bitmask (WFI_FLAG_* from pm33xx.h) selecting which
 *      steps to perform (cache flush, EMIF self-refresh/save, wake-M3).
 * Out: r0 = 1 if we fell through the WFI (aborted suspend due to a late
 *      interrupt); on a real deep-sleep cycle execution resumes elsewhere
 *      via am33xx_resume_from_deep_sleep.
 *
 * This code is copied to and run from SRAM (see am33xx_pm_sram table at
 * the bottom of this file), so all helper addresses are fetched from
 * literal-pool words or from the EMIF function table rather than called
 * directly.
 */
ENTRY(am33xx_do_wfi)
	stmfd	sp!, {r4 - r11, lr}	@ save registers on stack

	/*
	 * Stash the wfi_flags arg in the SRAM data area so it can be
	 * re-read after the cache is flushed/disabled below.
	 */
	mov	r4, r0
	adr	r3, am33xx_pm_ro_sram_data
	ldr	r2, [r3, #AMX3_PM_RO_SRAM_DATA_VIRT_OFFSET]
	str	r4, [r2, #AMX3_PM_WFI_FLAGS_OFFSET]

	/* Only flush cache if we know we are losing MPU context */
	tst	r4, #WFI_FLAG_FLUSH_CACHE
	beq	cache_skip_flush

	/*
	 * Flush all data from the L1 and L2 data cache before disabling
	 * SCTLR.C bit.
	 */
	ldr	r1, kernel_flush
	blx	r1

	/*
	 * Clear the SCTLR.C bit to prevent further data cache
	 * allocation. Clearing SCTLR.C would make all the data accesses
	 * strongly ordered and would not hit the cache.
	 */
	mrc	p15, 0, r0, c1, c0, 0
	bic	r0, r0, #(1 << 2)	@ Disable the C bit
	mcr	p15, 0, r0, c1, c0, 0
	isb

	/*
	 * Invalidate L1 and L2 data cache.
	 */
	ldr	r1, kernel_flush
	blx	r1

	/*
	 * Reload wfi_flags from the SRAM data area; presumably r4 cannot
	 * be trusted across the flush helper once the cache is off —
	 * NOTE(review): re-read is taken from upstream, confirm rationale.
	 */
	adr	r3, am33xx_pm_ro_sram_data
	ldr	r2, [r3, #AMX3_PM_RO_SRAM_DATA_VIRT_OFFSET]
	ldr	r4, [r2, #AMX3_PM_WFI_FLAGS_OFFSET]

cache_skip_flush:
	/* Check if we want self refresh */
	tst	r4, #WFI_FLAG_SELF_REFRESH
	beq	emif_skip_enter_sr

	/* r9 = base of the EMIF PM function table copied into SRAM */
	adr	r9, am33xx_emif_sram_table

	ldr	r3, [r9, #EMIF_PM_ENTER_SR_OFFSET]
	blx	r3

emif_skip_enter_sr:
	/* Only necessary if PER is losing context */
	tst	r4, #WFI_FLAG_SAVE_EMIF
	beq	emif_skip_save

	ldr	r3, [r9, #EMIF_PM_SAVE_CONTEXT_OFFSET]
	blx	r3

emif_skip_save:
	/* Only can disable EMIF if we have entered self refresh */
	tst	r4, #WFI_FLAG_SELF_REFRESH
	beq	emif_skip_disable

	/* Disable EMIF: clear MODULEMODE bits in its CLKCTRL register */
	ldr	r1, virt_emif_clkctrl
	ldr	r2, [r1]
	bic	r2, r2, #AM33XX_CM_CLKCTRL_MODULEMODE_DISABLE
	str	r2, [r1]

	/* Busy-wait until CLKCTRL reads back as fully disabled */
	ldr	r1, virt_emif_clkctrl
wait_emif_disable:
	ldr	r2, [r1]
	mov	r3, #AM33XX_CM_CLKCTRL_MODULESTATE_DISABLED
	cmp	r2, r3
	bne	wait_emif_disable

emif_skip_disable:
	tst	r4, #WFI_FLAG_WAKE_M3
	beq	wkup_m3_skip

	/*
	 * For the MPU WFI to be registered as an interrupt
	 * to WKUP_M3, MPU_CLKCTRL.MODULEMODE needs to be set
	 * to DISABLED
	 */
	ldr	r1, virt_mpu_clkctrl
	ldr	r2, [r1]
	bic	r2, r2, #AM33XX_CM_CLKCTRL_MODULEMODE_DISABLE
	str	r2, [r1]

wkup_m3_skip:
	/*
	 * Execute an ISB instruction to ensure that all of the
	 * CP15 register changes have been committed.
	 */
	isb

	/*
	 * Execute a barrier instruction to ensure that all cache,
	 * TLB and branch predictor maintenance operations issued
	 * have completed.
	 */
	dsb
	dmb

	/*
	 * Execute a WFI instruction and wait until the
	 * STANDBYWFI output is asserted to indicate that the
	 * CPU is in idle and low power state. CPU can speculatively
	 * prefetch the instructions so add NOPs after WFI. Thirteen
	 * NOPs as per Cortex-A8 pipeline.
	 */
	wfi

	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop

	/* We come here in case of an abort due to a late interrupt */

	/* Set MPU_CLKCTRL.MODULEMODE back to ENABLE */
	ldr	r1, virt_mpu_clkctrl
	mov	r2, #AM33XX_CM_CLKCTRL_MODULEMODE_ENABLE
	str	r2, [r1]

	/* Re-enable EMIF */
	ldr	r1, virt_emif_clkctrl
	mov	r2, #AM33XX_CM_CLKCTRL_MODULEMODE_ENABLE
	str	r2, [r1]
	/* Busy-wait until the CLKCTRL readback matches the written value */
wait_emif_enable:
	ldr	r3, [r1]
	cmp	r2, r3
	bne	wait_emif_enable

	/* Only necessary if PER is losing context */
	tst	r4, #WFI_FLAG_SELF_REFRESH
	beq	emif_skip_exit_sr_abt

	/* Take the DDR back out of self-refresh via the abort path */
	adr	r9, am33xx_emif_sram_table
	ldr	r1, [r9, #EMIF_PM_ABORT_SR_OFFSET]
	blx	r1

emif_skip_exit_sr_abt:
	tst	r4, #WFI_FLAG_FLUSH_CACHE
	beq	cache_skip_restore

	/*
	 * Set SCTLR.C bit to allow data cache allocation
	 */
	mrc	p15, 0, r0, c1, c0, 0
	orr	r0, r0, #(1 << 2)	@ Enable the C bit
	mcr	p15, 0, r0, c1, c0, 0
	isb

cache_skip_restore:
	/* Let the suspend code know about the abort */
	mov	r0, #1
	ldmfd	sp!, {r4 - r11, pc}	@ restore regs and return
ENDPROC(am33xx_do_wfi)
198
	.align
/*
 * Byte offset of the resume entry point from the start of am33xx_do_wfi;
 * consumed by the C side via the am33xx_pm_sram table below.
 */
ENTRY(am33xx_resume_offset)
	.word . - am33xx_do_wfi
202
/*
 * am33xx_resume_from_deep_sleep - entry point after a real deep-sleep cycle.
 *
 * Entered with MPU context lost; uses the *physical* EMIF CLKCTRL address
 * (phys_emif_clkctrl) since we run before the MMU mappings are restored.
 * Re-enables the EMIF clock, restores EMIF context and exits DDR
 * self-refresh via the SRAM function table, then jumps to the generic
 * cpu_resume with r0 = 0 (no abort).
 */
ENTRY(am33xx_resume_from_deep_sleep)
	/* Re-enable EMIF */
	ldr	r0, phys_emif_clkctrl
	mov	r1, #AM33XX_CM_CLKCTRL_MODULEMODE_ENABLE
	str	r1, [r0]
	/* Busy-wait until the CLKCTRL readback matches the written value */
wait_emif_enable1:
	ldr	r2, [r0]
	cmp	r1, r2
	bne	wait_emif_enable1

	/* r9 = base of the EMIF PM function table copied into SRAM */
	adr	r9, am33xx_emif_sram_table

	ldr	r1, [r9, #EMIF_PM_RESTORE_CONTEXT_OFFSET]
	blx	r1

	ldr	r1, [r9, #EMIF_PM_EXIT_SR_OFFSET]
	blx	r1

resume_to_ddr:
	/* We are back. Branch to the common CPU resume routine */
	mov	r0, #0
	ldr	pc, resume_addr
ENDPROC(am33xx_resume_from_deep_sleep)
226
/*
 * Local variables
 */
	.align
kernel_flush:
	.word v7_flush_dcache_all	/* address of the v7 D-cache flush helper */
virt_mpu_clkctrl:
	.word AM33XX_CM_MPU_MPU_CLKCTRL	/* virtual addr of MPU CLKCTRL reg */
virt_emif_clkctrl:
	.word AM33XX_CM_PER_EMIF_CLKCTRL /* virtual addr of EMIF CLKCTRL reg */
phys_emif_clkctrl:
	/* Physical EMIF CLKCTRL address, used on resume while MMU is off */
	.word (AM33XX_CM_BASE + AM33XX_CM_PER_MOD + \
	       AM33XX_CM_PER_EMIF_CLKCTRL_OFFSET)

.align 3
/* DDR related defines */
/* Table of EMIF PM function pointers, filled in by the ti-emif-sram code */
am33xx_emif_sram_table:
	.space EMIF_PM_FUNCTIONS_SIZE

/*
 * Descriptor handed to the C suspend code: addresses/sizes it needs to
 * copy this code into SRAM and locate its data.
 */
ENTRY(am33xx_pm_sram)
	.word am33xx_do_wfi
	.word am33xx_do_wfi_sz
	.word am33xx_resume_offset
	.word am33xx_emif_sram_table
	.word am33xx_pm_ro_sram_data

resume_addr:
/* Physical address of cpu_resume (virtual minus PAGE_OFFSET, plus DDR base) */
.word cpu_resume - PAGE_OFFSET + 0x80000000

.align 3
/* Scratch data area in SRAM (holds wfi_flags, virt offset, etc.) */
ENTRY(am33xx_pm_ro_sram_data)
	.space AMX3_PM_RO_SRAM_DATA_SIZE

/* Total size of am33xx_do_wfi, for the SRAM copy */
ENTRY(am33xx_do_wfi_sz)
	.word . - am33xx_do_wfi