/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Low level suspend code for AM43XX SoCs
 *
 * Copyright (C) 2013-2018 Texas Instruments Incorporated - http://www.ti.com/
 *	Dave Gerlach, Vaibhav Bedia
 */
8
9#include <generated/ti-pm-asm-offsets.h>
10#include <linux/linkage.h>
11#include <linux/ti-emif-sram.h>
12#include <linux/platform_data/pm33xx.h>
13#include <asm/assembler.h>
14#include <asm/hardware/cache-l2x0.h>
15#include <asm/memory.h>
16
17#include "cm33xx.h"
18#include "common.h"
19#include "iomap.h"
20#include "omap-secure.h"
21#include "omap44xx.h"
22#include "prm33xx.h"
23#include "prcm43xx.h"
24
/* replicated define because linux/bitops.h cannot be included in assembly */
#define BIT(nr)			(1 << (nr))

/*
 * CM_CLKCTRL MODULEMODE / MODULESTATE field values used below to gate
 * and ungate the MPU and EMIF modules through the PRCM.
 */
#define AM33XX_CM_CLKCTRL_MODULESTATE_DISABLED			0x00030000
#define AM33XX_CM_CLKCTRL_MODULEMODE_DISABLE			0x0003
#define AM33XX_CM_CLKCTRL_MODULEMODE_ENABLE			0x0002

/* Values written to the PRM EMIF control register on resume */
#define AM43XX_EMIF_POWEROFF_ENABLE			0x1
#define AM43XX_EMIF_POWEROFF_DISABLE			0x0

/* CLKSTCTRL.CLKTRCTRL values for forcing / releasing clockdomain sleep */
#define AM43XX_CM_CLKSTCTRL_CLKTRCTRL_SW_SLEEP		0x1
#define AM43XX_CM_CLKSTCTRL_CLKTRCTRL_HW_AUTO		0x3

/* Physical base address of the AM43xx PRCM block */
#define AM43XX_CM_BASE				0x44DF0000

/* Translate a PRCM instance + register offset to its L4_WKUP virtual address */
#define AM43XX_CM_REGADDR(inst, reg)				\
	AM33XX_L4_WK_IO_ADDRESS(AM43XX_CM_BASE + (inst) + (reg))

#define AM43XX_CM_MPU_CLKSTCTRL AM43XX_CM_REGADDR(AM43XX_CM_MPU_INST, \
					AM43XX_CM_MPU_MPU_CDOFFS)
#define AM43XX_CM_MPU_MPU_CLKCTRL AM43XX_CM_REGADDR(AM43XX_CM_MPU_INST, \
					AM43XX_CM_MPU_MPU_CLKCTRL_OFFSET)
#define AM43XX_CM_PER_EMIF_CLKCTRL AM43XX_CM_REGADDR(AM43XX_CM_PER_INST, \
					AM43XX_CM_PER_EMIF_CLKCTRL_OFFSET)
#define AM43XX_PRM_EMIF_CTRL_OFFSET			0x0030

/*
 * RTC subsystem register offsets and the PMIC power-enable control bits
 * used for the RTC-only + DDR-in-self-refresh power-off sequence.
 */
#define RTC_SECONDS_REG					0x0
#define RTC_PMIC_REG					0x98
#define RTC_PMIC_POWER_EN				BIT(16)
#define RTC_PMIC_EXT_WAKEUP_STS				BIT(12)
#define RTC_PMIC_EXT_WAKEUP_POL				BIT(4)
#define RTC_PMIC_EXT_WAKEUP_EN				BIT(0)

	.arm
	.align	3		@ 8-byte align the code/literals that follow
60
/*
 * am43xx_do_wfi - execute the low-power WFI sequence
 *
 * In:  r0 = wfi_flags bitmask (WFI_FLAG_* from pm33xx.h) selecting which
 *	steps to perform: cache flush, EMIF self-refresh/save/disable,
 *	RTC-only power-off, WKUP_M3 handshake.
 * Out: r0 = 1 if we return here at all (a late interrupt aborted the
 *	suspend); on a successful deep sleep this function does not
 *	return - execution resumes in am43xx_resume_from_deep_sleep.
 *
 * r4 holds wfi_flags throughout (re-read from SRAM after the L2
 * maintenance section, which reuses r4). r8 caches the L2 controller
 * virtual base, r9 the EMIF PM function table.
 */
ENTRY(am43xx_do_wfi)
	stmfd	sp!, {r4 - r11, lr}	@ save registers on stack

	/* Save wfi_flags arg to data space */
	mov	r4, r0
	adr	r3, am43xx_pm_ro_sram_data
	ldr	r2, [r3, #AMX3_PM_RO_SRAM_DATA_VIRT_OFFSET]
	str	r4, [r2, #AMX3_PM_WFI_FLAGS_OFFSET]

#ifdef CONFIG_CACHE_L2X0
	/* Retrieve l2 cache virt address BEFORE we shut off EMIF */
	ldr	r1, get_l2cache_base
	blx	r1
	mov	r8, r0			@ r8 = PL310 virtual base for later use
#endif

	/* Only flush cache if we know we are losing MPU context */
	tst	r4, #WFI_FLAG_FLUSH_CACHE
	beq	cache_skip_flush

	/*
	 * Flush all data from the L1 and L2 data cache before disabling
	 * SCTLR.C bit.
	 */
	ldr	r1, kernel_flush
	blx	r1

	/*
	 * Clear the SCTLR.C bit to prevent further data cache
	 * allocation. Clearing SCTLR.C would make all the data accesses
	 * strongly ordered and would not hit the cache.
	 */
	mrc	p15, 0, r0, c1, c0, 0
	bic	r0, r0, #(1 << 2)	@ Disable the C bit
	mcr	p15, 0, r0, c1, c0, 0
	isb
	dsb

	/*
	 * Invalidate L1 and L2 data cache.
	 * (second flush pass, now with the C bit off, so nothing dirty
	 * can remain behind us)
	 */
	ldr	r1, kernel_flush
	blx	r1

#ifdef CONFIG_CACHE_L2X0
	/*
	 * Clean and invalidate the L2 cache.
	 */
#ifdef CONFIG_PL310_ERRATA_727915
	/* Enable PL310 debug ctrl (DWB+DCL) around the clean+inv, per errata */
	mov	r0, #0x03
	mov	r12, #OMAP4_MON_L2X0_DBG_CTRL_INDEX
	dsb
	smc	#0
	dsb
#endif
	mov	r0, r8
	adr	r4, am43xx_pm_ro_sram_data	@ NB: r4 reused; wfi_flags reloaded below
	ldr	r3, [r4, #AMX3_PM_RO_SRAM_DATA_VIRT_OFFSET]

	/* Stash AUX_CTRL and PREFETCH_CTRL so resume can restore them */
	mov	r2, r0
	ldr	r0, [r2, #L2X0_AUX_CTRL]
	str	r0, [r3, #AMX3_PM_L2_AUX_CTRL_VAL_OFFSET]
	ldr	r0, [r2, #L310_PREFETCH_CTRL]
	str	r0, [r3, #AMX3_PM_L2_PREFETCH_CTRL_VAL_OFFSET]

	/* Clean+invalidate all ways, then poll until the way mask clears */
	ldr	r0, l2_val
	str	r0, [r2, #L2X0_CLEAN_INV_WAY]
wait:
	ldr	r0, [r2, #L2X0_CLEAN_INV_WAY]
	ldr	r1, l2_val
	ands	r0, r0, r1
	bne	wait
#ifdef CONFIG_PL310_ERRATA_727915
	/* Restore PL310 debug ctrl to normal */
	mov	r0, #0x00
	mov	r12, #OMAP4_MON_L2X0_DBG_CTRL_INDEX
	dsb
	smc	#0
	dsb
#endif
l2x_sync:
	/* Drain the PL310: write CACHE_SYNC and poll until it reads idle */
	mov	r0, r8
	mov	r2, r0
	mov	r0, #0x0
	str	r0, [r2, #L2X0_CACHE_SYNC]
sync:
	ldr	r0, [r2, #L2X0_CACHE_SYNC]
	ands	r0, r0, #0x1
	bne	sync
#endif

	/* Restore wfi_flags (r4 was clobbered in the L2 section above) */
	adr	r3, am43xx_pm_ro_sram_data
	ldr	r2, [r3, #AMX3_PM_RO_SRAM_DATA_VIRT_OFFSET]
	ldr	r4, [r2, #AMX3_PM_WFI_FLAGS_OFFSET]

cache_skip_flush:
	/*
	 * If we are trying to enter RTC+DDR mode we must perform
	 * a read from the rtc address space to ensure translation
	 * presence in the TLB to avoid page table walk after DDR
	 * is unavailable.
	 */
	tst	r4, #WFI_FLAG_RTC_ONLY
	beq	skip_rtc_va_refresh

	adr	r3, am43xx_pm_ro_sram_data
	ldr	r1, [r3, #AMX3_PM_RTC_BASE_VIRT_OFFSET]
	ldr	r0, [r1]			@ dummy read, result unused

skip_rtc_va_refresh:
	/* Check if we want self refresh */
	tst	r4, #WFI_FLAG_SELF_REFRESH
	beq	emif_skip_enter_sr

	adr	r9, am43xx_emif_sram_table	@ r9 = EMIF PM function table

	ldr	r3, [r9, #EMIF_PM_ENTER_SR_OFFSET]
	blx	r3				@ put DDR into self-refresh

emif_skip_enter_sr:
	/* Only necessary if PER is losing context */
	tst	r4, #WFI_FLAG_SAVE_EMIF
	beq	emif_skip_save

	ldr	r3, [r9, #EMIF_PM_SAVE_CONTEXT_OFFSET]
	blx	r3				@ save EMIF registers for restore

emif_skip_save:
	/* Only can disable EMIF if we have entered self refresh */
	tst	r4, #WFI_FLAG_SELF_REFRESH
	beq	emif_skip_disable

	/* Disable EMIF (clear MODULEMODE, then wait for MODULESTATE) */
	ldr	r1, am43xx_virt_emif_clkctrl
	ldr	r2, [r1]
	bic	r2, r2, #AM33XX_CM_CLKCTRL_MODULEMODE_DISABLE
	str	r2, [r1]

wait_emif_disable:
	ldr	r2, [r1]
	mov	r3, #AM33XX_CM_CLKCTRL_MODULESTATE_DISABLED
	cmp	r2, r3
	bne	wait_emif_disable

emif_skip_disable:
	tst	r4, #WFI_FLAG_RTC_ONLY
	beq	skip_rtc_only

	/*
	 * RTC+DDR path: arm the RTC PMIC power-enable and external-wakeup
	 * bits so the PMIC cuts power; the SoC should lose power shortly.
	 */
	adr	r3, am43xx_pm_ro_sram_data
	ldr	r1, [r3, #AMX3_PM_RTC_BASE_VIRT_OFFSET]

	ldr	r0, [r1, #RTC_PMIC_REG]
	orr	r0, r0, #RTC_PMIC_POWER_EN
	orr	r0, r0, #RTC_PMIC_EXT_WAKEUP_STS
	orr	r0, r0, #RTC_PMIC_EXT_WAKEUP_EN
	orr	r0, r0, #RTC_PMIC_EXT_WAKEUP_POL
	str	r0, [r1, #RTC_PMIC_REG]
	ldr	r0, [r1, #RTC_PMIC_REG]	@ read back to post the write
	/* Wait for 2 seconds to lose power */
	mov	r3, #2
	ldr	r2, [r1, #RTC_SECONDS_REG]
rtc_loop:
	/* Count two RTC seconds-register ticks; if still alive, bail out */
	ldr	r0, [r1, #RTC_SECONDS_REG]
	cmp	r0, r2
	beq	rtc_loop
	mov	r2, r0
	subs	r3, r3, #1
	bne	rtc_loop

	b	re_enable_emif			@ power-off failed: undo and abort

skip_rtc_only:

	tst	r4, #WFI_FLAG_WAKE_M3
	beq	wkup_m3_skip

	/*
	 * For the MPU WFI to be registered as an interrupt
	 * to WKUP_M3, MPU_CLKCTRL.MODULEMODE needs to be set
	 * to DISABLED
	 */
	ldr	r1, am43xx_virt_mpu_clkctrl
	ldr	r2, [r1]
	bic	r2, r2, #AM33XX_CM_CLKCTRL_MODULEMODE_DISABLE
	str	r2, [r1]

	/*
	 * Put MPU CLKDM to SW_SLEEP
	 */
	ldr	r1, am43xx_virt_mpu_clkstctrl
	mov	r2, #AM43XX_CM_CLKSTCTRL_CLKTRCTRL_SW_SLEEP
	str	r2, [r1]

wkup_m3_skip:
	/*
	 * Execute a barrier instruction to ensure that all cache,
	 * TLB and branch predictor maintenance operations issued
	 * have completed.
	 */
	dsb
	dmb

	/*
	 * Execute a WFI instruction and wait until the
	 * STANDBYWFI output is asserted to indicate that the
	 * CPU is in idle and low power state. CPU can speculatively
	 * prefetch the instructions so add NOPs after WFI. Sixteen
	 * NOPs as per Cortex-A9 pipeline.
	 */
	wfi

	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop
	nop

	/* We come here in case of an abort due to a late interrupt */

	/* Set MPU_CLKSTCTRL back to HW_AUTO */
	ldr	r1, am43xx_virt_mpu_clkstctrl
	mov	r2, #AM43XX_CM_CLKSTCTRL_CLKTRCTRL_HW_AUTO
	str	r2, [r1]

	/* Set MPU_CLKCTRL.MODULEMODE back to ENABLE */
	ldr	r1, am43xx_virt_mpu_clkctrl
	mov	r2, #AM33XX_CM_CLKCTRL_MODULEMODE_ENABLE
	str	r2, [r1]

re_enable_emif:
	/* Re-enable EMIF */
	ldr	r1, am43xx_virt_emif_clkctrl
	mov	r2, #AM33XX_CM_CLKCTRL_MODULEMODE_ENABLE
	str	r2, [r1]
wait_emif_enable:
	ldr	r3, [r1]
	cmp	r2, r3
	bne	wait_emif_enable

	tst	r4, #WFI_FLAG_FLUSH_CACHE
	beq	cache_skip_restore

	/*
	 * Set SCTLR.C bit to allow data cache allocation
	 */
	mrc	p15, 0, r0, c1, c0, 0
	orr	r0, r0, #(1 << 2)	@ Enable the C bit
	mcr	p15, 0, r0, c1, c0, 0
	isb

cache_skip_restore:
	/* Only necessary if PER is losing context */
	tst	r4, #WFI_FLAG_SELF_REFRESH
	beq	emif_skip_exit_sr_abt

	/* Take DDR back out of self-refresh via the abort helper */
	adr	r9, am43xx_emif_sram_table
	ldr	r1, [r9, #EMIF_PM_ABORT_SR_OFFSET]
	blx	r1

emif_skip_exit_sr_abt:
	/* Let the suspend code know about the abort */
	mov	r0, #1
	ldmfd	sp!, {r4 - r11, pc}	@ restore regs and return
ENDPROC(am43xx_do_wfi)
334
	.align
/*
 * Byte offset of the resume entry point from the start of am43xx_do_wfi.
 * Presumably consumed by the PM core to locate the resume code inside
 * the SRAM copy of this blob -- confirm against pm33xx users.
 */
ENTRY(am43xx_resume_offset)
	.word . - am43xx_do_wfi
338
/*
 * am43xx_resume_from_deep_sleep - re-initialize EMIF and L2 after a
 * deep-sleep wakeup, then jump to the generic cpu_resume.
 *
 * Sequence: release the MPU clockdomain, hold EMIF outputs via the PRM
 * power-off control while the EMIF module is re-enabled and its context
 * restored, take DDR out of self-refresh, then (if configured) restore
 * and re-enable the PL310 L2 cache through secure monitor calls.
 * Does not return; ends with a jump to resume_addr.
 */
ENTRY(am43xx_resume_from_deep_sleep)
	/* Set MPU CLKSTCTRL to HW AUTO so that CPUidle works properly */
	ldr	r1, am43xx_virt_mpu_clkstctrl
	mov	r2, #AM43XX_CM_CLKSTCTRL_CLKTRCTRL_HW_AUTO
	str	r2, [r1]

	/* For AM43xx, use EMIF power down until context is restored */
	ldr	r2, am43xx_phys_emif_poweroff
	mov	r1, #AM43XX_EMIF_POWEROFF_ENABLE
	str	r1, [r2, #0x0]

	/* Re-enable EMIF (set MODULEMODE, poll until it reads back) */
	ldr	r1, am43xx_phys_emif_clkctrl
	mov	r2, #AM33XX_CM_CLKCTRL_MODULEMODE_ENABLE
	str	r2, [r1]
wait_emif_enable1:
	ldr	r3, [r1]
	cmp	r2, r3
	bne	wait_emif_enable1

	adr	r9, am43xx_emif_sram_table	@ r9 = EMIF PM function table

	ldr	r1, [r9, #EMIF_PM_RESTORE_CONTEXT_OFFSET]
	blx	r1				@ restore saved EMIF registers

	ldr	r1, [r9, #EMIF_PM_EXIT_SR_OFFSET]
	blx	r1				@ take DDR out of self-refresh

	/* Release the EMIF power-down hold now that DDR is usable again */
	ldr	r2, am43xx_phys_emif_poweroff
	mov	r1, #AM43XX_EMIF_POWEROFF_DISABLE
	str	r1, [r2, #0x0]

#ifdef CONFIG_CACHE_L2X0
	/* Skip L2 setup if the controller is already enabled */
	ldr	r2, l2_cache_base
	ldr	r0, [r2, #L2X0_CTRL]
	and	r0, #0x0f
	cmp	r0, #1
	beq	skip_l2en			@ Skip if already enabled

	/* Restore PREFETCH_CTRL saved at suspend, via secure monitor */
	adr	r4, am43xx_pm_ro_sram_data
	ldr	r3, [r4, #AMX3_PM_RO_SRAM_DATA_PHYS_OFFSET]
	ldr	r0, [r3, #AMX3_PM_L2_PREFETCH_CTRL_VAL_OFFSET]

	ldr	r12, l2_smc1
	dsb
	smc	#0
	dsb
set_aux_ctrl:
	/* Restore AUX_CTRL saved at suspend, via secure monitor */
	ldr	r0, [r3, #AMX3_PM_L2_AUX_CTRL_VAL_OFFSET]
	ldr	r12, l2_smc2
	dsb
	smc	#0
	dsb

	/* L2 invalidate on resume */
	ldr	r0, l2_val
	ldr	r2, l2_cache_base
	str	r0, [r2, #L2X0_INV_WAY]
wait2:
	ldr	r0, [r2, #L2X0_INV_WAY]
	ldr	r1, l2_val
	ands	r0, r0, r1
	bne	wait2
#ifdef CONFIG_PL310_ERRATA_727915
	/* Make sure PL310 debug ctrl is back to normal */
	mov	r0, #0x00
	mov	r12, #OMAP4_MON_L2X0_DBG_CTRL_INDEX
	dsb
	smc	#0
	dsb
#endif
l2x_sync2:
	/* Drain the PL310 before enabling it */
	ldr	r2, l2_cache_base
	mov	r0, #0x0
	str	r0, [r2, #L2X0_CACHE_SYNC]
sync2:
	ldr	r0, [r2, #L2X0_CACHE_SYNC]
	ands	r0, r0, #0x1
	bne	sync2

	/* Enable the L2 cache controller via secure monitor */
	mov	r0, #0x1
	ldr	r12, l2_smc3
	dsb
	smc	#0
	dsb
#endif
skip_l2en:
	/* We are back. Branch to the common CPU resume routine */
	mov	r0, #0
	ldr	pc, resume_addr
ENDPROC(am43xx_resume_from_deep_sleep)
429
/*
 * Local variables (literal pool: absolute addresses and register
 * locations resolved at link time, loaded with ldr above)
 */
	.align
kernel_flush:
	.word v7_flush_dcache_all	@ v7 "flush entire D-cache" routine
ddr_start:
	.word PAGE_OFFSET		@ start of the kernel linear mapping

am43xx_phys_emif_poweroff:
	.word (AM43XX_CM_BASE + AM43XX_PRM_DEVICE_INST + \
	       AM43XX_PRM_EMIF_CTRL_OFFSET)	@ physical PRM EMIF ctrl reg
am43xx_virt_mpu_clkstctrl:
	.word (AM43XX_CM_MPU_CLKSTCTRL)		@ virtual MPU CLKSTCTRL
am43xx_virt_mpu_clkctrl:
	.word (AM43XX_CM_MPU_MPU_CLKCTRL)	@ virtual MPU CLKCTRL
am43xx_virt_emif_clkctrl:
	.word (AM43XX_CM_PER_EMIF_CLKCTRL)	@ virtual EMIF CLKCTRL
am43xx_phys_emif_clkctrl:
	.word (AM43XX_CM_BASE + AM43XX_CM_PER_INST + \
	       AM43XX_CM_PER_EMIF_CLKCTRL_OFFSET)	@ physical EMIF CLKCTRL
451
#ifdef CONFIG_CACHE_L2X0
/* L2 cache related defines for AM437x */
get_l2cache_base:
	.word omap4_get_l2cache_base	@ helper returning PL310 virt base
l2_cache_base:
	.word OMAP44XX_L2CACHE_BASE	@ PL310 base used on the resume path
l2_smc1:
	.word OMAP4_MON_L2X0_PREFETCH_INDEX	@ SMC index: write PREFETCH_CTRL
l2_smc2:
	.word OMAP4_MON_L2X0_AUXCTRL_INDEX	@ SMC index: write AUX_CTRL
l2_smc3:
	.word OMAP4_MON_L2X0_CTRL_INDEX		@ SMC index: write L2X0_CTRL
l2_val:
	.word 0xffff			@ way mask for 16-way clean/inv ops
#endif
467
.align 3
/* DDR related defines */
/* Table of EMIF PM function pointers, filled in at runtime by the PM core */
ENTRY(am43xx_emif_sram_table)
	.space EMIF_PM_FUNCTIONS_SIZE

/*
 * Descriptor exported to the pm33xx driver: addresses and sizes it needs
 * to copy this code into SRAM and locate its data.
 */
ENTRY(am43xx_pm_sram)
	.word am43xx_do_wfi
	.word am43xx_do_wfi_sz
	.word am43xx_resume_offset
	.word am43xx_emif_sram_table
	.word am43xx_pm_ro_sram_data

resume_addr:
	/*
	 * Physical address of cpu_resume: virtual minus PAGE_OFFSET plus
	 * 0x80000000 -- presumably the AM43xx DRAM physical base; confirm
	 * against the platform memory map.
	 */
	.word cpu_resume - PAGE_OFFSET + 0x80000000
.align 3

/* Read-only SRAM data area (wfi_flags, saved L2 regs, RTC base, ...) */
ENTRY(am43xx_pm_ro_sram_data)
	.space AMX3_PM_RO_SRAM_DATA_SIZE

/* Size of the am43xx_do_wfi SRAM blob (everything above this point) */
ENTRY(am43xx_do_wfi_sz)
	.word . - am43xx_do_wfi