/*
 * linux/arch/arm/mm/proc-v7.S
 *
 * Copyright (C) 2001 Deep Blue Solutions Ltd.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This is the "shell" of the ARMv7 processor support.
 */
#include <linux/arm-smccc.h>
#include <linux/init.h>
#include <linux/linkage.h>
#include <asm/assembler.h>
#include <asm/asm-offsets.h>
#include <asm/hwcap.h>
#include <asm/pgtable-hwdef.h>
#include <asm/pgtable.h>
#include <asm/memory.h>

#include "proc-macros.S"

/* Pull in the page-table-format specific handlers (switch_mm, set_pte_ext) */
#ifdef CONFIG_ARM_LPAE
#include "proc-v7-3level.S"
#else
#include "proc-v7-2level.S"
#endif
29
/*
 * cpu_v7_proc_init()
 *
 * Nothing to initialise on ARMv7; simply return to the caller.
 */
ENTRY(cpu_v7_proc_init)
	ret	lr
ENDPROC(cpu_v7_proc_init)
33
/*
 * cpu_v7_proc_fin()
 *
 * Prepare for CPU shutdown/reset: clear the SCTLR I (I-cache),
 * C (D-cache) and A (alignment check) enable bits.  Clobbers r0.
 */
ENTRY(cpu_v7_proc_fin)
	mrc	p15, 0, r0, c1, c0, 0		@ ctrl register
	bic	r0, r0, #0x1000			@ ...i............
	bic	r0, r0, #0x0006			@ .............ca.
	mcr	p15, 0, r0, c1, c0, 0		@ disable caches
	ret	lr
ENDPROC(cpu_v7_proc_fin)
41
/*
 * cpu_v7_reset(loc, hyp)
 *
 * Perform a soft reset of the system.  Put the CPU into the
 * same state as it would be if it had been reset, and branch
 * to what would be the reset vector.
 *
 * - loc - location to jump to for soft reset
 * - hyp - indicate if restart occurs in HYP mode
 *
 * This code must be executed using a flat identity mapping with
 * caches disabled.
 */
	.align	5
	.pushsection	.idmap.text, "ax"
ENTRY(cpu_v7_reset)
	mrc	p15, 0, r2, c1, c0, 0		@ ctrl register
	bic	r2, r2, #0x1			@ ...............m
 THUMB(	bic	r2, r2, #1 << 30 )		@ SCTLR.TE (Thumb exceptions)
	mcr	p15, 0, r2, c1, c0, 0		@ disable MMU
	isb					@ ensure the MMU is off before branching
#ifdef CONFIG_ARM_VIRT_EXT
	teq	r1, #0				@ restart in HYP mode requested?
	bne	__hyp_soft_restart
#endif
	bx	r0				@ branch to the reset location
ENDPROC(cpu_v7_reset)
	.popsection
70
/*
 * cpu_v7_do_idle()
 *
 * Idle the processor (eg, wait for interrupt).
 *
 * IRQs are already disabled.  Execution resumes after the ret
 * once a wake-up event terminates the WFI.
 */
ENTRY(cpu_v7_do_idle)
	dsb					@ WFI may enter a low-power mode
	wfi					@ wait for interrupt/wake-up event
	ret	lr
ENDPROC(cpu_v7_do_idle)
83
/*
 * cpu_v7_dcache_clean_area(addr, size)
 *
 * Clean the D-cache lines covering [r0, r0 + r1).  With the MP
 * extensions the page-table walker snoops the L1 cache, so on SMP
 * the clean is unnecessary and the routine returns immediately.
 */
ENTRY(cpu_v7_dcache_clean_area)
	ALT_SMP(W(nop))				@ MP extensions imply L1 PTW
	ALT_UP_B(1f)
	ret	lr
1:	dcache_line_size r2, r3			@ r2 = D-cache line size in bytes
2:	mcr	p15, 0, r0, c7, c10, 1		@ clean D entry
	add	r0, r0, r2
	subs	r1, r1, r2			@ any bytes left?
	bhi	2b
	dsb	ishst				@ make the cleans visible
	ret	lr
ENDPROC(cpu_v7_dcache_clean_area)
96
#ifdef CONFIG_ARM_PSCI
	.arch_extension sec
/*
 * cpu_v7_smc_switch_mm()
 *
 * Branch-predictor hardening (Spectre v2): invoke the
 * ARM_SMCCC_ARCH_WORKAROUND_1 firmware call via the SMC conduit
 * before performing the normal context switch.  r0-r3 are
 * preserved around the firmware call.
 */
ENTRY(cpu_v7_smc_switch_mm)
	stmfd	sp!, {r0 - r3}
	movw	r0, #:lower16:ARM_SMCCC_ARCH_WORKAROUND_1
	movt	r0, #:upper16:ARM_SMCCC_ARCH_WORKAROUND_1
	smc	#0				@ call secure firmware
	ldmfd	sp!, {r0 - r3}
	b	cpu_v7_switch_mm
ENDPROC(cpu_v7_smc_switch_mm)
	.arch_extension virt
/*
 * cpu_v7_hvc_switch_mm()
 *
 * As cpu_v7_smc_switch_mm, but using the HVC conduit when a
 * hypervisor provides the workaround.
 */
ENTRY(cpu_v7_hvc_switch_mm)
	stmfd	sp!, {r0 - r3}
	movw	r0, #:lower16:ARM_SMCCC_ARCH_WORKAROUND_1
	movt	r0, #:upper16:ARM_SMCCC_ARCH_WORKAROUND_1
	hvc	#0				@ call the hypervisor
	ldmfd	sp!, {r0 - r3}
	b	cpu_v7_switch_mm
ENDPROC(cpu_v7_hvc_switch_mm)
#endif
/*
 * cpu_v7_iciallu_switch_mm()
 *
 * Branch-predictor hardening variant: invalidate the entire
 * I-cache (ICIALLU) before the regular context switch.
 * NOTE(review): selected for Cortex-A15 class cores (see the ca15
 * proc_fns wiring later in this file) — confirm against the
 * kernel's branch-predictor hardening documentation.
 */
ENTRY(cpu_v7_iciallu_switch_mm)
	mov	r3, #0
	mcr	p15, 0, r3, c7, c5, 0		@ ICIALLU
	b	cpu_v7_switch_mm
ENDPROC(cpu_v7_iciallu_switch_mm)
/*
 * cpu_v7_bpiall_switch_mm()
 *
 * Branch-predictor hardening variant: explicit BTAC/BTB flush
 * (BPIALL) before the regular context switch.
 */
ENTRY(cpu_v7_bpiall_switch_mm)
	mov	r3, #0
	mcr	p15, 0, r3, c7, c5, 6		@ flush BTAC/BTB
	b	cpu_v7_switch_mm
ENDPROC(cpu_v7_bpiall_switch_mm)
127
	string	cpu_v7_name, "ARMv7 Processor"
	.align

/* Suspend/resume support: derived from arch/arm/mach-s5pv210/sleep.S */
.globl	cpu_v7_suspend_size
.equ	cpu_v7_suspend_size, 4 * 9		@ 9 words saved by cpu_v7_do_suspend
#ifdef CONFIG_ARM_CPU_SUSPEND
/*
 * cpu_v7_do_suspend(save_area)
 *
 * Save the CP15 state needed across a power-down into the
 * cpu_v7_suspend_size-word area at r0: FCSE/PID, user r/o thread
 * ID, Domain ID, TTB1, TTBCR, SCTLR, ACTLR and CPACR.
 */
ENTRY(cpu_v7_do_suspend)
	stmfd	sp!, {r4 - r11, lr}
	mrc	p15, 0, r4, c13, c0, 0	@ FCSE/PID
	mrc	p15, 0, r5, c13, c0, 3	@ User r/o thread ID
	stmia	r0!, {r4 - r5}
#ifdef CONFIG_MMU
	mrc	p15, 0, r6, c3, c0, 0	@ Domain ID
#ifdef CONFIG_ARM_LPAE
	mrrc	p15, 1, r5, r7, c2	@ TTB 1
#else
	mrc	p15, 0, r7, c2, c0, 1	@ TTB 1
#endif
	mrc	p15, 0, r11, c2, c0, 2	@ TTB control register
#endif
	mrc	p15, 0, r8, c1, c0, 0	@ Control register
	mrc	p15, 0, r9, c1, c0, 1	@ Auxiliary control register
	mrc	p15, 0, r10, c1, c0, 2	@ Co-processor access control
	stmia	r0, {r5 - r11}
	ldmfd	sp!, {r4 - r11, pc}
ENDPROC(cpu_v7_do_suspend)

/*
 * cpu_v7_do_resume(save_area, ttb0)
 *
 * Restore the state saved by cpu_v7_do_suspend.
 * - r0: pointer to the saved state area
 * - r1: TTB0 value to install (page-table base)
 * Tail-calls cpu_resume_mmu with the saved SCTLR value in r0.
 */
ENTRY(cpu_v7_do_resume)
	mov	ip, #0
	mcr	p15, 0, ip, c7, c5, 0	@ invalidate I cache
	mcr	p15, 0, ip, c13, c0, 1	@ set reserved context ID
	ldmia	r0!, {r4 - r5}
	mcr	p15, 0, r4, c13, c0, 0	@ FCSE/PID
	mcr	p15, 0, r5, c13, c0, 3	@ User r/o thread ID
	ldmia	r0, {r5 - r11}
#ifdef CONFIG_MMU
	mcr	p15, 0, ip, c8, c7, 0	@ invalidate TLBs
	mcr	p15, 0, r6, c3, c0, 0	@ Domain ID
#ifdef CONFIG_ARM_LPAE
	mcrr	p15, 0, r1, ip, c2	@ TTB 0
	mcrr	p15, 1, r5, r7, c2	@ TTB 1
#else
	ALT_SMP(orr	r1, r1, #TTB_FLAGS_SMP)
	ALT_UP(orr	r1, r1, #TTB_FLAGS_UP)
	mcr	p15, 0, r1, c2, c0, 0	@ TTB 0
	mcr	p15, 0, r7, c2, c0, 1	@ TTB 1
#endif
	mcr	p15, 0, r11, c2, c0, 2	@ TTB control register
	ldr	r4, =PRRR		@ PRRR
	ldr	r5, =NMRR		@ NMRR
	mcr	p15, 0, r4, c10, c2, 0	@ write PRRR
	mcr	p15, 0, r5, c10, c2, 1	@ write NMRR
#endif	/* CONFIG_MMU */
	mrc	p15, 0, r4, c1, c0, 1	@ Read Auxiliary control register
	teq	r4, r9			@ Is it already set?
	mcrne	p15, 0, r9, c1, c0, 1	@ No, so write it
	mcr	p15, 0, r10, c1, c0, 2	@ Co-processor access control
	isb
	dsb
	mov	r0, r8			@ control register
	b	cpu_resume_mmu
ENDPROC(cpu_v7_do_resume)
#endif
192
.globl	cpu_ca9mp_suspend_size
.equ	cpu_ca9mp_suspend_size, cpu_v7_suspend_size + 4 * 2
#ifdef CONFIG_ARM_CPU_SUSPEND
/*
 * Cortex-A9 suspend/resume: additionally save/restore the
 * diagnostic and power control registers (CP15 c15), then
 * defer to the generic v7 handlers.
 */
ENTRY(cpu_ca9mp_do_suspend)
	stmfd	sp!, {r4 - r5}
	mrc	p15, 0, r4, c15, c0, 1		@ Diagnostic register
	mrc	p15, 0, r5, c15, c0, 0		@ Power register
	stmia	r0!, {r4 - r5}
	ldmfd	sp!, {r4 - r5}
	b	cpu_v7_do_suspend
ENDPROC(cpu_ca9mp_do_suspend)

ENTRY(cpu_ca9mp_do_resume)
	ldmia	r0!, {r4 - r5}
	mrc	p15, 0, r10, c15, c0, 1		@ Read Diagnostic register
	teq	r4, r10				@ Already restored?
	mcrne	p15, 0, r4, c15, c0, 1		@ No, so restore it
	mrc	p15, 0, r10, c15, c0, 0		@ Read Power register
	teq	r5, r10				@ Already restored?
	mcrne	p15, 0, r5, c15, c0, 0		@ No, so restore it
	b	cpu_v7_do_resume
ENDPROC(cpu_ca9mp_do_resume)
#endif
216
#ifdef CONFIG_CPU_PJ4B
	/* Marvell PJ4B: reuse the generic v7 handlers for most operations */
	globl_equ	cpu_pj4b_switch_mm,     cpu_v7_switch_mm
	globl_equ	cpu_pj4b_set_pte_ext,	cpu_v7_set_pte_ext
	globl_equ	cpu_pj4b_proc_init,	cpu_v7_proc_init
	globl_equ	cpu_pj4b_proc_fin, 	cpu_v7_proc_fin
	globl_equ	cpu_pj4b_reset,	   	cpu_v7_reset
#ifdef CONFIG_PJ4B_ERRATA_4742
/*
 * cpu_pj4b_do_idle()
 *
 * PJ4B erratum 4742 workaround: add a trailing DSB after WFI.
 */
ENTRY(cpu_pj4b_do_idle)
	dsb					@ WFI may enter a low-power mode
	wfi
	dsb					@ barrier
	ret	lr
ENDPROC(cpu_pj4b_do_idle)
#else
	globl_equ	cpu_pj4b_do_idle,  	cpu_v7_do_idle
#endif
	globl_equ	cpu_pj4b_dcache_clean_area,	cpu_v7_dcache_clean_area
#ifdef CONFIG_ARM_CPU_SUSPEND
/*
 * PJ4B suspend/resume: save/restore five extra implementation-defined
 * CP15 registers, then defer to the generic v7 handlers.
 */
ENTRY(cpu_pj4b_do_suspend)
	stmfd	sp!, {r6 - r10}
	mrc	p15, 1, r6, c15, c1, 0	@ save CP15 - extra features
	mrc	p15, 1, r7, c15, c2, 0	@ save CP15 - Aux Func Modes Ctrl 0
	mrc	p15, 1, r8, c15, c1, 2	@ save CP15 - Aux Debug Modes Ctrl 2
	mrc	p15, 1, r9, c15, c1, 1	@ save CP15 - Aux Debug Modes Ctrl 1
	mrc	p15, 0, r10, c9, c14, 0	@ save CP15 - PMC
	stmia	r0!, {r6 - r10}
	ldmfd	sp!, {r6 - r10}
	b	cpu_v7_do_suspend
ENDPROC(cpu_pj4b_do_suspend)

ENTRY(cpu_pj4b_do_resume)
	ldmia	r0!, {r6 - r10}
	mcr	p15, 1, r6, c15, c1, 0	@ restore CP15 - extra features
	mcr	p15, 1, r7, c15, c2, 0	@ restore CP15 - Aux Func Modes Ctrl 0
	mcr	p15, 1, r8, c15, c1, 2	@ restore CP15 - Aux Debug Modes Ctrl 2
	mcr	p15, 1, r9, c15, c1, 1	@ restore CP15 - Aux Debug Modes Ctrl 1
	mcr	p15, 0, r10, c9, c14, 0	@ restore CP15 - PMC
	b	cpu_v7_do_resume
ENDPROC(cpu_pj4b_do_resume)
#endif
.globl	cpu_pj4b_suspend_size
.equ	cpu_pj4b_suspend_size, cpu_v7_suspend_size + 4 * 5

#endif
261
/*
 * __v7_setup
 *
 * Initialise TLB, Caches, and MMU state ready to switch the MMU
 * on.  Return in r0 the new CP15 C1 control register setting.
 *
 * r1, r2, r4, r5, r9, r13 must be preserved - r13 is not a stack
 * r4: TTBR0 (low word)
 * r5: TTBR0 (high word if LPAE)
 * r8: TTBR1
 * r9: Main ID register
 *
 * This should be able to cover all ARMv7 cores.
 *
 * It is assumed that:
 * - cache type register is implemented
 */
__v7_ca5mp_setup:
__v7_ca9mp_setup:
__v7_cr7mp_setup:
	mov	r10, #(1 << 0)			@ Cache/TLB ops broadcasting
	b	1f
__v7_ca7mp_setup:
__v7_ca12mp_setup:
__v7_ca15mp_setup:
__v7_b15mp_setup:
__v7_ca17mp_setup:
	mov	r10, #0
1:	adr	r0, __v7_setup_stack_ptr	@ locate the PC-relative stack pointer word
	ldr	r12, [r0]
	add	r12, r12, r0			@ the local stack
	stmia	r12, {r1-r6, lr}		@ v7_invalidate_l1 touches r0-r6
	bl	v7_invalidate_l1
	ldmia	r12, {r1-r6, lr}
#ifdef CONFIG_SMP
	orr	r10, r10, #(1 << 6)		@ Enable SMP/nAMP mode
	ALT_SMP(mrc	p15, 0, r0, c1, c0, 1)
	ALT_UP(mov	r0, r10)		@ fake it for UP
	orr	r10, r10, r0			@ Set required bits
	teq	r10, r0				@ Were they already set?
	mcrne	p15, 0, r10, c1, c0, 1		@ No, update register
#endif
	b	__v7_setup_cont
305
/*
 * Per-core errata workarounds, branched to from __v7_setup_cont.
 * Each block applies its fix only for the affected silicon revision,
 * then rejoins the common path at __errata_finish.
 *
 * Errata:
 *  r0, r10 available for use
 *  r1, r2, r4, r5, r9, r13: must be preserved
 *  r3: contains MIDR rX number in bits 23-20
 *  r6: contains MIDR rXpY as 8-bit XY number
 *  r9: MIDR
 */
__ca8_errata:
#if defined(CONFIG_ARM_ERRATA_430973) && !defined(CONFIG_ARCH_MULTIPLATFORM)
	teq	r3, #0x00100000			@ only present in r1p*
	mrceq	p15, 0, r0, c1, c0, 1		@ read aux control register
	orreq	r0, r0, #(1 << 6)		@ set IBE to 1
	mcreq	p15, 0, r0, c1, c0, 1		@ write aux control register
#endif
#ifdef CONFIG_ARM_ERRATA_458693
	teq	r6, #0x20			@ only present in r2p0
	mrceq	p15, 0, r0, c1, c0, 1		@ read aux control register
	orreq	r0, r0, #(1 << 5)		@ set L1NEON to 1
	orreq	r0, r0, #(1 << 9)		@ set PLDNOP to 1
	mcreq	p15, 0, r0, c1, c0, 1		@ write aux control register
#endif
#ifdef CONFIG_ARM_ERRATA_460075
	teq	r6, #0x20			@ only present in r2p0
	mrceq	p15, 1, r0, c9, c0, 2		@ read L2 cache aux ctrl register
	tsteq	r0, #1 << 22			@ only write if not already set
	orreq	r0, r0, #(1 << 22)		@ set the Write Allocate disable bit
	mcreq	p15, 1, r0, c9, c0, 2		@ write the L2 cache aux ctrl register
#endif
	b	__errata_finish

__ca9_errata:
#ifdef CONFIG_ARM_ERRATA_742230
	cmp	r6, #0x22			@ only present up to r2p2
	mrcle	p15, 0, r0, c15, c0, 1		@ read diagnostic register
	orrle	r0, r0, #1 << 4			@ set bit #4
	mcrle	p15, 0, r0, c15, c0, 1		@ write diagnostic register
#endif
#ifdef CONFIG_ARM_ERRATA_742231
	teq	r6, #0x20			@ present in r2p0
	teqne	r6, #0x21			@ present in r2p1
	teqne	r6, #0x22			@ present in r2p2
	mrceq	p15, 0, r0, c15, c0, 1		@ read diagnostic register
	orreq	r0, r0, #1 << 12		@ set bit #12
	orreq	r0, r0, #1 << 22		@ set bit #22
	mcreq	p15, 0, r0, c15, c0, 1		@ write diagnostic register
#endif
#ifdef CONFIG_ARM_ERRATA_743622
	teq	r3, #0x00200000			@ only present in r2p*
	mrceq	p15, 0, r0, c15, c0, 1		@ read diagnostic register
	orreq	r0, r0, #1 << 6			@ set bit #6
	mcreq	p15, 0, r0, c15, c0, 1		@ write diagnostic register
#endif
#if defined(CONFIG_ARM_ERRATA_751472) && defined(CONFIG_SMP)
	ALT_SMP(cmp r6, #0x30)			@ present prior to r3p0
	ALT_UP_B(1f)
	mrclt	p15, 0, r0, c15, c0, 1		@ read diagnostic register
	orrlt	r0, r0, #1 << 11		@ set bit #11
	mcrlt	p15, 0, r0, c15, c0, 1		@ write diagnostic register
1:
#endif
	b	__errata_finish

__ca15_errata:
#ifdef CONFIG_ARM_ERRATA_773022
	cmp	r6, #0x4			@ only present up to r0p4
	mrcle	p15, 0, r0, c1, c0, 1		@ read aux control register
	orrle	r0, r0, #1 << 1			@ disable loop buffer
	mcrle	p15, 0, r0, c1, c0, 1		@ write aux control register
#endif
	b	__errata_finish

__ca12_errata:
#ifdef CONFIG_ARM_ERRATA_818325_852422
	mrc	p15, 0, r10, c15, c0, 1		@ read diagnostic register
	orr	r10, r10, #1 << 12		@ set bit #12
	mcr	p15, 0, r10, c15, c0, 1		@ write diagnostic register
#endif
#ifdef CONFIG_ARM_ERRATA_821420
	mrc	p15, 0, r10, c15, c0, 2		@ read internal feature reg
	orr	r10, r10, #1 << 1		@ set bit #1
	mcr	p15, 0, r10, c15, c0, 2		@ write internal feature reg
#endif
#ifdef CONFIG_ARM_ERRATA_825619
	mrc	p15, 0, r10, c15, c0, 1		@ read diagnostic register
	orr	r10, r10, #1 << 24		@ set bit #24
	mcr	p15, 0, r10, c15, c0, 1		@ write diagnostic register
#endif
	b	__errata_finish

__ca17_errata:
#ifdef CONFIG_ARM_ERRATA_852421
	cmp	r6, #0x12			@ only present up to r1p2
	mrcle	p15, 0, r10, c15, c0, 1		@ read diagnostic register
	orrle	r10, r10, #1 << 24		@ set bit #24
	mcrle	p15, 0, r10, c15, c0, 1		@ write diagnostic register
#endif
#ifdef CONFIG_ARM_ERRATA_852423
	cmp	r6, #0x12			@ only present up to r1p2
	mrcle	p15, 0, r10, c15, c0, 1		@ read diagnostic register
	orrle	r10, r10, #1 << 12		@ set bit #12
	mcrle	p15, 0, r10, c15, c0, 1		@ write diagnostic register
#endif
	b	__errata_finish
410
/*
 * __v7_pj4b_setup
 *
 * Marvell PJ4B implementation-defined tuning (CP15 c15 auxiliary
 * debug/functional mode registers), then falls straight through
 * into __v7_setup below.
 */
__v7_pj4b_setup:
#ifdef CONFIG_CPU_PJ4B

/* Auxiliary Debug Modes Control 1 Register */
#define PJ4B_STATIC_BP		(1 << 2)  /* Enable Static BP */
#define PJ4B_INTER_PARITY	(1 << 8)  /* Disable Internal Parity Handling */
#define PJ4B_CLEAN_LINE		(1 << 16) /* Disable data transfer for clean line */

/* Auxiliary Debug Modes Control 2 Register */
#define PJ4B_FAST_LDR		(1 << 23) /* Disable fast LDR */
#define PJ4B_SNOOP_DATA		(1 << 25) /* Do not interleave write and snoop data */
#define PJ4B_CWF		(1 << 27) /* Disable Critical Word First feature */
#define PJ4B_OUTSDNG_NC		(1 << 29) /* Disable outstanding non cacheable rqst */
#define PJ4B_L1_REP_RR		(1 << 30) /* L1 replacement - Strict round robin */
#define PJ4B_AUX_DBG_CTRL2	(PJ4B_SNOOP_DATA | PJ4B_CWF |\
				 PJ4B_OUTSDNG_NC | PJ4B_L1_REP_RR)

/* Auxiliary Functional Modes Control Register 0 */
#define PJ4B_SMP_CFB		(1 << 1) /* Set SMP mode. Join the coherency fabric */
#define PJ4B_L1_PAR_CHK		(1 << 2) /* Support L1 parity checking */
#define PJ4B_BROADCAST_CACHE	(1 << 8) /* Broadcast Cache and TLB maintenance */

/* Auxiliary Debug Modes Control 0 Register */
#define PJ4B_WFI_WFE		(1 << 22) /* WFI/WFE - serve the DVM and back to idle */

	/* Auxiliary Debug Modes Control 1 Register */
	mrc	p15, 1, r0, c15, c1, 1
	orr	r0, r0, #PJ4B_CLEAN_LINE
	orr	r0, r0, #PJ4B_INTER_PARITY
	bic	r0, r0, #PJ4B_STATIC_BP
	mcr	p15, 1, r0, c15, c1, 1

	/* Auxiliary Debug Modes Control 2 Register */
	mrc	p15, 1, r0, c15, c1, 2
	bic	r0, r0, #PJ4B_FAST_LDR
	orr	r0, r0, #PJ4B_AUX_DBG_CTRL2
	mcr	p15, 1, r0, c15, c1, 2

	/* Auxiliary Functional Modes Control Register 0 */
	mrc	p15, 1, r0, c15, c2, 0
#ifdef CONFIG_SMP
	orr	r0, r0, #PJ4B_SMP_CFB
#endif
	orr	r0, r0, #PJ4B_L1_PAR_CHK
	orr	r0, r0, #PJ4B_BROADCAST_CACHE
	mcr	p15, 1, r0, c15, c2, 0

	/* Auxiliary Debug Modes Control 0 Register */
	mrc	p15, 1, r0, c15, c1, 0
	orr	r0, r0, #PJ4B_WFI_WFE
	mcr	p15, 1, r0, c15, c1, 0

#endif /* CONFIG_CPU_PJ4B */
464
/*
 * __v7_setup / __v7_setup_cont
 *
 * Generic (non-MP-broadcast) v7 setup entry; the MP variants above
 * join at __v7_setup_cont.  Applies ARM Ltd. per-core errata, then
 * at __errata_finish invalidates caches/TLBs, programs TTBCR/TTBRx
 * and the memory remap registers, and computes the SCTLR value to
 * return in r0.  See the register contract documented above the
 * __v7_*mp_setup entry points.
 */
__v7_setup:
	adr	r0, __v7_setup_stack_ptr	@ locate the PC-relative stack pointer word
	ldr	r12, [r0]
	add	r12, r12, r0			@ the local stack
	stmia	r12, {r1-r6, lr}		@ v7_invalidate_l1 touches r0-r6
	bl	v7_invalidate_l1
	ldmia	r12, {r1-r6, lr}

__v7_setup_cont:
	and	r0, r9, #0xff000000		@ ARM?
	teq	r0, #0x41000000
	bne	__errata_finish			@ not an ARM Ltd. core: skip errata
	and	r3, r9, #0x00f00000		@ variant
	and	r6, r9, #0x0000000f		@ revision
	orr	r6, r6, r3, lsr #20-4		@ combine variant and revision
	ubfx	r0, r9, #4, #12			@ primary part number

	/* Cortex-A8 Errata */
	ldr	r10, =0x00000c08		@ Cortex-A8 primary part number
	teq	r0, r10
	beq	__ca8_errata

	/* Cortex-A9 Errata */
	ldr	r10, =0x00000c09		@ Cortex-A9 primary part number
	teq	r0, r10
	beq	__ca9_errata

	/* Cortex-A12 Errata */
	ldr	r10, =0x00000c0d		@ Cortex-A12 primary part number
	teq	r0, r10
	beq	__ca12_errata

	/* Cortex-A17 Errata */
	ldr	r10, =0x00000c0e		@ Cortex-A17 primary part number
	teq	r0, r10
	beq	__ca17_errata

	/* Cortex-A15 Errata */
	ldr	r10, =0x00000c0f		@ Cortex-A15 primary part number
	teq	r0, r10
	beq	__ca15_errata

__errata_finish:
	mov	r10, #0
	mcr	p15, 0, r10, c7, c5, 0		@ I+BTB cache invalidate
#ifdef CONFIG_MMU
	mcr	p15, 0, r10, c8, c7, 0		@ invalidate I + D TLBs
	v7_ttb_setup r10, r4, r5, r8, r3	@ TTBCR, TTBRx setup
	ldr	r3, =PRRR			@ PRRR
	ldr	r6, =NMRR			@ NMRR
	mcr	p15, 0, r3, c10, c2, 0		@ write PRRR
	mcr	p15, 0, r6, c10, c2, 1		@ write NMRR
#endif
	dsb					@ Complete invalidations
#ifndef CONFIG_ARM_THUMBEE
	mrc	p15, 0, r0, c0, c1, 0		@ read ID_PFR0 for ThumbEE
	and	r0, r0, #(0xf << 12)		@ ThumbEE enabled field
	teq	r0, #(1 << 12)			@ check if ThumbEE is present
	bne	1f
	mov	r3, #0
	mcr	p14, 6, r3, c1, c0, 0		@ Initialize TEEHBR to 0
	mrc	p14, 6, r0, c0, c0, 0		@ load TEECR
	orr	r0, r0, #1			@ set the 1st bit in order to
	mcr	p14, 6, r0, c0, c0, 0		@ stop userspace TEEHBR access
1:
#endif
	adr	r3, v7_crval			@ r3 = "clear" mask, r6 = "mmuset" bits
	ldmia	r3, {r3, r6}
 ARM_BE8(orr	r6, r6, #1 << 25)		@ big-endian page tables
#ifdef CONFIG_SWP_EMULATE
	orr	r3, r3, #(1 << 10)		@ set SW bit in "clear"
	bic	r6, r6, #(1 << 10)		@ clear it in "mmuset"
#endif
	mrc	p15, 0, r0, c1, c0, 0		@ read control register
	bic	r0, r0, r3			@ clear the "clear" bits
	orr	r0, r0, r6			@ set the "mmuset" bits
 THUMB(	orr	r0, r0, #1 << 30 )		@ Thumb exceptions
	ret	lr				@ return to head.S:__ret

	.align	2
__v7_setup_stack_ptr:
	@ PC-relative offset to the scratch area in .bss (works before the MMU is on)
	.word	PHYS_RELATIVE(__v7_setup_stack, .)
ENDPROC(__v7_setup)
548
	.bss
	.align	2
__v7_setup_stack:
	@ scratch save area used by __v7_setup (r13 is not usable there)
	.space	4 * 7				@ 7 registers

	__INITDATA

	.weak cpu_v7_bugs_init

	@ define struct processor (see <asm/proc-fns.h> and proc-macros.S)
	define_processor_functions v7, dabort=v7_early_abort, pabort=v7_pabort, suspend=1, bugs=cpu_v7_bugs_init
560
#ifdef CONFIG_HARDEN_BRANCH_PREDICTOR
	@ generic v7 bpiall on context switch: identical to the plain v7
	@ functions except for the hardened switch_mm
	globl_equ	cpu_v7_bpiall_proc_init,	cpu_v7_proc_init
	globl_equ	cpu_v7_bpiall_proc_fin,		cpu_v7_proc_fin
	globl_equ	cpu_v7_bpiall_reset,		cpu_v7_reset
	globl_equ	cpu_v7_bpiall_do_idle,		cpu_v7_do_idle
	globl_equ	cpu_v7_bpiall_dcache_clean_area, cpu_v7_dcache_clean_area
	globl_equ	cpu_v7_bpiall_set_pte_ext,	cpu_v7_set_pte_ext
	globl_equ	cpu_v7_bpiall_suspend_size,	cpu_v7_suspend_size
#ifdef CONFIG_ARM_CPU_SUSPEND
	globl_equ	cpu_v7_bpiall_do_suspend,	cpu_v7_do_suspend
	globl_equ	cpu_v7_bpiall_do_resume,	cpu_v7_do_resume
#endif
	define_processor_functions v7_bpiall, dabort=v7_early_abort, pabort=v7_pabort, suspend=1, bugs=cpu_v7_bugs_init

#define HARDENED_BPIALL_PROCESSOR_FUNCTIONS v7_bpiall_processor_functions
#else
	@ hardening disabled: fall back to the plain v7 functions
#define HARDENED_BPIALL_PROCESSOR_FUNCTIONS v7_processor_functions
#endif
580
#ifndef CONFIG_ARM_LPAE
	@ Cortex-A8 - always needs bpiall switch_mm implementation
	globl_equ	cpu_ca8_proc_init,	cpu_v7_proc_init
	globl_equ	cpu_ca8_proc_fin,	cpu_v7_proc_fin
	globl_equ	cpu_ca8_reset,		cpu_v7_reset
	globl_equ	cpu_ca8_do_idle,	cpu_v7_do_idle
	globl_equ	cpu_ca8_dcache_clean_area, cpu_v7_dcache_clean_area
	globl_equ	cpu_ca8_set_pte_ext,	cpu_v7_set_pte_ext
	globl_equ	cpu_ca8_switch_mm,	cpu_v7_bpiall_switch_mm
	globl_equ	cpu_ca8_suspend_size,	cpu_v7_suspend_size
#ifdef CONFIG_ARM_CPU_SUSPEND
	globl_equ	cpu_ca8_do_suspend,	cpu_v7_do_suspend
	globl_equ	cpu_ca8_do_resume,	cpu_v7_do_resume
#endif
	define_processor_functions ca8, dabort=v7_early_abort, pabort=v7_pabort, suspend=1, bugs=cpu_v7_ca8_ibe

	@ Cortex-A9 - needs more registers preserved across suspend/resume
	@ and bpiall switch_mm for hardening
	globl_equ	cpu_ca9mp_proc_init,	cpu_v7_proc_init
	globl_equ	cpu_ca9mp_proc_fin,	cpu_v7_proc_fin
	globl_equ	cpu_ca9mp_reset,	cpu_v7_reset
	globl_equ	cpu_ca9mp_do_idle,	cpu_v7_do_idle
	globl_equ	cpu_ca9mp_dcache_clean_area, cpu_v7_dcache_clean_area
#ifdef CONFIG_HARDEN_BRANCH_PREDICTOR
	globl_equ	cpu_ca9mp_switch_mm,	cpu_v7_bpiall_switch_mm
#else
	globl_equ	cpu_ca9mp_switch_mm,	cpu_v7_switch_mm
#endif
	globl_equ	cpu_ca9mp_set_pte_ext,	cpu_v7_set_pte_ext
	define_processor_functions ca9mp, dabort=v7_early_abort, pabort=v7_pabort, suspend=1, bugs=cpu_v7_bugs_init
#endif

	@ Cortex-A15 - needs iciallu switch_mm for hardening
	globl_equ	cpu_ca15_proc_init,	cpu_v7_proc_init
	globl_equ	cpu_ca15_proc_fin,	cpu_v7_proc_fin
	globl_equ	cpu_ca15_reset,		cpu_v7_reset
	globl_equ	cpu_ca15_do_idle,	cpu_v7_do_idle
	globl_equ	cpu_ca15_dcache_clean_area, cpu_v7_dcache_clean_area
#ifdef CONFIG_HARDEN_BRANCH_PREDICTOR
	globl_equ	cpu_ca15_switch_mm,	cpu_v7_iciallu_switch_mm
#else
	globl_equ	cpu_ca15_switch_mm,	cpu_v7_switch_mm
#endif
	globl_equ	cpu_ca15_set_pte_ext,	cpu_v7_set_pte_ext
	globl_equ	cpu_ca15_suspend_size,	cpu_v7_suspend_size
	globl_equ	cpu_ca15_do_suspend,	cpu_v7_do_suspend
	globl_equ	cpu_ca15_do_resume,	cpu_v7_do_resume
	define_processor_functions ca15, dabort=v7_early_abort, pabort=v7_pabort, suspend=1, bugs=cpu_v7_ca15_ibe
#ifdef CONFIG_CPU_PJ4B
	define_processor_functions pj4b, dabort=v7_early_abort, pabort=v7_pabort, suspend=1
#endif
632
	.section ".rodata"

	string	cpu_arch_name, "armv7"
	string	cpu_elf_name, "v7"
	.align

	.section ".proc.info.init", #alloc

	/*
	 * Standard v7 proc info content.
	 *
	 * \name        - the proc_info label (used for the initfn reference)
	 * \initfunc    - per-core setup routine (e.g. __v7_setup)
	 * \mm_mmuflags - extra PMD flags for kernel section mappings
	 * \io_mmuflags - extra PMD flags for device/IO section mappings
	 * \hwcaps      - extra ELF hwcap bits beyond the v7 baseline
	 * \proc_fns    - struct processor to use (default: generic v7)
	 */
.macro __v7_proc name, initfunc, mm_mmuflags = 0, io_mmuflags = 0, hwcaps = 0, proc_fns = v7_processor_functions
	ALT_SMP(.long	PMD_TYPE_SECT | PMD_SECT_AP_WRITE | PMD_SECT_AP_READ | \
			PMD_SECT_AF | PMD_FLAGS_SMP | \mm_mmuflags)
	ALT_UP(.long	PMD_TYPE_SECT | PMD_SECT_AP_WRITE | PMD_SECT_AP_READ | \
			PMD_SECT_AF | PMD_FLAGS_UP | \mm_mmuflags)
	.long	PMD_TYPE_SECT | PMD_SECT_AP_WRITE | \
		PMD_SECT_AP_READ | PMD_SECT_AF | \io_mmuflags
	initfn	\initfunc, \name
	.long	cpu_arch_name
	.long	cpu_elf_name
	.long	HWCAP_SWP | HWCAP_HALF | HWCAP_THUMB | HWCAP_FAST_MULT | \
		HWCAP_EDSP | HWCAP_TLS | \hwcaps
	.long	cpu_v7_name
	.long	\proc_fns
	.long	v7wbi_tlb_fns
	.long	v6_user_fns
	.long	v7_cache_fns
.endm
662
#ifndef CONFIG_ARM_LPAE
	/*
	 * ARM Ltd. Cortex A5 processor.
	 * Each entry below is a MIDR (value, mask) pair followed by the
	 * standard __v7_proc content; the kernel matches (MIDR & mask) == value.
	 */
	.type   __v7_ca5mp_proc_info, #object
__v7_ca5mp_proc_info:
	.long	0x410fc050
	.long	0xff0ffff0
	__v7_proc __v7_ca5mp_proc_info, __v7_ca5mp_setup
	.size	__v7_ca5mp_proc_info, . - __v7_ca5mp_proc_info

	/*
	 * ARM Ltd. Cortex A9 processor.
	 */
	.type   __v7_ca9mp_proc_info, #object
__v7_ca9mp_proc_info:
	.long	0x410fc090
	.long	0xff0ffff0
	__v7_proc __v7_ca9mp_proc_info, __v7_ca9mp_setup, proc_fns = ca9mp_processor_functions
	.size	__v7_ca9mp_proc_info, . - __v7_ca9mp_proc_info

	/*
	 * ARM Ltd. Cortex A8 processor.
	 */
	.type	__v7_ca8_proc_info, #object
__v7_ca8_proc_info:
	.long	0x410fc080
	.long	0xff0ffff0
	__v7_proc __v7_ca8_proc_info, __v7_setup, proc_fns = ca8_processor_functions
	.size	__v7_ca8_proc_info, . - __v7_ca8_proc_info

#endif	/* CONFIG_ARM_LPAE */

	/*
	 * Marvell PJ4B processor.
	 */
#ifdef CONFIG_CPU_PJ4B
	.type   __v7_pj4b_proc_info, #object
__v7_pj4b_proc_info:
	.long	0x560f5800
	.long	0xff0fff00
	__v7_proc __v7_pj4b_proc_info, __v7_pj4b_setup, proc_fns = pj4b_processor_functions
	.size	__v7_pj4b_proc_info, . - __v7_pj4b_proc_info
#endif

	/*
	 * ARM Ltd. Cortex R7 processor.
	 */
	.type	__v7_cr7mp_proc_info, #object
__v7_cr7mp_proc_info:
	.long	0x410fc170
	.long	0xff0ffff0
	__v7_proc __v7_cr7mp_proc_info, __v7_cr7mp_setup
	.size	__v7_cr7mp_proc_info, . - __v7_cr7mp_proc_info

	/*
	 * ARM Ltd. Cortex A7 processor.
	 */
	.type	__v7_ca7mp_proc_info, #object
__v7_ca7mp_proc_info:
	.long	0x410fc070
	.long	0xff0ffff0
	__v7_proc __v7_ca7mp_proc_info, __v7_ca7mp_setup
	.size	__v7_ca7mp_proc_info, . - __v7_ca7mp_proc_info

	/*
	 * ARM Ltd. Cortex A12 processor.
	 */
	.type	__v7_ca12mp_proc_info, #object
__v7_ca12mp_proc_info:
	.long	0x410fc0d0
	.long	0xff0ffff0
	__v7_proc __v7_ca12mp_proc_info, __v7_ca12mp_setup, proc_fns = HARDENED_BPIALL_PROCESSOR_FUNCTIONS
	.size	__v7_ca12mp_proc_info, . - __v7_ca12mp_proc_info

	/*
	 * ARM Ltd. Cortex A15 processor.
	 */
	.type	__v7_ca15mp_proc_info, #object
__v7_ca15mp_proc_info:
	.long	0x410fc0f0
	.long	0xff0ffff0
	__v7_proc __v7_ca15mp_proc_info, __v7_ca15mp_setup, proc_fns = ca15_processor_functions
	.size	__v7_ca15mp_proc_info, . - __v7_ca15mp_proc_info

	/*
	 * Broadcom Corporation Brahma-B15 processor.
	 */
	.type	__v7_b15mp_proc_info, #object
__v7_b15mp_proc_info:
	.long	0x420f00f0
	.long	0xff0ffff0
	__v7_proc __v7_b15mp_proc_info, __v7_b15mp_setup, proc_fns = ca15_processor_functions
	.size	__v7_b15mp_proc_info, . - __v7_b15mp_proc_info

	/*
	 * ARM Ltd. Cortex A17 processor.
	 */
	.type	__v7_ca17mp_proc_info, #object
__v7_ca17mp_proc_info:
	.long	0x410fc0e0
	.long	0xff0ffff0
	__v7_proc __v7_ca17mp_proc_info, __v7_ca17mp_setup, proc_fns = HARDENED_BPIALL_PROCESSOR_FUNCTIONS
	.size	__v7_ca17mp_proc_info, . - __v7_ca17mp_proc_info

	/* ARM Ltd. Cortex A73 processor */
	.type	__v7_ca73_proc_info, #object
__v7_ca73_proc_info:
	.long	0x410fd090
	.long	0xff0ffff0
	__v7_proc __v7_ca73_proc_info, __v7_setup, proc_fns = HARDENED_BPIALL_PROCESSOR_FUNCTIONS
	.size	__v7_ca73_proc_info, . - __v7_ca73_proc_info

	/* ARM Ltd. Cortex A75 processor */
	.type	__v7_ca75_proc_info, #object
__v7_ca75_proc_info:
	.long	0x410fd0a0
	.long	0xff0ffff0
	__v7_proc __v7_ca75_proc_info, __v7_setup, proc_fns = HARDENED_BPIALL_PROCESSOR_FUNCTIONS
	.size	__v7_ca75_proc_info, . - __v7_ca75_proc_info

	/*
	 * Qualcomm Inc. Krait processors.
	 */
	.type	__krait_proc_info, #object
__krait_proc_info:
	.long	0x510f0400		@ Required ID value
	.long	0xff0ffc00		@ Mask for ID
	/*
	 * Some Krait processors don't indicate support for SDIV and UDIV
	 * instructions in the ARM instruction set, even though they actually
	 * do support them. They also don't indicate support for fused multiply
	 * instructions even though they actually do support them.
	 */
	__v7_proc __krait_proc_info, __v7_setup, hwcaps = HWCAP_IDIV | HWCAP_VFPv4
	.size	__krait_proc_info, . - __krait_proc_info

	/*
	 * Match any ARMv7 processor core.
	 */
	.type	__v7_proc_info, #object
__v7_proc_info:
	.long	0x000f0000		@ Required ID value
	.long	0x000f0000		@ Mask for ID
	__v7_proc __v7_proc_info, __v7_setup
	.size	__v7_proc_info, . - __v7_proc_info