/*
 * Copyright (c) 2014 Travis Geiselbrecht
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files
 * (the "Software"), to deal in the Software without restriction,
 * including without limitation the rights to use, copy, modify, merge,
 * publish, distribute, sublicense, and/or sell copies of the Software,
 * and to permit persons to whom the Software is furnished to do so,
 * subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be
 * included in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
 * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
 * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 */
#include <asm.h>
#include <arch/asm_macros.h>

/* use x9 ~ x15 as scratch registers */
tmp .req x9
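/* x9-x15 are temporary (caller-saved) registers in the AAPCS64, so they are free to clobber */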

/* void arm64_context_switch(vaddr_t *old_sp, vaddr_t new_sp); */
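/*
 * Pushes x18-x29 and lr onto the outgoing thread's stack, stores the resulting
 * stack pointer through old_sp, then installs new_sp and pops the same frame
 * for the incoming thread. The remaining general purpose registers are
 * caller-saved across an ordinary call, so they are not preserved here.
 */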
FUNCTION(arm64_context_switch)
    /* save old frame */
    push x28, x29
    push x26, x27
    push x24, x25
    push x22, x23
    push x20, x21
    push x18, x19
    str x30, [sp,#-16]!

    /* save old sp */
    mov x15, sp
    str x15, [x0]

    /* load new sp */
    mov sp, x1

    /* restore new frame */
    ldr x30, [sp], #16
    pop x18, x19
    pop x20, x21
    pop x22, x23
    pop x24, x25
    pop x26, x27
    pop x28, x29

    ret

FUNCTION(arm64_chain_load)
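    /*
     * Presumed C prototype (not declared in this file):
     *   void arm64_chain_load(paddr_t entry, ulong arg0, ulong arg1, ulong arg2, ulong arg3);
     * The rotation below moves the entry point from x0 into x4 and shifts the
     * four arguments down into x0-x3, which is what the chained image receives.
     */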
    /* shuffle the args around */
    mov x5, x0
    mov x0, x1
    mov x1, x2
    mov x2, x3
    mov x3, x4
    mov x4, x5

#if WITH_KERNEL_VM
    /* disable MMU */
    mrs x5, sctlr_el1
    bic x5, x5, #0x1
    msr sctlr_el1, x5
    isb
#endif

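    /* invalidate all EL1&0 TLB entries on this core before branching to the new image */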
    tlbi vmalle1
    br x4

FUNCTION(arm64_elX_to_el1)
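    /*
     * Drop from the current exception level (EL3 or EL2) to EL1h and return to
     * the caller there. The current stack pointer is copied into SP_EL1 so the
     * same stack is used after the eret.
     */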
    mrs tmp, CurrentEL

    cmp tmp, #(0b01 << 2)
    bne .notEL1
    /* Already in EL1 */
    ret

.notEL1:
    cmp tmp, #(0b10 << 2)
    beq .inEL2

    /* set EL2 to 64bit */
    mrs tmp, scr_el3
    orr tmp, tmp, #(1<<10)
    msr scr_el3, tmp

    adr tmp, .Ltarget
    msr elr_el3, tmp

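    /* return state for the eret: DAIF masked (0b1111 << 6), M[3:0] = 0b0101 = EL1h */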
    mov tmp, #((0b1111 << 6) | (0b0101)) /* EL1h runlevel */
    msr spsr_el3, tmp
    b .confEL1

.inEL2:
    adr tmp, .Ltarget
    msr elr_el2, tmp
    mov tmp, #((0b1111 << 6) | (0b0101)) /* EL1h runlevel */
    msr spsr_el2, tmp

.confEL1:
    /* disable EL2 coprocessor traps */
    mov tmp, #0x33ff
    msr cptr_el2, tmp
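    /* the 0x33ff written above sets only CPTR_EL2's RES1 bits; TFP and TCPAC stay clear, so FP/SIMD and CPACR_EL1 accesses are not trapped to EL2 */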

    /* set EL1 to 64bit */
    mov tmp, #(1<<31)
    msr hcr_el2, tmp

    /* disable EL1 FPU traps */
    mov tmp, #(0b11<<20)
    msr cpacr_el1, tmp

    /* hand the current stack pointer over to EL1 (SP_EL1) so the stack is unchanged after the eret */
    mov tmp, sp
    msr sp_el1, tmp

    isb
    eret

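/* the eret above lands here at EL1; returning from here goes back to the original caller, now at EL1 */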
.Ltarget:
    ret