/*
 * Copyright 2022 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * Saves the volatile registers onto the stack. This currently takes 14
 * instructions, so it can be used in exception handlers with 18 instructions
 * left.
 *
 * On return, x0 and x1 are initialised to elr_el1 and spsr_el1 respectively,
 * which can be used as the first and second arguments of a subsequent call.
 */
.macro save_volatile_to_stack
	/* Reserve stack space and save registers x0-x18, x29 & x30. */
	stp x0, x1, [sp, #-(8 * 24)]!
	stp x2, x3, [sp, #8 * 2]
	stp x4, x5, [sp, #8 * 4]
	stp x6, x7, [sp, #8 * 6]
	stp x8, x9, [sp, #8 * 8]
	stp x10, x11, [sp, #8 * 10]
	stp x12, x13, [sp, #8 * 12]
	stp x14, x15, [sp, #8 * 14]
	stp x16, x17, [sp, #8 * 16]
	str x18, [sp, #8 * 18]
	stp x29, x30, [sp, #8 * 20]

	/*
	 * Save elr_el1 & spsr_el1. This is so that we can take a nested
	 * exception and still be able to unwind.
	 */
	mrs x0, elr_el1
	mrs x1, spsr_el1
	stp x0, x1, [sp, #8 * 22]
.endm
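
/*
 * Stack frame layout shared by save_volatile_to_stack and
 * restore_volatile_from_stack, as byte offsets from sp once the 8 * 24 bytes
 * have been reserved:
 *
 *	[sp, #8 * 0]	x0, x1
 *	[sp, #8 * 2]	x2, x3	(and so on up to x16, x17 at #8 * 16)
 *	[sp, #8 * 18]	x18	(the slot at #8 * 19 is unused padding)
 *	[sp, #8 * 20]	x29, x30
 *	[sp, #8 * 22]	elr_el1, spsr_el1
 */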

/**
 * Restores the volatile registers from the stack. This currently takes 14
 * instructions, so it can be used in exception handlers with 18 instructions
 * left; if paired with save_volatile_to_stack, there are 4 instructions to
 * spare.
 */
.macro restore_volatile_from_stack
	/* Restore registers x2-x18, x29 & x30. */
	ldp x2, x3, [sp, #8 * 2]
	ldp x4, x5, [sp, #8 * 4]
	ldp x6, x7, [sp, #8 * 6]
	ldp x8, x9, [sp, #8 * 8]
	ldp x10, x11, [sp, #8 * 10]
	ldp x12, x13, [sp, #8 * 12]
	ldp x14, x15, [sp, #8 * 14]
	ldp x16, x17, [sp, #8 * 16]
	ldr x18, [sp, #8 * 18]
	ldp x29, x30, [sp, #8 * 20]

	/* Restore registers elr_el1 & spsr_el1, using x0 & x1 as scratch. */
	ldp x0, x1, [sp, #8 * 22]
	msr elr_el1, x0
	msr spsr_el1, x1

	/* Restore x0 & x1, and release stack space. */
	ldp x0, x1, [sp], #8 * 24
.endm

/**
 * This is a generic handler for exceptions taken at the current EL while using
 * SP0. It behaves similarly to the SPx case by first switching to SPx, doing
 * the work, then switching back to SP0 before returning.
 *
 * Switching to SPx and calling the Rust handler takes 16 instructions. To
 * restore and return we need an additional 16 instructions, so we can implement
 * the whole handler within the allotted 32 instructions.
 */
.macro current_exception_sp0 handler:req
	/* Switch to SPx before saving state and doing the work. */
	msr spsel, #1
	save_volatile_to_stack
	bl \handler
	restore_volatile_from_stack
	/* Switch back to SP0 before returning. */
	msr spsel, #0
	eret
.endm

/**
 * This is a generic handler for exceptions taken at the current EL while using
 * SPx. It saves volatile registers, calls the Rust handler, restores volatile
 * registers, then returns.
 *
 * This also works for exceptions taken from EL0, if we don't care about
 * non-volatile registers.
 *
 * Saving state and jumping to the Rust handler takes 15 instructions, and
 * restoring and returning also takes 15 instructions, so we can fit the whole
 * handler in 30 instructions, under the limit of 32.
 */
.macro current_exception_spx handler:req
	save_volatile_to_stack
	bl \handler
	restore_volatile_from_stack
	eret
.endm
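
/*
 * The handlers named in the vector table below (e.g. sync_exception_current)
 * are expected to be functions provided by the Rust code, declared so that
 * their names are not mangled (e.g. extern "C"). Because
 * save_volatile_to_stack leaves elr_el1 in x0 and spsr_el1 in x1, a handler
 * may take those two values as its first two arguments.
 */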

.section .text.vector_table_el1, "ax"
.global vector_table_el1
.balign 0x800
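/*
 * EL1 exception vector table. The architecture requires the base address to be
 * 2KB-aligned and gives each of the 16 vectors 0x80 bytes, i.e. 32
 * instructions, which is the budget the macros above are counted against.
 *
 * Illustrative sketch only (not part of this file): the table takes effect once
 * its address is written to vbar_el1, typically early in boot, e.g.
 *
 *	adr x9, vector_table_el1	// or ldr x9, =vector_table_el1
 *	msr vbar_el1, x9
 *	isb
 *
 * where x9 is an arbitrary scratch register chosen for the example.
 */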
vector_table_el1:
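/* Exceptions taken from the current EL while using SP_EL0. */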
sync_cur_sp0:
	current_exception_sp0 sync_exception_current

.balign 0x80
irq_cur_sp0:
	current_exception_sp0 irq_current

.balign 0x80
fiq_cur_sp0:
	current_exception_sp0 fiq_current

.balign 0x80
serr_cur_sp0:
	current_exception_sp0 serr_current

.balign 0x80
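/* Exceptions taken from the current EL while using SP_ELx. */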
sync_cur_spx:
	current_exception_spx sync_exception_current

.balign 0x80
irq_cur_spx:
	current_exception_spx irq_current

.balign 0x80
fiq_cur_spx:
	current_exception_spx fiq_current

.balign 0x80
serr_cur_spx:
	current_exception_spx serr_current

.balign 0x80
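/* Exceptions taken from a lower EL, using AArch64. */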
sync_lower_64:
	current_exception_spx sync_lower

.balign 0x80
irq_lower_64:
	current_exception_spx irq_lower

.balign 0x80
fiq_lower_64:
	current_exception_spx fiq_lower

.balign 0x80
serr_lower_64:
	current_exception_spx serr_lower

.balign 0x80
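/* Exceptions taken from a lower EL, using AArch32. */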
sync_lower_32:
	current_exception_spx sync_lower

.balign 0x80
irq_lower_32:
	current_exception_spx irq_lower

.balign 0x80
fiq_lower_32:
	current_exception_spx fiq_lower

.balign 0x80
serr_lower_32:
	current_exception_spx serr_lower
179