/*
 * Copyright 2022 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <common.h>

.set .L_MAIR_DEV_nGnRE,	0x04
.set .L_MAIR_MEM_WBWA,	0xff
.set .Lmairval, .L_MAIR_DEV_nGnRE | (.L_MAIR_MEM_WBWA << 8)
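/*
 * With this layout, a page-table entry using attribute index 0 gets Device-nGnRE memory and one
 * using index 1 gets Normal write-back write-allocate memory; the remaining six attribute slots
 * are left as 0x00 (Device-nGnRnE).
 */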

/* 4 KiB granule size for TTBR0_EL1. */
.set .L_TCR_TG0_4KB, 0x0 << 14
/* 4 KiB granule size for TTBR1_EL1. */
.set .L_TCR_TG1_4KB, 0x2 << 30
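/*
 * Note that TG0 and TG1 use different encodings: 4 KiB is 0b00 in TG0 (bits [15:14]) but 0b10 in
 * TG1 (bits [31:30]).
 */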
/* Disable translation table walk for TTBR1_EL1, generating a translation fault instead. */
.set .L_TCR_EPD1, 0x1 << 23
/* Translation table walks for TTBR0_EL1 are inner shareable. */
.set .L_TCR_SH_INNER, 0x3 << 12
/*
 * Translation table walks for TTBR0_EL1 are outer write-back read-allocate write-allocate
 * cacheable.
 */
.set .L_TCR_RGN_OWB, 0x1 << 10
/*
 * Translation table walks for TTBR0_EL1 are inner write-back read-allocate write-allocate
 * cacheable.
 */
.set .L_TCR_RGN_IWB, 0x1 << 8
/* Size offset for TTBR0_EL1 is 2**39 bytes (512 GiB). */
.set .L_TCR_T0SZ_512, 64 - 39
.set .Ltcrval, .L_TCR_TG0_4KB | .L_TCR_TG1_4KB | .L_TCR_EPD1 | .L_TCR_RGN_OWB
.set .Ltcrval, .Ltcrval | .L_TCR_RGN_IWB | .L_TCR_SH_INNER | .L_TCR_T0SZ_512
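/*
 * Taken together: a 4 KiB granule, a single 512 GiB TTBR0 region whose walks are inner-shareable
 * and write-back cacheable, and TTBR1 walks disabled. Before IPS is filled in below, this works
 * out to 0x80803519.
 */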

/* Stage 1 instruction access cacheability is unaffected. */
.set .L_SCTLR_ELx_I, 0x1 << 12
/* SP alignment fault if SP is not aligned to a 16 byte boundary. */
.set .L_SCTLR_ELx_SA, 0x1 << 3
/* Stage 1 data access cacheability is unaffected. */
.set .L_SCTLR_ELx_C, 0x1 << 2
/* EL0 and EL1 stage 1 MMU enabled. */
.set .L_SCTLR_ELx_M, 0x1 << 0
/* Privileged Access Never is unchanged on taking an exception to EL1. */
.set .L_SCTLR_EL1_SPAN, 0x1 << 23
/* All writable memory regions are treated as XN. */
.set .L_SCTLR_EL1_WXN, 0x1 << 19
/* SETEND instruction disabled at EL0 in aarch32 mode. */
.set .L_SCTLR_EL1_SED, 0x1 << 8
/* Various IT instructions are disabled at EL0 in aarch32 mode. */
.set .L_SCTLR_EL1_ITD, 0x1 << 7
.set .L_SCTLR_EL1_RES1, (0x1 << 11) | (0x1 << 20) | (0x1 << 22) | (0x1 << 28) | (0x1 << 29)
.set .Lsctlrval, .L_SCTLR_ELx_M | .L_SCTLR_ELx_C | .L_SCTLR_ELx_SA | .L_SCTLR_EL1_ITD | .L_SCTLR_EL1_SED
.set .Lsctlrval, .Lsctlrval | .L_SCTLR_ELx_I | .L_SCTLR_EL1_SPAN | .L_SCTLR_EL1_RES1 | .L_SCTLR_EL1_WXN
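/*
 * Taken together (0x30d8198d): MMU on, data and instruction caches enabled, SP alignment checks,
 * WXN so nothing writable is executable, the aarch32 EL0 hardening bits, and the EL1 RES1 bits.
 */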

/**
 * This is a generic entry point for an image. It carries out the operations required to prepare
 * the loaded image to be run. Specifically, it enables the MMU and caches, zeroes the bss section
 * and copies the data section using registers x25 and above, prepares the stacks, enables floating
 * point, and sets up the exception vector and thread-local storage. It preserves x0-x3 for the
 * Rust entry point, as these may contain boot parameters.
 */
.section .init.entry, "ax"
.global entry
entry:
	/* Load and apply the memory management configuration, ready to enable MMU and caches. */

	adr x30, vector_table_panic
	msr vbar_el1, x30
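	/*
	 * Until the full vector table is installed further down, any exception now lands in
	 * vector_table_panic rather than at whatever address VBAR_EL1 held at reset.
	 */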

	/*
	 * Our load address is set by the host, so validate it before proceeding: adr yields the
	 * PC-relative address that entry is actually running at, while mov_i materialises its
	 * link-time address, so the two only match if the image was loaded where it was linked.
	 */
	adr x30, entry
	mov_i x29, entry
	cmp x29, x30
	b.eq 1f
	reset_or_hang
1:

	adrp x30, idmap
	msr ttbr0_el1, x30
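	/*
	 * adrp yields the address of the 4 KiB page containing idmap, presumably the statically
	 * allocated identity-mapping root table, which must itself be suitably aligned for
	 * TTBR0_EL1.
	 */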

	mov_i x30, .Lmairval
	msr mair_el1, x30

	mov_i x30, .Ltcrval
	/* Copy the supported PA range into TCR_EL1.IPS. */
	mrs x29, id_aa64mmfr0_el1
	bfi x30, x29, #32, #4
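	/*
	 * PARange is ID_AA64MMFR0_EL1[3:0]; the 4-bit copy into TCR_EL1[35:32] covers the 3-bit
	 * IPS field at [34:32] and also writes bit 35, which is RES0 and stays clear for all
	 * architecturally defined PARange values.
	 */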

	msr tcr_el1, x30

	mov_i x30, .Lsctlrval

	/*
	 * Ensure everything before this point has completed, then invalidate any potentially stale
	 * local TLB entries before they start being used.
	 */
	isb
	tlbi vmalle1
	ic iallu
	dsb nsh
	isb
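	/*
	 * vmalle1 drops all stage 1 EL1 TLB entries on this core, iallu invalidates its
	 * instruction cache, dsb nsh waits for both to complete on this PE only, and the final
	 * isb synchronises the context before the MMU is switched on.
	 */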

	/*
	 * Configure sctlr_el1 to enable the MMU and caches, and don't proceed until this has
	 * completed.
	 */
	msr sctlr_el1, x30
	isb

	/* Disable trapping floating point access in EL1. */
	mrs x30, cpacr_el1
	orr x30, x30, #(0x3 << 20)
	msr cpacr_el1, x30
	isb
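	/*
	 * CPACR_EL1.FPEN (bits [21:20]) = 0b11 means FP/SIMD instructions trap at neither EL0 nor
	 * EL1; the data section copy below depends on this, as it moves data through the q0/q1
	 * SIMD registers.
	 */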

	/* Zero out the bss section. */
	adr_l x29, bss_begin
	adr_l x30, bss_end
0:	cmp x29, x30
	b.hs 1f
	stp xzr, xzr, [x29], #16
	b 0b
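	/*
	 * Each stp clears 16 bytes, so this relies on bss_begin and bss_end being 16-byte
	 * aligned, presumably guaranteed by the linker script.
	 */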

1:	/* Copy the data section. */
	adr_l x28, data_begin
	adr_l x29, data_end
	adr_l x30, data_lma
2:	cmp x28, x29
	b.ge 3f
	ldp q0, q1, [x30], #32
	stp q0, q1, [x28], #32
	b 2b
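	/*
	 * This copies 32 bytes per iteration through q0/q1, and the b.ge check only stops on or
	 * past data_end, so the section bounds presumably need to be 32-byte multiples in the
	 * linker script.
	 */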

3:	/* Prepare the exception handler stack (SP_EL1). */
	adr_l x30, init_eh_stack_pointer
	msr spsel, #1
	mov sp, x30

	/* Prepare the main thread stack (SP_EL0). */
	adr_l x30, init_stack_pointer
	msr spsel, #0
	mov sp, x30
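	/*
	 * Execution continues on SP_EL0; taking an exception to EL1 sets PSTATE.SP back to 1, so
	 * handlers land on the SP_EL1 stack prepared above.
	 */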

	/* Set up exception vector. */
	adr x30, vector_table_el1
	msr vbar_el1, x30

	/*
	 * Set up Bionic-compatible thread-local storage.
	 *
	 * Note that TPIDR_EL0 can't be configured from rust_entry because the compiler will
	 * dereference it during function entry to access __stack_chk_guard, and Rust doesn't
	 * support LLVM's __attribute__((no_stack_protector)).
	 */
	adr_l x30, __bionic_tls
	msr tpidr_el0, x30

	/* Call into Rust code. */
	bl rust_entry

	/* Loop forever waiting for interrupts. */
4:	wfi
	b 4b