#------------------------------------------------------------------------------
#
# Copyright (c) 2008 - 2010, Apple Inc. All rights reserved.<BR>
# Copyright (c) 2011 - 2014, ARM Limited. All rights reserved.
#
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution.  The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#
#------------------------------------------------------------------------------

#include <Chipset/AArch64.h>
#include <AsmMacroIoLibV8.h>
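
// The EL1_OR_EL2_OR_EL3(reg) and EL1_OR_EL2(reg) macros used below come from
// AsmMacroIoLibV8.h: they read CurrentEL into the scratch register and branch
// to the local label 1:, 2: or 3: that handles the exception level the code
// is running at. Each user of the macro therefore provides one path per EL.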

.text
.align 3

GCC_ASM_EXPORT (ArmInvalidateInstructionCache)
GCC_ASM_EXPORT (ArmInvalidateDataCacheEntryByMVA)
GCC_ASM_EXPORT (ArmCleanDataCacheEntryByMVA)
GCC_ASM_EXPORT (ArmCleanDataCacheEntryToPoUByMVA)
GCC_ASM_EXPORT (ArmCleanInvalidateDataCacheEntryByMVA)
GCC_ASM_EXPORT (ArmInvalidateDataCacheEntryBySetWay)
GCC_ASM_EXPORT (ArmCleanDataCacheEntryBySetWay)
GCC_ASM_EXPORT (ArmCleanInvalidateDataCacheEntryBySetWay)
GCC_ASM_EXPORT (ArmEnableMmu)
GCC_ASM_EXPORT (ArmDisableMmu)
GCC_ASM_EXPORT (ArmDisableCachesAndMmu)
GCC_ASM_EXPORT (ArmMmuEnabled)
GCC_ASM_EXPORT (ArmEnableDataCache)
GCC_ASM_EXPORT (ArmDisableDataCache)
GCC_ASM_EXPORT (ArmEnableInstructionCache)
GCC_ASM_EXPORT (ArmDisableInstructionCache)
GCC_ASM_EXPORT (ArmDisableAlignmentCheck)
GCC_ASM_EXPORT (ArmEnableAlignmentCheck)
GCC_ASM_EXPORT (ArmEnableBranchPrediction)
GCC_ASM_EXPORT (ArmDisableBranchPrediction)
GCC_ASM_EXPORT (AArch64AllDataCachesOperation)
GCC_ASM_EXPORT (ArmDataMemoryBarrier)
GCC_ASM_EXPORT (ArmDataSynchronizationBarrier)
GCC_ASM_EXPORT (ArmInstructionSynchronizationBarrier)
GCC_ASM_EXPORT (ArmWriteVBar)
GCC_ASM_EXPORT (ArmReadVBar)
GCC_ASM_EXPORT (ArmEnableVFP)
GCC_ASM_EXPORT (ArmCallWFI)
GCC_ASM_EXPORT (ArmReadMpidr)
GCC_ASM_EXPORT (ArmReadTpidrurw)
GCC_ASM_EXPORT (ArmWriteTpidrurw)
GCC_ASM_EXPORT (ArmIsArchTimerImplemented)
GCC_ASM_EXPORT (ArmReadIdPfr0)
GCC_ASM_EXPORT (ArmReadIdPfr1)
GCC_ASM_EXPORT (ArmWriteHcr)
GCC_ASM_EXPORT (ArmReadCurrentEL)

.set CTRL_M_BIT,      (1 << 0)
.set CTRL_A_BIT,      (1 << 1)
.set CTRL_C_BIT,      (1 << 2)
.set CTRL_I_BIT,      (1 << 12)
.set CPACR_VFP_BITS,  (3 << 20)
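
// These constants mirror the SCTLR_ELx control bits: M enables the MMU,
// A enables alignment checking, C enables the data/unified caches and
// I enables the instruction cache. CPACR_VFP_BITS is the CPACR_EL1.FPEN
// field (bits [21:20]); setting it to 0b11 stops FP/SIMD instructions
// being trapped at EL0/EL1.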

ASM_PFX(ArmInvalidateDataCacheEntryByMVA):
  dc      ivac, x0    // Invalidate single data cache line
  ret


ASM_PFX(ArmCleanDataCacheEntryByMVA):
  dc      cvac, x0    // Clean single data cache line
  ret


ASM_PFX(ArmCleanDataCacheEntryToPoUByMVA):
  dc      cvau, x0    // Clean single data cache line to PoU
  ret


ASM_PFX(ArmCleanInvalidateDataCacheEntryByMVA):
  dc      civac, x0   // Clean and invalidate single data cache line
  ret


ASM_PFX(ArmInvalidateDataCacheEntryBySetWay):
  dc      isw, x0     // Invalidate this line
  ret


ASM_PFX(ArmCleanInvalidateDataCacheEntryBySetWay):
  dc      cisw, x0    // Clean and Invalidate this line
  ret


ASM_PFX(ArmCleanDataCacheEntryBySetWay):
  dc      csw, x0     // Clean this line
  ret
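
// Note: the single-entry operations above issue only the DC instruction and
// no barrier; callers are expected to issue DSB (and ISB where needed) once
// they have finished batching cache maintenance operations.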


ASM_PFX(ArmInvalidateInstructionCache):
  ic      iallu       // Invalidate entire instruction cache
  dsb     sy
  isb
  ret


ASM_PFX(ArmEnableMmu):
   EL1_OR_EL2_OR_EL3(x1)
1: mrs     x0, sctlr_el1       // Read System control register EL1
   b       4f
2: mrs     x0, sctlr_el2       // Read System control register EL2
   b       4f
3: mrs     x0, sctlr_el3       // Read System control register EL3
4: orr     x0, x0, #CTRL_M_BIT // Set MMU enable bit
   EL1_OR_EL2_OR_EL3(x1)
1: tlbi    vmalle1
   dsb     nsh
   isb
   msr     sctlr_el1, x0       // Write back
   b       4f
2: tlbi    alle2
   dsb     nsh
   isb
   msr     sctlr_el2, x0       // Write back
   b       4f
3: tlbi    alle3
   dsb     nsh
   isb
   msr     sctlr_el3, x0       // Write back
4: isb
   ret


ASM_PFX(ArmDisableMmu):
   EL1_OR_EL2_OR_EL3(x1)
1: mrs     x0, sctlr_el1        // Read System Control Register EL1
   b       4f
2: mrs     x0, sctlr_el2        // Read System Control Register EL2
   b       4f
3: mrs     x0, sctlr_el3        // Read System Control Register EL3
4: and     x0, x0, #~CTRL_M_BIT  // Clear MMU enable bit
   EL1_OR_EL2_OR_EL3(x1)
1: msr     sctlr_el1, x0        // Write back
   tlbi    vmalle1
   b       4f
2: msr     sctlr_el2, x0        // Write back
   tlbi    alle2
   b       4f
3: msr     sctlr_el3, x0        // Write back
   tlbi    alle3
4: dsb     sy
   isb
   ret
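
// Note: with SCTLR_ELx.M clear, data accesses at that EL are made with
// Device-nGnRnE attributes (uncached). Stale translations are removed with
// a TLBI, and the DSB/ISB pair makes the new configuration visible before
// returning.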


ASM_PFX(ArmDisableCachesAndMmu):
   EL1_OR_EL2_OR_EL3(x1)
1: mrs     x0, sctlr_el1        // Get control register EL1
   b       4f
2: mrs     x0, sctlr_el2        // Get control register EL2
   b       4f
3: mrs     x0, sctlr_el3        // Get control register EL3
4: mov     x1, #~(CTRL_M_BIT | CTRL_C_BIT | CTRL_I_BIT)  // Disable MMU, D & I caches
   and     x0, x0, x1
   EL1_OR_EL2_OR_EL3(x1)
1: msr     sctlr_el1, x0        // Write back control register
   b       4f
2: msr     sctlr_el2, x0        // Write back control register
   b       4f
3: msr     sctlr_el3, x0        // Write back control register
4: dsb     sy
   isb
   ret


ASM_PFX(ArmMmuEnabled):
   EL1_OR_EL2_OR_EL3(x1)
1: mrs     x0, sctlr_el1        // Get control register EL1
   b       4f
2: mrs     x0, sctlr_el2        // Get control register EL2
   b       4f
3: mrs     x0, sctlr_el3        // Get control register EL3
4: and     x0, x0, #CTRL_M_BIT
   ret


ASM_PFX(ArmEnableDataCache):
   EL1_OR_EL2_OR_EL3(x1)
1: mrs     x0, sctlr_el1        // Get control register EL1
   b       4f
2: mrs     x0, sctlr_el2        // Get control register EL2
   b       4f
3: mrs     x0, sctlr_el3        // Get control register EL3
4: orr     x0, x0, #CTRL_C_BIT  // Set C bit
   EL1_OR_EL2_OR_EL3(x1)
1: msr     sctlr_el1, x0        // Write back control register
   b       4f
2: msr     sctlr_el2, x0        // Write back control register
   b       4f
3: msr     sctlr_el3, x0        // Write back control register
4: dsb     sy
   isb
   ret


ASM_PFX(ArmDisableDataCache):
   EL1_OR_EL2_OR_EL3(x1)
1: mrs     x0, sctlr_el1        // Get control register EL1
   b       4f
2: mrs     x0, sctlr_el2        // Get control register EL2
   b       4f
3: mrs     x0, sctlr_el3        // Get control register EL3
4: and     x0, x0, #~CTRL_C_BIT  // Clear C bit
   EL1_OR_EL2_OR_EL3(x1)
1: msr     sctlr_el1, x0        // Write back control register
   b       4f
2: msr     sctlr_el2, x0        // Write back control register
   b       4f
3: msr     sctlr_el3, x0        // Write back control register
4: dsb     sy
   isb
   ret


ASM_PFX(ArmEnableInstructionCache):
   EL1_OR_EL2_OR_EL3(x1)
1: mrs     x0, sctlr_el1        // Get control register EL1
   b       4f
2: mrs     x0, sctlr_el2        // Get control register EL2
   b       4f
3: mrs     x0, sctlr_el3        // Get control register EL3
4: orr     x0, x0, #CTRL_I_BIT  // Set I bit
   EL1_OR_EL2_OR_EL3(x1)
1: msr     sctlr_el1, x0        // Write back control register
   b       4f
2: msr     sctlr_el2, x0        // Write back control register
   b       4f
3: msr     sctlr_el3, x0        // Write back control register
4: dsb     sy
   isb
   ret


ASM_PFX(ArmDisableInstructionCache):
   EL1_OR_EL2_OR_EL3(x1)
1: mrs     x0, sctlr_el1        // Get control register EL1
   b       4f
2: mrs     x0, sctlr_el2        // Get control register EL2
   b       4f
3: mrs     x0, sctlr_el3        // Get control register EL3
4: and     x0, x0, #~CTRL_I_BIT  // Clear I bit
   EL1_OR_EL2_OR_EL3(x1)
1: msr     sctlr_el1, x0        // Write back control register
   b       4f
2: msr     sctlr_el2, x0        // Write back control register
   b       4f
3: msr     sctlr_el3, x0        // Write back control register
4: dsb     sy
   isb
   ret


ASM_PFX(ArmEnableAlignmentCheck):
   EL1_OR_EL2(x1)
1: mrs     x0, sctlr_el1        // Get control register EL1
   b       3f
2: mrs     x0, sctlr_el2        // Get control register EL2
3: orr     x0, x0, #CTRL_A_BIT  // Set A (alignment check) bit
   EL1_OR_EL2(x1)
1: msr     sctlr_el1, x0        // Write back control register
   b       3f
2: msr     sctlr_el2, x0        // Write back control register
3: dsb     sy
   isb
   ret


ASM_PFX(ArmDisableAlignmentCheck):
   EL1_OR_EL2_OR_EL3(x1)
1: mrs     x0, sctlr_el1        // Get control register EL1
   b       4f
2: mrs     x0, sctlr_el2        // Get control register EL2
   b       4f
3: mrs     x0, sctlr_el3        // Get control register EL3
4: and     x0, x0, #~CTRL_A_BIT  // Clear A (alignment check) bit
   EL1_OR_EL2_OR_EL3(x1)
1: msr     sctlr_el1, x0        // Write back control register
   b       4f
2: msr     sctlr_el2, x0        // Write back control register
   b       4f
3: msr     sctlr_el3, x0        // Write back control register
4: dsb     sy
   isb
   ret


// Branch prediction is always enabled on AArch64; any further control is
// implementation specific. Kept as a no-op for C compatibility for now.
ASM_PFX(ArmEnableBranchPrediction):
  ret


// Branch prediction is always enabled on AArch64; any further control is
// implementation specific. Kept as a no-op for C compatibility for now.
ASM_PFX(ArmDisableBranchPrediction):
  ret


ASM_PFX(AArch64AllDataCachesOperation):
// We can use registers x0-x7 and x9-x15 without having to save/restore them.
// Save the link register on the stack (the stack must stay 16-byte aligned).
  str   x30, [sp, #-16]!
  mov   x1, x0                  // Save the operation function pointer in x1
  mrs   x6, clidr_el1           // Read EL1 CLIDR
  and   x3, x6, #0x7000000      // Mask out all but Level of Coherency (LoC)
  lsr   x3, x3, #23             // Extract LoC (CLIDR bits [26:24]) as LoC * 2, i.e. already
                                // shifted left by one to match the CSSELR level field and the
                                // level field of the Set/Way operand.
  cbz   x3, L_Finished          // No need to clean if LoC is 0
  mov   x10, #0                 // Start at cache level 0 (x10 holds level << 1)

Loop1:
  add   x2, x10, x10, lsr #1    // Work out 3 x cache level (CLIDR has 3 type bits per level)
  lsr   x12, x6, x2             // bottom 3 bits are the Cache type for this level
  and   x12, x12, #7            // get those 3 bits alone
  cmp   x12, #2                 // what cache is at this level?
  b.lt  L_Skip                  // no cache or only an instruction cache at this level
  msr   csselr_el1, x10         // write the Cache Size Selection Register (CSSELR) with the current level
  isb                           // sync the change to the Cache Size ID Register
  mrs   x12, ccsidr_el1         // read the current Cache Size ID Register (CCSIDR)
  and   x2, x12, #0x7           // extract the line length field
  add   x2, x2, #4              // add 4 for the line length offset (log2 of the 16-byte minimum line)
  mov   x4, #0x400
  sub   x4, x4, #1
  and   x4, x4, x12, lsr #3     // x4 is the maximum way number (right aligned)
  clz   w5, w4                  // w5 is the bit position of the way size increment
  mov   x7, #0x00008000
  sub   x7, x7, #1
  and   x7, x7, x12, lsr #13    // x7 is the maximum set (index) number (right aligned)
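
// Per the ARM ARM, the Set/Way operand built in Loop3 below packs:
//   cache level in bits [3:1]            (x10, which already holds level << 1),
//   set index    starting at bit L       (x7 << x2, L = log2 of the line length),
//   way index    starting at bit 32 - A  (x9 << w5, A = ceil(log2 of the way count),
//                                         so w5 = clz of the right-aligned max way number).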

Loop2:
  mov   x9, x4                  // x9 working copy of the max way number (right aligned)

Loop3:
  lsl   x11, x9, x5
  orr   x0, x10, x11            // factor in the way number and cache level
  lsl   x11, x7, x2
  orr   x0, x0, x11             // factor in the set (index) number

  blr   x1                      // call the requested cache operation for this set/way

  subs  x9, x9, #1              // decrement the way number
  b.ge  Loop3
  subs  x7, x7, #1              // decrement the set (index) number
  b.ge  Loop2
L_Skip:
  add   x10, x10, #2            // move to the next cache level (x10 holds level << 1)
  cmp   x3, x10
  b.gt  Loop1

L_Finished:
  dsb   sy
  isb
  ldr   x30, [sp], #0x10
  ret
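
// Typical use from C (sketch): the ArmLib cache routines pass one of the
// set/way entry points above as the operation, e.g.
//   AArch64AllDataCachesOperation (ArmCleanInvalidateDataCacheEntryBySetWay);
// which then visits every set/way of every data cache level up to the LoC.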


ASM_PFX(ArmDataMemoryBarrier):
  dmb   sy
  ret


ASM_PFX(ArmDataSynchronizationBarrier):
  dsb   sy
  ret


ASM_PFX(ArmInstructionSynchronizationBarrier):
  isb
  ret


ASM_PFX(ArmWriteVBar):
   EL1_OR_EL2_OR_EL3(x1)
1: msr   vbar_el1, x0            // Set the Address of the EL1 Vector Table in the VBAR register
   b     4f
2: msr   vbar_el2, x0            // Set the Address of the EL2 Vector Table in the VBAR register
   b     4f
3: msr   vbar_el3, x0            // Set the Address of the EL3 Vector Table in the VBAR register
4: isb
   ret

ASM_PFX(ArmReadVBar):
   EL1_OR_EL2_OR_EL3(x1)
1: mrs   x0, vbar_el1            // Get the address of the EL1 Vector Table from the VBAR register
   ret
2: mrs   x0, vbar_el2            // Get the address of the EL2 Vector Table from the VBAR register
   ret
3: mrs   x0, vbar_el3            // Get the address of the EL3 Vector Table from the VBAR register
   ret


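// FP/SIMD trapping is controlled per EL: CPACR_EL1.FPEN covers EL0/EL1, and
// the TFP bit of CPTR_EL2/CPTR_EL3 covers the higher ELs. ArmEnableVFP below
// clears every trap that the current EL is allowed to configure.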
ASM_PFX(ArmEnableVFP):
  // Check whether floating-point is implemented in the processor.
  mov   x1, x30                  // Save LR
  bl    ArmReadIdPfr0            // Read EL1 Processor Feature Register (PFR0)
  mov   x30, x1                  // Restore LR
  ands  x0, x0, #AARCH64_PFR0_FP // Extract bits indicating VFP implementation
  cmp   x0, #0                   // VFP is implemented if the FP field is '0'.
  b.ne  4f                       // Exit if VFP not implemented.
  // VFP is implemented.
  // Make sure VFP exceptions are not trapped (to any exception level).
  mrs   x0, cpacr_el1            // Read EL1 Coprocessor Access Control Register (CPACR)
  orr   x0, x0, #CPACR_VFP_BITS  // Disable VFP traps to EL1
  msr   cpacr_el1, x0            // Write back EL1 Coprocessor Access Control Register (CPACR)
  mov   x1, #AARCH64_CPTR_TFP    // TFP Bit for trapping VFP Exceptions
  EL1_OR_EL2_OR_EL3(x2)
1:ret                            // Not configurable in EL1
2:mrs   x0, cptr_el2             // Disable VFP traps to EL2
  bic   x0, x0, x1
  msr   cptr_el2, x0
  ret
3:mrs   x0, cptr_el3             // Disable VFP traps to EL3
  bic   x0, x0, x1
  msr   cptr_el3, x0
4:ret


ASM_PFX(ArmCallWFI):
  wfi
  ret


ASM_PFX(ArmReadMpidr):
  mrs   x0, mpidr_el1           // read EL1 MPIDR
  ret


// Keep the old function name for C compatibility for now. Change later?
ASM_PFX(ArmReadTpidrurw):
  mrs   x0, tpidr_el0           // read tpidr_el0 (v7 TPIDRURW) -> (v8 TPIDR_EL0)
  ret


// Keep the old function name for C compatibility for now. Change later?
ASM_PFX(ArmWriteTpidrurw):
  msr   tpidr_el0, x0           // write tpidr_el0 (v7 TPIDRURW) -> (v8 TPIDR_EL0)
  ret


// Arch timers are mandatory on AArch64
ASM_PFX(ArmIsArchTimerImplemented):
  mov   x0, #1
  ret


ASM_PFX(ArmReadIdPfr0):
  mrs   x0, id_aa64pfr0_el1   // Read ID_AA64PFR0 Register
  ret


// ID_AA64PFR1_EL1 is read when setting up the arch timer, to check for the
// Security extension and the permissions it implies.
// See: ArmPkg/Library/ArmArchTimerLib/AArch64/ArmArchTimerLib.c
// The register is not fully defined yet and currently reads as all zeros.
ASM_PFX(ArmReadIdPfr1):
  mrs   x0, id_aa64pfr1_el1   // Read ID_AA64PFR1 Register
  ret

// VOID ArmWriteHcr(UINTN Hcr)
ASM_PFX(ArmWriteHcr):
  msr   hcr_el2, x0        // Write the passed HCR value
  ret

// UINTN ArmReadCurrentEL(VOID)
ASM_PFX(ArmReadCurrentEL):
  mrs   x0, CurrentEL
  ret
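
// CurrentEL encodes the exception level in bits [3:2], so the values returned
// are 0x4, 0x8 and 0xC for EL1, EL2 and EL3 respectively; callers typically
// mask with 0xC before comparing.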

ASM_FUNCTION_REMOVE_IF_UNREFERENCED