/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "assembler_arm64.h"
#include "base/logging.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "offsets.h"
#include "thread.h"

using namespace vixl::aarch64;  // NOLINT(build/namespaces)

namespace art {
namespace arm64 {

#ifdef ___
#error "ARM64 Assembler macro already defined."
#else
#define ___   vixl_masm_.
#endif
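// With this shorthand, "___ Ldr(...)" below expands to "vixl_masm_.Ldr(...)",
// i.e. a call on the underlying VIXL macro assembler.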

void Arm64Assembler::FinalizeCode() {
  ___ FinalizeCode();
}

size_t Arm64Assembler::CodeSize() const {
  return vixl_masm_.GetSizeOfCodeGenerated();
}

const uint8_t* Arm64Assembler::CodeBufferBaseAddress() const {
  return vixl_masm_.GetBuffer().GetStartAddress<const uint8_t*>();
}

void Arm64Assembler::FinalizeInstructions(const MemoryRegion& region) {
  // Copy the instructions from the buffer.
  MemoryRegion from(vixl_masm_.GetBuffer()->GetStartAddress<void*>(), CodeSize());
  region.CopyFrom(0, from);
}

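// Load a 64-bit value from [base + offs] into dst. Both dst and base must be X registers.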
void Arm64Assembler::LoadRawPtr(ManagedRegister m_dst, ManagedRegister m_base, Offset offs) {
  Arm64ManagedRegister dst = m_dst.AsArm64();
  Arm64ManagedRegister base = m_base.AsArm64();
  CHECK(dst.IsXRegister() && base.IsXRegister());
  // Remove dst and base from the temp list - higher level API uses IP1, IP0.
  UseScratchRegisterScope temps(&vixl_masm_);
  temps.Exclude(reg_x(dst.AsXRegister()), reg_x(base.AsXRegister()));
  ___ Ldr(reg_x(dst.AsXRegister()), MEM_OP(reg_x(base.AsXRegister()), offs.Int32Value()));
}

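// Load the branch target from [base + offs] into the scratch register and jump to it.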
void Arm64Assembler::JumpTo(ManagedRegister m_base, Offset offs, ManagedRegister m_scratch) {
  Arm64ManagedRegister base = m_base.AsArm64();
  Arm64ManagedRegister scratch = m_scratch.AsArm64();
  CHECK(base.IsXRegister()) << base;
  CHECK(scratch.IsXRegister()) << scratch;
  // Remove base and scratch from the temp list - higher level API uses IP1, IP0.
  UseScratchRegisterScope temps(&vixl_masm_);
  temps.Exclude(reg_x(base.AsXRegister()), reg_x(scratch.AsXRegister()));
  ___ Ldr(reg_x(scratch.AsXRegister()), MEM_OP(reg_x(base.AsXRegister()), offs.Int32Value()));
  ___ Br(reg_x(scratch.AsXRegister()));
}

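// Map a VIXL CPURegister to the DWARF register number used for CFI bookkeeping.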
static inline dwarf::Reg DWARFReg(CPURegister reg) {
  if (reg.IsFPRegister()) {
    return dwarf::Reg::Arm64Fp(reg.GetCode());
  } else {
    DCHECK_LT(reg.GetCode(), 31u);  // X0 - X30.
    return dwarf::Reg::Arm64Core(reg.GetCode());
  }
}

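// Spill the given registers to the stack at [sp + offset], pairing stores with stp
// where possible, and record the corresponding CFI offsets.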
void Arm64Assembler::SpillRegisters(CPURegList registers, int offset) {
  int size = registers.GetRegisterSizeInBytes();
  const Register sp = vixl_masm_.StackPointer();
  // Since we are operating on register pairs, we would like to align on
  // double the standard size; on the other hand, we don't want to insert
  // an extra store, which will happen if the number of registers is even.
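  // E.g. (illustrative): spilling three X registers (size 8) at offset 8 - the offset
  // is not 16-aligned and the count is odd, so one register is stored with str at
  // offset 8 and the remaining pair with stp at the now 16-aligned offset 16.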
  if (!IsAlignedParam(offset, 2 * size) && registers.GetCount() % 2 != 0) {
    const CPURegister& dst0 = registers.PopLowestIndex();
    ___ Str(dst0, MemOperand(sp, offset));
    cfi_.RelOffset(DWARFReg(dst0), offset);
    offset += size;
  }
  while (registers.GetCount() >= 2) {
    const CPURegister& dst0 = registers.PopLowestIndex();
    const CPURegister& dst1 = registers.PopLowestIndex();
    ___ Stp(dst0, dst1, MemOperand(sp, offset));
    cfi_.RelOffset(DWARFReg(dst0), offset);
    cfi_.RelOffset(DWARFReg(dst1), offset + size);
    offset += 2 * size;
  }
  if (!registers.IsEmpty()) {
    const CPURegister& dst0 = registers.PopLowestIndex();
    ___ Str(dst0, MemOperand(sp, offset));
    cfi_.RelOffset(DWARFReg(dst0), offset);
  }
  DCHECK(registers.IsEmpty());
}

void Arm64Assembler::UnspillRegisters(CPURegList registers, int offset) {
  int size = registers.GetRegisterSizeInBytes();
  const Register sp = vixl_masm_.StackPointer();
  // Be consistent with the logic for spilling registers.
  if (!IsAlignedParam(offset, 2 * size) && registers.GetCount() % 2 != 0) {
    const CPURegister& dst0 = registers.PopLowestIndex();
    ___ Ldr(dst0, MemOperand(sp, offset));
    cfi_.Restore(DWARFReg(dst0));
    offset += size;
  }
  while (registers.GetCount() >= 2) {
    const CPURegister& dst0 = registers.PopLowestIndex();
    const CPURegister& dst1 = registers.PopLowestIndex();
    ___ Ldp(dst0, dst1, MemOperand(sp, offset));
    cfi_.Restore(DWARFReg(dst0));
    cfi_.Restore(DWARFReg(dst1));
    offset += 2 * size;
  }
  if (!registers.IsEmpty()) {
    const CPURegister& dst0 = registers.PopLowestIndex();
    ___ Ldr(dst0, MemOperand(sp, offset));
    cfi_.Restore(DWARFReg(dst0));
  }
  DCHECK(registers.IsEmpty());
}

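// Heap reference poisoning: a poisoned reference is the negation of the original
// value, so poisoning and unpoisoning both reduce to the same neg instruction.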
void Arm64Assembler::PoisonHeapReference(Register reg) {
  DCHECK(reg.IsW());
  // reg = -reg.
  ___ Neg(reg, Operand(reg));
}

void Arm64Assembler::UnpoisonHeapReference(Register reg) {
  DCHECK(reg.IsW());
  // reg = -reg.
  ___ Neg(reg, Operand(reg));
}

void Arm64Assembler::MaybePoisonHeapReference(Register reg) {
  if (kPoisonHeapReferences) {
    PoisonHeapReference(reg);
  }
}

void Arm64Assembler::MaybeUnpoisonHeapReference(Register reg) {
  if (kPoisonHeapReferences) {
    UnpoisonHeapReference(reg);
  }
}

#undef ___

}  // namespace arm64
}  // namespace art