/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "base/logging.h"
#include "calling_convention_arm64.h"
#include "utils/arm64/managed_register_arm64.h"

namespace art {
namespace arm64 {

static const Register kCoreArgumentRegisters[] = {
  X0, X1, X2, X3, X4, X5, X6, X7
};

static const WRegister kWArgumentRegisters[] = {
  W0, W1, W2, W3, W4, W5, W6, W7
};

static const DRegister kDArgumentRegisters[] = {
  D0, D1, D2, D3, D4, D5, D6, D7
};

static const SRegister kSArgumentRegisters[] = {
  S0, S1, S2, S3, S4, S5, S6, S7
};

// Calling convention
ManagedRegister Arm64ManagedRuntimeCallingConvention::InterproceduralScratchRegister() {
  return Arm64ManagedRegister::FromCoreRegister(X20);  // Saved on entry, restored on exit.
}

ManagedRegister Arm64JniCallingConvention::InterproceduralScratchRegister() {
  return Arm64ManagedRegister::FromCoreRegister(X20);  // Saved on entry, restored on exit.
}

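// A method's shorty encodes the return type in its first character, followed
// by one character per argument; e.g. `double f(int, long)` has the shorty
// "DIJ". The return register therefore depends only on shorty[0].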
static ManagedRegister ReturnRegisterForShorty(const char* shorty) {
  if (shorty[0] == 'F') {
    return Arm64ManagedRegister::FromSRegister(S0);
  } else if (shorty[0] == 'D') {
    return Arm64ManagedRegister::FromDRegister(D0);
  } else if (shorty[0] == 'J') {
    return Arm64ManagedRegister::FromCoreRegister(X0);
  } else if (shorty[0] == 'V') {
    return Arm64ManagedRegister::NoRegister();
  } else {
    return Arm64ManagedRegister::FromWRegister(W0);
  }
}

ManagedRegister Arm64ManagedRuntimeCallingConvention::ReturnRegister() {
  return ReturnRegisterForShorty(GetShorty());
}

ManagedRegister Arm64JniCallingConvention::ReturnRegister() {
  return ReturnRegisterForShorty(GetShorty());
}

ManagedRegister Arm64JniCallingConvention::IntReturnRegister() {
  return Arm64ManagedRegister::FromWRegister(W0);
}

// Managed runtime calling convention

ManagedRegister Arm64ManagedRuntimeCallingConvention::MethodRegister() {
  return Arm64ManagedRegister::FromCoreRegister(X0);
}

bool Arm64ManagedRuntimeCallingConvention::IsCurrentParamInRegister() {
  return false;  // Everything moved to stack on entry.
}

bool Arm64ManagedRuntimeCallingConvention::IsCurrentParamOnStack() {
  return true;
}

ManagedRegister Arm64ManagedRuntimeCallingConvention::CurrentParamRegister() {
  LOG(FATAL) << "Should not reach here";
  return ManagedRegister::NoRegister();
}

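// Offsets are computed from the start of the managed frame: the Method*
// reference comes first, then each argument occupies one 32-bit slot per
// itr_slots_ step (longs and doubles advance itr_slots_ by two).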
FrameOffset Arm64ManagedRuntimeCallingConvention::CurrentParamStackOffset() {
  CHECK(IsCurrentParamOnStack());
  FrameOffset result =
      FrameOffset(displacement_.Int32Value() +  // displacement
                  sizeof(StackReference<mirror::ArtMethod>) +  // Method ref
                  (itr_slots_ * sizeof(uint32_t)));  // offset into in args
  return result;
}

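// As a sketch of the mapping below: for an instance method with shorty "VJF",
// the iterator visits the implicit `this` (a reference, so W1), then the long
// (X2), then the float (S0) -- assuming the managed-runtime iterator includes
// the receiver as the first argument.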
const ManagedRegisterEntrySpills& Arm64ManagedRuntimeCallingConvention::EntrySpills() {
  // We spill the argument registers on ARM64 to free them up for scratch use;
  // we then assume all arguments are on the stack.
  if ((entry_spills_.size() == 0) && (NumArgs() > 0)) {
    int gp_reg_index = 1;  // We start from X1/W1; X0 holds ArtMethod*.
    int fp_reg_index = 0;  // D0/S0.

    // We need to choose the correct register (D/S or X/W) since the managed
    // stack uses 32-bit stack slots.
    ResetIterator(FrameOffset(0));
    while (HasNext()) {
      if (IsCurrentParamAFloatOrDouble()) {  // FP regs.
        if (fp_reg_index < 8) {
          if (!IsCurrentParamADouble()) {
            entry_spills_.push_back(
                Arm64ManagedRegister::FromSRegister(kSArgumentRegisters[fp_reg_index]));
          } else {
            entry_spills_.push_back(
                Arm64ManagedRegister::FromDRegister(kDArgumentRegisters[fp_reg_index]));
          }
          fp_reg_index++;
        } else {  // Just increase the stack offset.
          if (!IsCurrentParamADouble()) {
            entry_spills_.push_back(ManagedRegister::NoRegister(), 4);
          } else {
            entry_spills_.push_back(ManagedRegister::NoRegister(), 8);
          }
        }
      } else {  // GP regs.
        if (gp_reg_index < 8) {
          if (IsCurrentParamALong() && (!IsCurrentParamAReference())) {
            entry_spills_.push_back(
                Arm64ManagedRegister::FromCoreRegister(kCoreArgumentRegisters[gp_reg_index]));
          } else {
            entry_spills_.push_back(
                Arm64ManagedRegister::FromWRegister(kWArgumentRegisters[gp_reg_index]));
          }
          gp_reg_index++;
        } else {  // Just increase the stack offset.
          if (IsCurrentParamALong() && (!IsCurrentParamAReference())) {
            entry_spills_.push_back(ManagedRegister::NoRegister(), 8);
          } else {
            entry_spills_.push_back(ManagedRegister::NoRegister(), 4);
          }
        }
      }
      Next();
    }
  }
  return entry_spills_;
}

// JNI calling convention
Arm64JniCallingConvention::Arm64JniCallingConvention(bool is_static, bool is_synchronized,
                                                     const char* shorty)
    : JniCallingConvention(is_static, is_synchronized, shorty, kFramePointerSize) {
  // TODO: Ugly hard-coded list; these should be generated from the spill mask
  // automatically.
  callee_save_regs_.push_back(Arm64ManagedRegister::FromCoreRegister(X20));
  callee_save_regs_.push_back(Arm64ManagedRegister::FromCoreRegister(X21));
  callee_save_regs_.push_back(Arm64ManagedRegister::FromCoreRegister(X22));
  callee_save_regs_.push_back(Arm64ManagedRegister::FromCoreRegister(X23));
  callee_save_regs_.push_back(Arm64ManagedRegister::FromCoreRegister(X24));
  callee_save_regs_.push_back(Arm64ManagedRegister::FromCoreRegister(X25));
  callee_save_regs_.push_back(Arm64ManagedRegister::FromCoreRegister(X26));
  callee_save_regs_.push_back(Arm64ManagedRegister::FromCoreRegister(X27));
  callee_save_regs_.push_back(Arm64ManagedRegister::FromCoreRegister(X28));
  callee_save_regs_.push_back(Arm64ManagedRegister::FromCoreRegister(X29));
  callee_save_regs_.push_back(Arm64ManagedRegister::FromCoreRegister(X30));
}

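// One possible shape for the constructor TODO above -- a minimal sketch,
// assuming the spill mask is available as a constant at construction time
// (hypothetical kCoreSpillMask) and that Register enum values match their bit
// positions in the mask:
//
//   for (int reg = 0; reg < kNumberOfCoreRegisters; ++reg) {
//     if ((kCoreSpillMask & (1u << reg)) != 0) {
//       callee_save_regs_.push_back(
//           Arm64ManagedRegister::FromCoreRegister(static_cast<Register>(reg)));
//     }
//   }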
uint32_t Arm64JniCallingConvention::CoreSpillMask() const {
  // Compute the spill mask to agree with the callee saves initialized in the
  // constructor.
  // Note: The native JNI function may call VM runtime functions that suspend
  // or trigger GC, in which case the JNI method frame becomes the top quick
  // frame. To satisfy the GC we must therefore save LR and the callee-save
  // registers, similar to the CalleeSaveMethod(RefOnly) frame.
  // The JNI function is the native function that the Java code wants to call.
  // The JNI method is the method compiled by the JNI compiler.
  // Call chain: managed code (Java) --> JNI method --> JNI function.
  // The thread register (X18, treated as scratch by AAPCS64) is not saved on
  // the stack; it is saved in ETR (X21).
  // The suspend register (X19) is preserved by AAPCS64 and is not used in the
  // JNI method.
  return 1 << X20 | 1 << X21 | 1 << X22 | 1 << X23 | 1 << X24 | 1 << X25 |
         1 << X26 | 1 << X27 | 1 << X28 | 1 << X29 | 1 << LR;
}

uint32_t Arm64JniCallingConvention::FpSpillMask() const {
  // Compute the spill mask to agree with the callee saves initialized in the
  // constructor. Note: All callee-save FP registers are preserved by AAPCS64
  // and are not used in the JNI method.
  return 0;
}

ManagedRegister Arm64JniCallingConvention::ReturnScratchRegister() const {
  return ManagedRegister::NoRegister();
}

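// Frame layout, in the order summed below: Method* reference | callee-save
// area | local reference segment state (a uint32_t cookie) | HandleScope
// (two-word header plus one reference slot per handle) | return value spill
// area, with the total rounded up to kStackAlignment.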
size_t Arm64JniCallingConvention::FrameSize() {
  // Method*, callee save area size, local reference segment state
  size_t frame_data_size = sizeof(StackReference<mirror::ArtMethod>) +
      CalleeSaveRegisters().size() * kFramePointerSize + sizeof(uint32_t);
  // References plus 2 words for HandleScope header
  size_t handle_scope_size = HandleScope::SizeOf(kFramePointerSize, ReferenceCount());
  // Plus return value spill area size
  return RoundUp(frame_data_size + handle_scope_size + SizeOfReturnValue(), kStackAlignment);
}

size_t Arm64JniCallingConvention::OutArgSize() {
  return RoundUp(NumberOfOutgoingStackArgs() * kFramePointerSize, kStackAlignment);
}

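// Under AAPCS64 the first eight floating-point arguments are passed in V0-V7
// (accessed as D/S registers here) and the first eight integer or pointer
// arguments in X0-X7 (or their W views); everything else goes on the stack.
// itr_args_ counts all arguments seen so far and itr_float_and_doubles_ only
// the FP ones, so their difference is the current GP argument index.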
bool Arm64JniCallingConvention::IsCurrentParamInRegister() {
  if (IsCurrentParamAFloatOrDouble()) {
    return (itr_float_and_doubles_ < 8);
  } else {
    return ((itr_args_ - itr_float_and_doubles_) < 8);
  }
}

bool Arm64JniCallingConvention::IsCurrentParamOnStack() {
  return !IsCurrentParamInRegister();
}

ManagedRegister Arm64JniCallingConvention::CurrentParamRegister() {
  CHECK(IsCurrentParamInRegister());
  if (IsCurrentParamAFloatOrDouble()) {
    CHECK_LT(itr_float_and_doubles_, 8u);
    if (IsCurrentParamADouble()) {
      return Arm64ManagedRegister::FromDRegister(kDArgumentRegisters[itr_float_and_doubles_]);
    } else {
      return Arm64ManagedRegister::FromSRegister(kSArgumentRegisters[itr_float_and_doubles_]);
    }
  } else {
    int gp_reg = itr_args_ - itr_float_and_doubles_;
    CHECK_LT(static_cast<unsigned int>(gp_reg), 8u);
    if (IsCurrentParamALong() || IsCurrentParamAReference() || IsCurrentParamJniEnv()) {
      return Arm64ManagedRegister::FromCoreRegister(kCoreArgumentRegisters[gp_reg]);
    } else {
      return Arm64ManagedRegister::FromWRegister(kWArgumentRegisters[gp_reg]);
    }
  }
}

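// Stack arguments begin once the register budgets are exhausted: of the
// arguments seen so far, up to eight FP and up to eight GP ones went to
// registers, and the rest each took one 8-byte out-arg slot. For example, at
// the 20th argument (itr_args_ == 19) with five FP arguments seen so far,
// args_on_stack below is 19 - 5 - 8 == 6.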
FrameOffset Arm64JniCallingConvention::CurrentParamStackOffset() {
  CHECK(IsCurrentParamOnStack());
  size_t args_on_stack = itr_args_
                         - std::min(8u, itr_float_and_doubles_)
                         - std::min(8u, (itr_args_ - itr_float_and_doubles_));
  size_t offset = displacement_.Int32Value() - OutArgSize() + (args_on_stack * kFramePointerSize);
  CHECK_LT(offset, OutArgSize());
  return FrameOffset(offset);
}

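// The same register-budget arithmetic, applied to the whole signature:
// all_args counts every outgoing argument, including the JNI extras (JNIEnv*,
// plus jclass for static methods); subtracting the up-to-eight FP and
// up-to-eight GP register arguments leaves the stack arguments.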
size_t Arm64JniCallingConvention::NumberOfOutgoingStackArgs() {
  // All arguments, including JNI args.
  size_t all_args = NumArgs() + NumberOfExtraArgumentsForJni();

  size_t all_stack_args = all_args -
      std::min(8u, static_cast<unsigned int>(NumFloatOrDoubleArgs())) -
      std::min(8u, static_cast<unsigned int>((all_args - NumFloatOrDoubleArgs())));

  return all_stack_args;
}

}  // namespace arm64
}  // namespace art