1 /*
2  * Copyright (C) 2014 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #include "calling_convention_arm64.h"
18 
19 #include <android-base/logging.h>
20 
21 #include "arch/arm64/jni_frame_arm64.h"
22 #include "arch/instruction_set.h"
23 #include "utils/arm64/managed_register_arm64.h"
24 
25 namespace art {
26 namespace arm64 {
27 
28 static const XRegister kXArgumentRegisters[] = {
29   X0, X1, X2, X3, X4, X5, X6, X7
30 };
31 static_assert(kMaxIntLikeRegisterArguments == arraysize(kXArgumentRegisters));
32 
33 static const WRegister kWArgumentRegisters[] = {
34   W0, W1, W2, W3, W4, W5, W6, W7
35 };
36 static_assert(kMaxIntLikeRegisterArguments == arraysize(kWArgumentRegisters));
37 
38 static const DRegister kDArgumentRegisters[] = {
39   D0, D1, D2, D3, D4, D5, D6, D7
40 };
41 static_assert(kMaxFloatOrDoubleRegisterArguments == arraysize(kDArgumentRegisters));
42 
43 static const SRegister kSArgumentRegisters[] = {
44   S0, S1, S2, S3, S4, S5, S6, S7
45 };
46 static_assert(kMaxFloatOrDoubleRegisterArguments == arraysize(kSArgumentRegisters));
47 
static constexpr ManagedRegister kCalleeSaveRegisters[] = {
    // Core registers.
    // Note: The native JNI function may call VM runtime functions which may suspend
    // or trigger GC, and the JNI method frame becomes the top quick frame in those
    // cases. So we need to satisfy the GC by saving LR and the callee-save registers,
    // similar to the CalleeSaveMethod(RefOnly) frame.
    // "JNI function" is the native function that the Java code wants to call.
    // "JNI method" is the stub that is compiled by the JNI compiler.
    // Call chain: managed code (Java) --> JNI method --> JNI function.
    // This does not apply to @CriticalNative.

    // Thread register (X19) is saved on stack.
    Arm64ManagedRegister::FromXRegister(X19),
    Arm64ManagedRegister::FromXRegister(X20),
    Arm64ManagedRegister::FromXRegister(X21),
    Arm64ManagedRegister::FromXRegister(X22),
    Arm64ManagedRegister::FromXRegister(X23),
    Arm64ManagedRegister::FromXRegister(X24),
    Arm64ManagedRegister::FromXRegister(X25),
    Arm64ManagedRegister::FromXRegister(X26),
    Arm64ManagedRegister::FromXRegister(X27),
    Arm64ManagedRegister::FromXRegister(X28),
    Arm64ManagedRegister::FromXRegister(X29),
    Arm64ManagedRegister::FromXRegister(LR),
    // Hard float registers.
    // Considering the case, java_method_1 --> jni method --> jni function --> java_method_2,
    // we may break on java_method_2 and we still need to find out the values of DEX registers
    // in java_method_1. So all callee-saves (in managed code) need to be saved.
    Arm64ManagedRegister::FromDRegister(D8),
    Arm64ManagedRegister::FromDRegister(D9),
    Arm64ManagedRegister::FromDRegister(D10),
    Arm64ManagedRegister::FromDRegister(D11),
    Arm64ManagedRegister::FromDRegister(D12),
    Arm64ManagedRegister::FromDRegister(D13),
    Arm64ManagedRegister::FromDRegister(D14),
    Arm64ManagedRegister::FromDRegister(D15),
};
85 
86 template <size_t size>
CalculateCoreCalleeSpillMask(const ManagedRegister (& callee_saves)[size])87 static constexpr uint32_t CalculateCoreCalleeSpillMask(
88     const ManagedRegister (&callee_saves)[size]) {
89   uint32_t result = 0u;
90   for (auto&& r : callee_saves) {
91     if (r.AsArm64().IsXRegister()) {
92       result |= (1u << r.AsArm64().AsXRegister());
93     }
94   }
95   return result;
96 }
97 
98 template <size_t size>
CalculateFpCalleeSpillMask(const ManagedRegister (& callee_saves)[size])99 static constexpr uint32_t CalculateFpCalleeSpillMask(const ManagedRegister (&callee_saves)[size]) {
100   uint32_t result = 0u;
101   for (auto&& r : callee_saves) {
102     if (r.AsArm64().IsDRegister()) {
103       result |= (1u << r.AsArm64().AsDRegister());
104     }
105   }
106   return result;
107 }
108 
// Spill masks for the managed-ABI frame, derived from kCalleeSaveRegisters above.
static constexpr uint32_t kCoreCalleeSpillMask = CalculateCoreCalleeSpillMask(kCalleeSaveRegisters);
static constexpr uint32_t kFpCalleeSpillMask = CalculateFpCalleeSpillMask(kCalleeSaveRegisters);
111 
// Callee-save registers of the native AAPCS64 ABI. Used below (OutFrameSize)
// to verify that every managed callee-save is also a native callee-save,
// which is what makes @CriticalNative tail calls possible.
static constexpr ManagedRegister kAapcs64CalleeSaveRegisters[] = {
    // Core registers.
    Arm64ManagedRegister::FromXRegister(X19),
    Arm64ManagedRegister::FromXRegister(X20),
    Arm64ManagedRegister::FromXRegister(X21),
    Arm64ManagedRegister::FromXRegister(X22),
    Arm64ManagedRegister::FromXRegister(X23),
    Arm64ManagedRegister::FromXRegister(X24),
    Arm64ManagedRegister::FromXRegister(X25),
    Arm64ManagedRegister::FromXRegister(X26),
    Arm64ManagedRegister::FromXRegister(X27),
    Arm64ManagedRegister::FromXRegister(X28),
    Arm64ManagedRegister::FromXRegister(X29),
    Arm64ManagedRegister::FromXRegister(LR),
    // Hard float registers.
    Arm64ManagedRegister::FromDRegister(D8),
    Arm64ManagedRegister::FromDRegister(D9),
    Arm64ManagedRegister::FromDRegister(D10),
    Arm64ManagedRegister::FromDRegister(D11),
    Arm64ManagedRegister::FromDRegister(D12),
    Arm64ManagedRegister::FromDRegister(D13),
    Arm64ManagedRegister::FromDRegister(D14),
    Arm64ManagedRegister::FromDRegister(D15),
};

// Native-ABI spill masks, derived from the AAPCS64 list above.
static constexpr uint32_t kAapcs64CoreCalleeSpillMask =
    CalculateCoreCalleeSpillMask(kAapcs64CalleeSaveRegisters);
static constexpr uint32_t kAapcs64FpCalleeSpillMask =
    CalculateFpCalleeSpillMask(kAapcs64CalleeSaveRegisters);
141 
142 // Calling convention
ReturnRegisterForShorty(const char * shorty)143 static ManagedRegister ReturnRegisterForShorty(const char* shorty) {
144   if (shorty[0] == 'F') {
145     return Arm64ManagedRegister::FromSRegister(S0);
146   } else if (shorty[0] == 'D') {
147     return Arm64ManagedRegister::FromDRegister(D0);
148   } else if (shorty[0] == 'J') {
149     return Arm64ManagedRegister::FromXRegister(X0);
150   } else if (shorty[0] == 'V') {
151     return Arm64ManagedRegister::NoRegister();
152   } else {
153     return Arm64ManagedRegister::FromWRegister(W0);
154   }
155 }
156 
ManagedRegister Arm64ManagedRuntimeCallingConvention::ReturnRegister() {
  // Managed ABI: the return register is determined solely by the shorty.
  return ReturnRegisterForShorty(GetShorty());
}
160 
ManagedRegister Arm64JniCallingConvention::ReturnRegister() {
  // JNI ABI: same shorty-driven return register selection as the managed ABI.
  return ReturnRegisterForShorty(GetShorty());
}
164 
ManagedRegister Arm64JniCallingConvention::IntReturnRegister() {
  // 32-bit integral return values come back in W0.
  return Arm64ManagedRegister::FromWRegister(W0);
}
168 
169 // Managed runtime calling convention
170 
ManagedRegister Arm64ManagedRuntimeCallingConvention::MethodRegister() {
  // The method argument is passed in X0, the first GP argument register
  // (see the "/* method */ 1u" offsets applied to the other GP arguments below).
  return Arm64ManagedRegister::FromXRegister(X0);
}
174 
IsCurrentParamInRegister()175 bool Arm64ManagedRuntimeCallingConvention::IsCurrentParamInRegister() {
176   if (IsCurrentParamAFloatOrDouble()) {
177     return itr_float_and_doubles_ < kMaxFloatOrDoubleRegisterArguments;
178   } else {
179     size_t non_fp_arg_number = itr_args_ - itr_float_and_doubles_;
180     return /* method */ 1u + non_fp_arg_number < kMaxIntLikeRegisterArguments;
181   }
182 }
183 
bool Arm64ManagedRuntimeCallingConvention::IsCurrentParamOnStack() {
  // A parameter lives on the stack exactly when it is not in a register.
  return !IsCurrentParamInRegister();
}
187 
CurrentParamRegister()188 ManagedRegister Arm64ManagedRuntimeCallingConvention::CurrentParamRegister() {
189   DCHECK(IsCurrentParamInRegister());
190   if (IsCurrentParamAFloatOrDouble()) {
191     if (IsCurrentParamADouble()) {
192       return Arm64ManagedRegister::FromDRegister(kDArgumentRegisters[itr_float_and_doubles_]);
193     } else {
194       return Arm64ManagedRegister::FromSRegister(kSArgumentRegisters[itr_float_and_doubles_]);
195     }
196   } else {
197     size_t non_fp_arg_number = itr_args_ - itr_float_and_doubles_;
198     if (IsCurrentParamALong()) {
199       XRegister x_reg = kXArgumentRegisters[/* method */ 1u + non_fp_arg_number];
200       return Arm64ManagedRegister::FromXRegister(x_reg);
201     } else {
202       WRegister w_reg = kWArgumentRegisters[/* method */ 1u + non_fp_arg_number];
203       return Arm64ManagedRegister::FromWRegister(w_reg);
204     }
205   }
206 }
207 
CurrentParamStackOffset()208 FrameOffset Arm64ManagedRuntimeCallingConvention::CurrentParamStackOffset() {
209   return FrameOffset(displacement_.Int32Value() +  // displacement
210                      kFramePointerSize +  // Method ref
211                      (itr_slots_ * sizeof(uint32_t)));  // offset into in args
212 }
213 
214 // JNI calling convention
215 
// All arm64-specific behavior is in the overrides below; the base class only
// needs the method properties and the 64-bit pointer size.
Arm64JniCallingConvention::Arm64JniCallingConvention(bool is_static,
                                                     bool is_synchronized,
                                                     bool is_critical_native,
                                                     const char* shorty)
    : JniCallingConvention(is_static,
                           is_synchronized,
                           is_critical_native,
                           shorty,
                           kArm64PointerSize) {
}
226 
CoreSpillMask() const227 uint32_t Arm64JniCallingConvention::CoreSpillMask() const {
228   return is_critical_native_ ? 0u : kCoreCalleeSpillMask;
229 }
230 
FpSpillMask() const231 uint32_t Arm64JniCallingConvention::FpSpillMask() const {
232   return is_critical_native_ ? 0u : kFpCalleeSpillMask;
233 }
234 
// Register used to hold the saved local reference cookie across the native call.
ManagedRegister Arm64JniCallingConvention::SavedLocalReferenceCookieRegister() const {
  // W21 is a callee-save register in both the managed and native ABIs.
  // It is saved in the stack frame and it has no special purpose like `tr`.
  static_assert((kCoreCalleeSpillMask & (1u << W21)) != 0u);  // Managed callee save register.
  return Arm64ManagedRegister::FromWRegister(W21);
}
241 
ManagedRegister Arm64JniCallingConvention::ReturnScratchRegister() const {
  // No scratch register is reserved for the return sequence on arm64.
  return ManagedRegister::NoRegister();
}
245 
FrameSize() const246 size_t Arm64JniCallingConvention::FrameSize() const {
247   if (is_critical_native_) {
248     CHECK(!SpillsMethod());
249     CHECK(!HasLocalReferenceSegmentState());
250     CHECK(!SpillsReturnValue());
251     return 0u;  // There is no managed frame for @CriticalNative.
252   }
253 
254   // Method*, callee save area size, local reference segment state
255   DCHECK(SpillsMethod());
256   size_t method_ptr_size = static_cast<size_t>(kFramePointerSize);
257   size_t callee_save_area_size = CalleeSaveRegisters().size() * kFramePointerSize;
258   size_t total_size = method_ptr_size + callee_save_area_size;
259 
260   DCHECK(HasLocalReferenceSegmentState());
261   // Cookie is saved in one of the spilled registers.
262 
263   // Plus return value spill area size
264   if (SpillsReturnValue()) {
265     // No padding between the method pointer and the return value on arm64.
266     DCHECK_EQ(ReturnValueSaveLocation().SizeValue(), method_ptr_size);
267     total_size += SizeOfReturnValue();
268   }
269 
270   return RoundUp(total_size, kStackAlignment);
271 }
272 
OutFrameSize() const273 size_t Arm64JniCallingConvention::OutFrameSize() const {
274   // Count param args, including JNIEnv* and jclass*.
275   size_t all_args = NumberOfExtraArgumentsForJni() + NumArgs();
276   size_t num_fp_args = NumFloatOrDoubleArgs();
277   DCHECK_GE(all_args, num_fp_args);
278   size_t num_non_fp_args = all_args - num_fp_args;
279   // The size of outgoing arguments.
280   size_t size = GetNativeOutArgsSize(num_fp_args, num_non_fp_args);
281 
282   // @CriticalNative can use tail call as all managed callee saves are preserved by AAPCS64.
283   static_assert((kCoreCalleeSpillMask & ~kAapcs64CoreCalleeSpillMask) == 0u);
284   static_assert((kFpCalleeSpillMask & ~kAapcs64FpCalleeSpillMask) == 0u);
285 
286   // For @CriticalNative, we can make a tail call if there are no stack args and
287   // we do not need to extend the result. Otherwise, add space for return PC.
288   if (is_critical_native_ && (size != 0u || RequiresSmallResultTypeExtension())) {
289     size += kFramePointerSize;  // We need to spill LR with the args.
290   }
291   size_t out_args_size = RoundUp(size, kAapcs64StackAlignment);
292   if (UNLIKELY(IsCriticalNative())) {
293     DCHECK_EQ(out_args_size, GetCriticalNativeStubFrameSize(GetShorty(), NumArgs() + 1u));
294   }
295   return out_args_size;
296 }
297 
CalleeSaveRegisters() const298 ArrayRef<const ManagedRegister> Arm64JniCallingConvention::CalleeSaveRegisters() const {
299   if (UNLIKELY(IsCriticalNative())) {
300     if (UseTailCall()) {
301       return ArrayRef<const ManagedRegister>();  // Do not spill anything.
302     } else {
303       // Spill LR with out args.
304       static_assert((kCoreCalleeSpillMask >> LR) == 1u);  // Contains LR as the highest bit.
305       constexpr size_t lr_index = POPCOUNT(kCoreCalleeSpillMask) - 1u;
306       static_assert(kCalleeSaveRegisters[lr_index].Equals(
307                         Arm64ManagedRegister::FromXRegister(LR)));
308       return ArrayRef<const ManagedRegister>(kCalleeSaveRegisters).SubArray(
309           /*pos*/ lr_index, /*length=*/ 1u);
310     }
311   } else {
312     return ArrayRef<const ManagedRegister>(kCalleeSaveRegisters);
313   }
314 }
315 
IsCurrentParamInRegister()316 bool Arm64JniCallingConvention::IsCurrentParamInRegister() {
317   if (IsCurrentParamAFloatOrDouble()) {
318     return (itr_float_and_doubles_ < kMaxFloatOrDoubleRegisterArguments);
319   } else {
320     return ((itr_args_ - itr_float_and_doubles_) < kMaxIntLikeRegisterArguments);
321   }
322   // TODO: Can we just call CurrentParamRegister to figure this out?
323 }
324 
bool Arm64JniCallingConvention::IsCurrentParamOnStack() {
  // A parameter lives on the stack exactly when it is not in a register.
  // Is this ever not the same for all the architectures?
  return !IsCurrentParamInRegister();
}
329 
CurrentParamRegister()330 ManagedRegister Arm64JniCallingConvention::CurrentParamRegister() {
331   CHECK(IsCurrentParamInRegister());
332   if (IsCurrentParamAFloatOrDouble()) {
333     CHECK_LT(itr_float_and_doubles_, kMaxFloatOrDoubleRegisterArguments);
334     if (IsCurrentParamADouble()) {
335       return Arm64ManagedRegister::FromDRegister(kDArgumentRegisters[itr_float_and_doubles_]);
336     } else {
337       return Arm64ManagedRegister::FromSRegister(kSArgumentRegisters[itr_float_and_doubles_]);
338     }
339   } else {
340     int gp_reg = itr_args_ - itr_float_and_doubles_;
341     CHECK_LT(static_cast<unsigned int>(gp_reg), kMaxIntLikeRegisterArguments);
342     if (IsCurrentParamALong() || IsCurrentParamAReference() || IsCurrentParamJniEnv())  {
343       return Arm64ManagedRegister::FromXRegister(kXArgumentRegisters[gp_reg]);
344     } else {
345       return Arm64ManagedRegister::FromWRegister(kWArgumentRegisters[gp_reg]);
346     }
347   }
348 }
349 
CurrentParamStackOffset()350 FrameOffset Arm64JniCallingConvention::CurrentParamStackOffset() {
351   CHECK(IsCurrentParamOnStack());
352   size_t args_on_stack = itr_args_
353                   - std::min(kMaxFloatOrDoubleRegisterArguments,
354                              static_cast<size_t>(itr_float_and_doubles_))
355                   - std::min(kMaxIntLikeRegisterArguments,
356                              static_cast<size_t>(itr_args_ - itr_float_and_doubles_));
357   size_t offset = displacement_.Int32Value() - OutFrameSize() + (args_on_stack * kFramePointerSize);
358   CHECK_LT(offset, OutFrameSize());
359   return FrameOffset(offset);
360 }
361 
// Register carrying the hidden argument for @CriticalNative calls
// (presumably the target method — confirm against the stub that consumes it).
ManagedRegister Arm64JniCallingConvention::HiddenArgumentRegister() const {
  CHECK(IsCriticalNative());
  // X15 is neither managed callee-save, nor argument register, nor scratch register.
  // TODO: Change to static_assert; std::none_of should be constexpr since C++20.
  DCHECK(std::none_of(kCalleeSaveRegisters,
                      kCalleeSaveRegisters + std::size(kCalleeSaveRegisters),
                      [](ManagedRegister callee_save) constexpr {
                        return callee_save.Equals(Arm64ManagedRegister::FromXRegister(X15));
                      }));
  DCHECK(std::none_of(kXArgumentRegisters,
                      kXArgumentRegisters + std::size(kXArgumentRegisters),
                      [](XRegister reg) { return reg == X15; }));
  return Arm64ManagedRegister::FromXRegister(X15);
}
376 
// Whether to use tail call (used only for @CriticalNative).
bool Arm64JniCallingConvention::UseTailCall() const {
  CHECK(IsCriticalNative());
  // A tail call is possible only when no out-args space is needed
  // (no stack args, no LR spill, no result extension; see OutFrameSize()).
  return OutFrameSize() == 0u;
}
382 
383 }  // namespace arm64
384 }  // namespace art
385