1 /*
2  * Copyright (C) 2014 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #include "calling_convention_x86_64.h"
18 
19 #include <android-base/logging.h>
20 
21 #include "arch/instruction_set.h"
22 #include "arch/x86_64/jni_frame_x86_64.h"
23 #include "base/bit_utils.h"
24 #include "utils/x86_64/managed_register_x86_64.h"
25 
26 namespace art {
27 namespace x86_64 {
28 
// General-purpose argument registers, in argument order. The same six GPRs
// are used by both the managed ABI and the native System V AMD64 ABI.
static constexpr Register kCoreArgumentRegisters[] = {
    RDI, RSI, RDX, RCX, R8, R9
};
static_assert(kMaxIntLikeRegisterArguments == arraysize(kCoreArgumentRegisters));
33 
// Registers preserved across calls by the managed ABI. Note that XMM12-XMM15
// are managed callee-saves even though the native System V ABI treats every
// XMM register as caller-save (see OutFrameSize()/UseTailCall() below).
static constexpr ManagedRegister kCalleeSaveRegisters[] = {
    // Core registers.
    X86_64ManagedRegister::FromCpuRegister(RBX),
    X86_64ManagedRegister::FromCpuRegister(RBP),
    X86_64ManagedRegister::FromCpuRegister(R12),
    X86_64ManagedRegister::FromCpuRegister(R13),
    X86_64ManagedRegister::FromCpuRegister(R14),
    X86_64ManagedRegister::FromCpuRegister(R15),
    // Hard float registers.
    X86_64ManagedRegister::FromXmmRegister(XMM12),
    X86_64ManagedRegister::FromXmmRegister(XMM13),
    X86_64ManagedRegister::FromXmmRegister(XMM14),
    X86_64ManagedRegister::FromXmmRegister(XMM15),
};
48 
49 template <size_t size>
CalculateCoreCalleeSpillMask(const ManagedRegister (& callee_saves)[size])50 static constexpr uint32_t CalculateCoreCalleeSpillMask(
51     const ManagedRegister (&callee_saves)[size]) {
52   // The spilled PC gets a special marker.
53   uint32_t result = 1u << kNumberOfCpuRegisters;
54   for (auto&& r : callee_saves) {
55     if (r.AsX86_64().IsCpuRegister()) {
56       result |= (1u << r.AsX86_64().AsCpuRegister().AsRegister());
57     }
58   }
59   return result;
60 }
61 
62 template <size_t size>
CalculateFpCalleeSpillMask(const ManagedRegister (& callee_saves)[size])63 static constexpr uint32_t CalculateFpCalleeSpillMask(const ManagedRegister (&callee_saves)[size]) {
64   uint32_t result = 0u;
65   for (auto&& r : callee_saves) {
66     if (r.AsX86_64().IsXmmRegister()) {
67       result |= (1u << r.AsX86_64().AsXmmRegister().AsFloatRegister());
68     }
69   }
70   return result;
71 }
72 
// Spill masks for the managed ABI callee-saves, derived at compile time.
static constexpr uint32_t kCoreCalleeSpillMask = CalculateCoreCalleeSpillMask(kCalleeSaveRegisters);
static constexpr uint32_t kFpCalleeSpillMask = CalculateFpCalleeSpillMask(kCalleeSaveRegisters);
75 
// Registers preserved by the native System V AMD64 ABI. This is deliberately
// a prefix of kCalleeSaveRegisters (see the DCHECK in CalleeSaveRegisters()).
static constexpr ManagedRegister kNativeCalleeSaveRegisters[] = {
    // Core registers.
    X86_64ManagedRegister::FromCpuRegister(RBX),
    X86_64ManagedRegister::FromCpuRegister(RBP),
    X86_64ManagedRegister::FromCpuRegister(R12),
    X86_64ManagedRegister::FromCpuRegister(R13),
    X86_64ManagedRegister::FromCpuRegister(R14),
    X86_64ManagedRegister::FromCpuRegister(R15),
    // No callee-save float registers.
};
86 
// Spill masks for the native ABI callee-saves (no FP callee-saves natively).
static constexpr uint32_t kNativeCoreCalleeSpillMask =
    CalculateCoreCalleeSpillMask(kNativeCalleeSaveRegisters);
static constexpr uint32_t kNativeFpCalleeSpillMask =
    CalculateFpCalleeSpillMask(kNativeCalleeSaveRegisters);
91 
92 // Calling convention
93 
SavedLocalReferenceCookieRegister() const94 ManagedRegister X86_64JniCallingConvention::SavedLocalReferenceCookieRegister() const {
95   // The RBX is callee-save register in both managed and native ABIs.
96   // It is saved in the stack frame and it has no special purpose like `tr` on arm/arm64.
97   static_assert((kCoreCalleeSpillMask & (1u << RBX)) != 0u);  // Managed callee save register.
98   return X86_64ManagedRegister::FromCpuRegister(RBX);
99 }
100 
ReturnScratchRegister() const101 ManagedRegister X86_64JniCallingConvention::ReturnScratchRegister() const {
102   return ManagedRegister::NoRegister();  // No free regs, so assembler uses push/pop
103 }
104 
ReturnRegisterForShorty(const char * shorty,bool jni ATTRIBUTE_UNUSED)105 static ManagedRegister ReturnRegisterForShorty(const char* shorty, bool jni ATTRIBUTE_UNUSED) {
106   if (shorty[0] == 'F' || shorty[0] == 'D') {
107     return X86_64ManagedRegister::FromXmmRegister(XMM0);
108   } else if (shorty[0] == 'J') {
109     return X86_64ManagedRegister::FromCpuRegister(RAX);
110   } else if (shorty[0] == 'V') {
111     return ManagedRegister::NoRegister();
112   } else {
113     return X86_64ManagedRegister::FromCpuRegister(RAX);
114   }
115 }
116 
ReturnRegister()117 ManagedRegister X86_64ManagedRuntimeCallingConvention::ReturnRegister() {
118   return ReturnRegisterForShorty(GetShorty(), false);
119 }
120 
ReturnRegister()121 ManagedRegister X86_64JniCallingConvention::ReturnRegister() {
122   return ReturnRegisterForShorty(GetShorty(), true);
123 }
124 
IntReturnRegister()125 ManagedRegister X86_64JniCallingConvention::IntReturnRegister() {
126   return X86_64ManagedRegister::FromCpuRegister(RAX);
127 }
128 
129 // Managed runtime calling convention
130 
MethodRegister()131 ManagedRegister X86_64ManagedRuntimeCallingConvention::MethodRegister() {
132   return X86_64ManagedRegister::FromCpuRegister(RDI);
133 }
134 
IsCurrentParamInRegister()135 bool X86_64ManagedRuntimeCallingConvention::IsCurrentParamInRegister() {
136   if (IsCurrentParamAFloatOrDouble()) {
137     return itr_float_and_doubles_ < kMaxFloatOrDoubleRegisterArguments;
138   } else {
139     size_t non_fp_arg_number = itr_args_ - itr_float_and_doubles_;
140     return /* method */ 1u + non_fp_arg_number < kMaxIntLikeRegisterArguments;
141   }
142 }
143 
IsCurrentParamOnStack()144 bool X86_64ManagedRuntimeCallingConvention::IsCurrentParamOnStack() {
145   return !IsCurrentParamInRegister();
146 }
147 
CurrentParamRegister()148 ManagedRegister X86_64ManagedRuntimeCallingConvention::CurrentParamRegister() {
149   DCHECK(IsCurrentParamInRegister());
150   if (IsCurrentParamAFloatOrDouble()) {
151     // First eight float parameters are passed via XMM0..XMM7
152     FloatRegister fp_reg = static_cast<FloatRegister>(XMM0 + itr_float_and_doubles_);
153     return X86_64ManagedRegister::FromXmmRegister(fp_reg);
154   } else {
155     size_t non_fp_arg_number = itr_args_ - itr_float_and_doubles_;
156     Register core_reg = kCoreArgumentRegisters[/* method */ 1u + non_fp_arg_number];
157     return X86_64ManagedRegister::FromCpuRegister(core_reg);
158   }
159 }
160 
CurrentParamStackOffset()161 FrameOffset X86_64ManagedRuntimeCallingConvention::CurrentParamStackOffset() {
162   return FrameOffset(displacement_.Int32Value() +  // displacement
163                      static_cast<size_t>(kX86_64PointerSize) +  // Method ref
164                      itr_slots_ * sizeof(uint32_t));  // offset into in args
165 }
166 
167 // JNI calling convention
168 
// Forwards to the generic JniCallingConvention with the x86-64 pointer size;
// all architecture-specific behavior lives in the overridden methods below.
X86_64JniCallingConvention::X86_64JniCallingConvention(bool is_static,
                                                       bool is_synchronized,
                                                       bool is_critical_native,
                                                       const char* shorty)
    : JniCallingConvention(is_static,
                           is_synchronized,
                           is_critical_native,
                           shorty,
                           kX86_64PointerSize) {
}
179 
CoreSpillMask() const180 uint32_t X86_64JniCallingConvention::CoreSpillMask() const {
181   return is_critical_native_ ? 0u : kCoreCalleeSpillMask;
182 }
183 
FpSpillMask() const184 uint32_t X86_64JniCallingConvention::FpSpillMask() const {
185   return is_critical_native_ ? 0u : kFpCalleeSpillMask;
186 }
187 
// Size of the managed stack frame: ArtMethod*, return PC, callee-save area
// and (if needed) the return-value spill area, rounded to the stack alignment.
// @CriticalNative has no managed frame at all and returns 0.
size_t X86_64JniCallingConvention::FrameSize() const {
  if (is_critical_native_) {
    CHECK(!SpillsMethod());
    CHECK(!HasLocalReferenceSegmentState());
    CHECK(!SpillsReturnValue());
    return 0u;  // There is no managed frame for @CriticalNative.
  }

  // Method*, PC return address and callee save area size, local reference segment state
  DCHECK(SpillsMethod());
  const size_t method_ptr_size = static_cast<size_t>(kX86_64PointerSize);
  const size_t pc_return_addr_size = kFramePointerSize;
  const size_t callee_save_area_size = CalleeSaveRegisters().size() * kFramePointerSize;
  size_t total_size = method_ptr_size + pc_return_addr_size + callee_save_area_size;

  DCHECK(HasLocalReferenceSegmentState());
  // Cookie is saved in one of the spilled registers (see
  // SavedLocalReferenceCookieRegister()), so no extra slot is needed here.

  // Plus return value spill area size
  if (SpillsReturnValue()) {
    // No padding between the method pointer and the return value on x86-64.
    DCHECK_EQ(ReturnValueSaveLocation().SizeValue(), method_ptr_size);
    total_size += SizeOfReturnValue();
  }

  return RoundUp(total_size, kStackAlignment);
}
215 
// Size of the native out-args area; for @CriticalNative this also covers the
// always-spilled XMM registers and the return address, i.e. the entire stub
// frame, rounded to the native stack alignment.
size_t X86_64JniCallingConvention::OutFrameSize() const {
  // Count param args, including JNIEnv* and jclass*.
  size_t all_args = NumberOfExtraArgumentsForJni() + NumArgs();
  size_t num_fp_args = NumFloatOrDoubleArgs();
  DCHECK_GE(all_args, num_fp_args);
  size_t num_non_fp_args = all_args - num_fp_args;
  // The size of outgoing arguments.
  size_t size = GetNativeOutArgsSize(num_fp_args, num_non_fp_args);

  if (UNLIKELY(IsCriticalNative())) {
    // We always need to spill xmm12-xmm15 as they are managed callee-saves
    // but not native callee-saves.
    static_assert((kCoreCalleeSpillMask & ~kNativeCoreCalleeSpillMask) == 0u);
    static_assert((kFpCalleeSpillMask & ~kNativeFpCalleeSpillMask) != 0u);
    static_assert(
        kAlwaysSpilledMmxRegisters == POPCOUNT(kFpCalleeSpillMask & ~kNativeFpCalleeSpillMask));
    size += kAlwaysSpilledMmxRegisters * kMmxSpillSize;
    // Add return address size for @CriticalNative
    // For normal native the return PC is part of the managed stack frame instead of out args.
    size += kFramePointerSize;
  }

  size_t out_args_size = RoundUp(size, kNativeStackAlignment);
  if (UNLIKELY(IsCriticalNative())) {
    // Cross-check against the shared helper's computation (+1u for the method).
    DCHECK_EQ(out_args_size, GetCriticalNativeStubFrameSize(GetShorty(), NumArgs() + 1u));
  }
  return out_args_size;
}
244 
// Registers this convention spills in its frame. For @CriticalNative only the
// managed callee-saves that are NOT also native callee-saves (the XMM suffix)
// need spilling, because the native callee preserves the rest itself.
ArrayRef<const ManagedRegister> X86_64JniCallingConvention::CalleeSaveRegisters() const {
  if (UNLIKELY(IsCriticalNative())) {
    DCHECK(!UseTailCall());
    static_assert(std::size(kCalleeSaveRegisters) > std::size(kNativeCalleeSaveRegisters));
    // TODO: Change to static_assert; std::equal should be constexpr since C++20.
    DCHECK(std::equal(kCalleeSaveRegisters,
                      kCalleeSaveRegisters + std::size(kNativeCalleeSaveRegisters),
                      kNativeCalleeSaveRegisters,
                      [](ManagedRegister lhs, ManagedRegister rhs) { return lhs.Equals(rhs); }));
    // The native callee-saves form a prefix of kCalleeSaveRegisters (checked
    // above); the remaining suffix is what the stub still has to save.
    return ArrayRef<const ManagedRegister>(kCalleeSaveRegisters).SubArray(
        /*pos=*/ std::size(kNativeCalleeSaveRegisters));
  } else {
    return ArrayRef<const ManagedRegister>(kCalleeSaveRegisters);
  }
}
260 
IsCurrentParamInRegister()261 bool X86_64JniCallingConvention::IsCurrentParamInRegister() {
262   return !IsCurrentParamOnStack();
263 }
264 
IsCurrentParamOnStack()265 bool X86_64JniCallingConvention::IsCurrentParamOnStack() {
266   return CurrentParamRegister().IsNoRegister();
267 }
268 
CurrentParamRegister()269 ManagedRegister X86_64JniCallingConvention::CurrentParamRegister() {
270   ManagedRegister res = ManagedRegister::NoRegister();
271   if (!IsCurrentParamAFloatOrDouble()) {
272     switch (itr_args_ - itr_float_and_doubles_) {
273     case 0: res = X86_64ManagedRegister::FromCpuRegister(RDI); break;
274     case 1: res = X86_64ManagedRegister::FromCpuRegister(RSI); break;
275     case 2: res = X86_64ManagedRegister::FromCpuRegister(RDX); break;
276     case 3: res = X86_64ManagedRegister::FromCpuRegister(RCX); break;
277     case 4: res = X86_64ManagedRegister::FromCpuRegister(R8); break;
278     case 5: res = X86_64ManagedRegister::FromCpuRegister(R9); break;
279     static_assert(5u == kMaxIntLikeRegisterArguments - 1, "Missing case statement(s)");
280     }
281   } else if (itr_float_and_doubles_ < kMaxFloatOrDoubleRegisterArguments) {
282     // First eight float parameters are passed via XMM0..XMM7
283     res = X86_64ManagedRegister::FromXmmRegister(
284                                  static_cast<FloatRegister>(XMM0 + itr_float_and_doubles_));
285   }
286   return res;
287 }
288 
// Offset of the current stack-passed parameter in the native out-args area.
// Must only be called for parameters that did not get a register.
FrameOffset X86_64JniCallingConvention::CurrentParamStackOffset() {
  CHECK(IsCurrentParamOnStack());
  // Subtract the args already placed in XMM0..XMM7 and in the six GPRs;
  // what remains is this parameter's index among the stack-passed args.
  size_t args_on_stack = itr_args_
      - std::min(kMaxFloatOrDoubleRegisterArguments,
                 static_cast<size_t>(itr_float_and_doubles_))
          // Float arguments passed through Xmm0..Xmm7
      - std::min(kMaxIntLikeRegisterArguments,
                 static_cast<size_t>(itr_args_ - itr_float_and_doubles_));
          // Integer arguments passed through GPR
  // Stack args live in the out-args area below the managed frame, hence the
  // subtraction of OutFrameSize() from the displacement.
  size_t offset = displacement_.Int32Value() - OutFrameSize() + (args_on_stack * kFramePointerSize);
  CHECK_LT(offset, OutFrameSize());
  return FrameOffset(offset);
}
302 
// Register used to pass the hidden argument to @CriticalNative stubs.
// RAX is safe because it plays no other role in this convention (see DCHECK).
ManagedRegister X86_64JniCallingConvention::HiddenArgumentRegister() const {
  CHECK(IsCriticalNative());
  // RAX is neither managed callee-save, nor argument register, nor scratch register.
  DCHECK(std::none_of(kCalleeSaveRegisters,
                      kCalleeSaveRegisters + std::size(kCalleeSaveRegisters),
                      [](ManagedRegister callee_save) constexpr {
                        return callee_save.Equals(X86_64ManagedRegister::FromCpuRegister(RAX));
                      }));
  return X86_64ManagedRegister::FromCpuRegister(RAX);
}
313 
314 // Whether to use tail call (used only for @CriticalNative).
UseTailCall() const315 bool X86_64JniCallingConvention::UseTailCall() const {
316   CHECK(IsCriticalNative());
317   // We always need to spill xmm12-xmm15 as they are managed callee-saves
318   // but not native callee-saves, so we can never use a tail call.
319   return false;
320 }
321 
322 }  // namespace x86_64
323 }  // namespace art
324