/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_JNI_QUICK_CALLING_CONVENTION_H_
#define ART_COMPILER_JNI_QUICK_CALLING_CONVENTION_H_

#include "base/arena_object.h"
#include "base/array_ref.h"
#include "base/enums.h"
#include "dex/primitive.h"
#include "handle_scope.h"
#include "thread.h"
#include "utils/managed_register.h"

namespace art {

enum class InstructionSet;

// Top-level abstraction for different calling conventions.
class CallingConvention : public DeletableArenaObject<kArenaAllocCallingConvention> {
 public:
  bool IsReturnAReference() const { return shorty_[0] == 'L'; }

  Primitive::Type GetReturnType() const {
    return Primitive::GetType(shorty_[0]);
  }

  size_t SizeOfReturnValue() const {
    size_t result = Primitive::ComponentSize(Primitive::GetType(shorty_[0]));
    if (result >= 1 && result < 4) {
      result = 4;
    }
    return result;
  }
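  // For example, SizeOfReturnValue() reports 4 bytes for a 'Z' (boolean) or
  // 'S' (short) return even though their component sizes are 1 and 2, while
  // 'J' (long) and 'D' (double) report 8 and 'V' (void) reports 0.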

  // Register that holds result of this method invocation.
  virtual ManagedRegister ReturnRegister() = 0;
  // Register reserved for scratch usage during procedure calls.
  virtual ManagedRegister InterproceduralScratchRegister() const = 0;

  // Iterator interface

  // Place iterator at start of arguments. The displacement is applied to
  // frame offset methods to account for frames which may be on the stack
  // below the one being iterated over.
  void ResetIterator(FrameOffset displacement) {
    displacement_ = displacement;
    itr_slots_ = 0;
    itr_args_ = 0;
    itr_refs_ = 0;
    itr_longs_and_doubles_ = 0;
    itr_float_and_doubles_ = 0;
  }

  FrameOffset GetDisplacement() const {
    return displacement_;
  }

  PointerSize GetFramePointerSize() const {
    return frame_pointer_size_;
  }

  virtual ~CallingConvention() {}

 protected:
  CallingConvention(bool is_static,
                    bool is_synchronized,
                    const char* shorty,
                    PointerSize frame_pointer_size)
      : itr_slots_(0), itr_refs_(0), itr_args_(0), itr_longs_and_doubles_(0),
        itr_float_and_doubles_(0), displacement_(0),
        frame_pointer_size_(frame_pointer_size),
        handle_scope_pointer_size_(sizeof(StackReference<mirror::Object>)),
        is_static_(is_static), is_synchronized_(is_synchronized),
        shorty_(shorty) {
    num_args_ = (is_static ? 0 : 1) + strlen(shorty) - 1;
    num_ref_args_ = is_static ? 0 : 1;  // The implicit this pointer.
    num_float_or_double_args_ = 0;
    num_long_or_double_args_ = 0;
    for (size_t i = 1; i < strlen(shorty); i++) {
      char ch = shorty_[i];
      switch (ch) {
      case 'L':
        num_ref_args_++;
        break;
      case 'J':
        num_long_or_double_args_++;
        break;
      case 'D':
        num_long_or_double_args_++;
        num_float_or_double_args_++;
        break;
      case 'F':
        num_float_or_double_args_++;
        break;
      }
    }
  }
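  // For example, an instance method with shorty "ILJD" (int return; Object,
  // long and double arguments) is counted as num_args_ = 4 (including the
  // implicit |this|), num_ref_args_ = 2, num_long_or_double_args_ = 2 and
  // num_float_or_double_args_ = 1.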

  bool IsStatic() const {
    return is_static_;
  }
  bool IsSynchronized() const {
    return is_synchronized_;
  }
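  // The IsParamA*() and ParamSize() queries below take a caller-visible
  // argument index: for instance methods, param 0 is the implicit |this|;
  // for static methods the index is shifted up by one internally so that it
  // skips the return type at shorty_[0]. For example, for a static method
  // with shorty "VJI", IsParamALongOrDouble(0) inspects shorty_[1] ('J') and
  // returns true.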
  bool IsParamALongOrDouble(unsigned int param) const {
    DCHECK_LT(param, NumArgs());
    if (IsStatic()) {
      param++;  // 0th argument must skip return value at start of the shorty
    } else if (param == 0) {
      return false;  // this argument
    }
    char ch = shorty_[param];
    return (ch == 'J' || ch == 'D');
  }
  bool IsParamAFloatOrDouble(unsigned int param) const {
    DCHECK_LT(param, NumArgs());
    if (IsStatic()) {
      param++;  // 0th argument must skip return value at start of the shorty
    } else if (param == 0) {
      return false;  // this argument
    }
    char ch = shorty_[param];
    return (ch == 'F' || ch == 'D');
  }
  bool IsParamADouble(unsigned int param) const {
    DCHECK_LT(param, NumArgs());
    if (IsStatic()) {
      param++;  // 0th argument must skip return value at start of the shorty
    } else if (param == 0) {
      return false;  // this argument
    }
    return shorty_[param] == 'D';
  }
  bool IsParamALong(unsigned int param) const {
    DCHECK_LT(param, NumArgs());
    if (IsStatic()) {
      param++;  // 0th argument must skip return value at start of the shorty
    } else if (param == 0) {
      return false;  // this argument
    }
    return shorty_[param] == 'J';
  }
  bool IsParamAReference(unsigned int param) const {
    DCHECK_LT(param, NumArgs());
    if (IsStatic()) {
      param++;  // 0th argument must skip return value at start of the shorty
    } else if (param == 0) {
      return true;  // this argument
    }
    return shorty_[param] == 'L';
  }
  size_t NumArgs() const {
    return num_args_;
  }
  // Implicit argument count: 1 for instance functions, 0 for static functions.
  // (The implicit argument is only relevant to the shorty, i.e.
  // the 0th arg is not in the shorty if it's implicit).
  size_t NumImplicitArgs() const {
    return IsStatic() ? 0 : 1;
  }
  size_t NumLongOrDoubleArgs() const {
    return num_long_or_double_args_;
  }
  size_t NumFloatOrDoubleArgs() const {
    return num_float_or_double_args_;
  }
  size_t NumReferenceArgs() const {
    return num_ref_args_;
  }
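  // Size in bytes of the given argument: sub-word primitives ('Z', 'B', 'C',
  // 'S') are widened to 4 bytes, 'J' and 'D' take 8, and the implicit |this|
  // of an instance method takes sizeof(mirror::HeapReference<mirror::Object>).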
  size_t ParamSize(unsigned int param) const {
    DCHECK_LT(param, NumArgs());
    if (IsStatic()) {
      param++;  // 0th argument must skip return value at start of the shorty
    } else if (param == 0) {
      return sizeof(mirror::HeapReference<mirror::Object>);  // this argument
    }
    size_t result = Primitive::ComponentSize(Primitive::GetType(shorty_[param]));
    if (result >= 1 && result < 4) {
      result = 4;
    }
    return result;
  }
  const char* GetShorty() const {
    return shorty_.c_str();
  }
  // The slot number for the current calling_convention argument.
  // Note that each slot is 32-bit. When the current argument is bigger
  // than 32 bits, this holds the first slot number for the argument.
  unsigned int itr_slots_;
  // The number of references iterated past.
  unsigned int itr_refs_;
  // The argument number along the argument list for the current argument.
  unsigned int itr_args_;
  // Number of longs and doubles seen along the argument list.
  unsigned int itr_longs_and_doubles_;
  // Number of floats and doubles seen along the argument list.
  unsigned int itr_float_and_doubles_;
  // Space for frames below this on the stack.
  FrameOffset displacement_;
  // The size of a pointer.
  const PointerSize frame_pointer_size_;
  // The size of a reference entry within the handle scope.
  const size_t handle_scope_pointer_size_;

 private:
  const bool is_static_;
  const bool is_synchronized_;
  std::string shorty_;
  size_t num_args_;
  size_t num_ref_args_;
  size_t num_float_or_double_args_;
  size_t num_long_or_double_args_;
};

// Abstraction for managed code's calling conventions
// | { Incoming stack args } |
// | { Prior Method* }       | <-- Prior SP
// | { Return address }      |
// | { Callee saves }        |
// | { Spills ... }          |
// | { Outgoing stack args } |
// | { Method* }             | <-- SP
class ManagedRuntimeCallingConvention : public CallingConvention {
 public:
  static std::unique_ptr<ManagedRuntimeCallingConvention> Create(ArenaAllocator* allocator,
                                                                 bool is_static,
                                                                 bool is_synchronized,
                                                                 const char* shorty,
                                                                 InstructionSet instruction_set);

  // Offset of Method within the managed frame.
  FrameOffset MethodStackOffset() {
    return FrameOffset(0u);
  }

  // Register that holds the incoming method argument.
  virtual ManagedRegister MethodRegister() = 0;

  // Iterator interface
  bool HasNext();
  void Next();
  bool IsCurrentParamAReference();
  bool IsCurrentParamAFloatOrDouble();
  bool IsCurrentParamADouble();
  bool IsCurrentParamALong();
  bool IsCurrentArgExplicit();  // i.e. a non-implicit argument such as |this|
  bool IsCurrentArgPossiblyNull();
  size_t CurrentParamSize();
  virtual bool IsCurrentParamInRegister() = 0;
  virtual bool IsCurrentParamOnStack() = 0;
  virtual ManagedRegister CurrentParamRegister() = 0;
  virtual FrameOffset CurrentParamStackOffset() = 0;
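  //
  // A typical walk over the managed arguments looks roughly like this
  // (illustrative sketch; |mr_conv| and |frame_size| are placeholders and the
  // real driver lives in the JNI compiler):
  //
  //   mr_conv->ResetIterator(FrameOffset(frame_size));
  //   while (mr_conv->HasNext()) {
  //     if (mr_conv->IsCurrentParamInRegister()) {
  //       ManagedRegister reg = mr_conv->CurrentParamRegister();
  //       // ... move the argument out of |reg| ...
  //     } else {
  //       FrameOffset off = mr_conv->CurrentParamStackOffset();
  //       // ... load the argument from |off| ...
  //     }
  //     mr_conv->Next();
  //   }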

  virtual ~ManagedRuntimeCallingConvention() {}

  // Registers to spill to caller's out registers on entry.
  virtual const ManagedRegisterEntrySpills& EntrySpills() = 0;

 protected:
  ManagedRuntimeCallingConvention(bool is_static,
                                  bool is_synchronized,
                                  const char* shorty,
                                  PointerSize frame_pointer_size)
      : CallingConvention(is_static, is_synchronized, shorty, frame_pointer_size) {}
};

// Abstraction for JNI calling conventions
// | { Incoming stack args }         | <-- Prior SP
// | { Return address }              |
// | { Callee saves }                |     ([1])
// | { Return value spill }          |     (live on return slow paths)
// | { Local Ref. Table State }      |
// | { Stack Indirect Ref. Table     |
// |   num. refs./link }             |     (here to prior SP is frame size)
// | { Method* }                     | <-- Anchor SP written to thread
// | { Outgoing stack args }         | <-- SP at point of call
// | Native frame                    |
//
// [1] We must save all callee saves here to enable any exception throws to restore
// callee saves for frames above this one.
class JniCallingConvention : public CallingConvention {
 public:
  static std::unique_ptr<JniCallingConvention> Create(ArenaAllocator* allocator,
                                                      bool is_static,
                                                      bool is_synchronized,
                                                      bool is_critical_native,
                                                      const char* shorty,
                                                      InstructionSet instruction_set);

  // Size of frame excluding space for outgoing args (it's assumed Method* is
  // always at the bottom of a frame, but this doesn't work for outgoing
  // native args). Includes alignment.
  virtual size_t FrameSize() const = 0;
  // Size of outgoing arguments (stack portion), including alignment.
  // -- Arguments that are passed via registers are excluded from this size.
  virtual size_t OutArgSize() const = 0;
  // Number of references in the stack indirect reference table
  size_t ReferenceCount() const;
  // Location where the segment state of the local indirect reference table is saved
  FrameOffset SavedLocalReferenceCookieOffset() const;
  // Location where the return value of a call can be squirreled if another
  // call is made following the native call
  FrameOffset ReturnValueSaveLocation() const;
  // Register that holds the result if it is an integer.
  virtual ManagedRegister IntReturnRegister() = 0;
  // Whether the compiler needs to ensure zero-/sign-extension of a small result type
  virtual bool RequiresSmallResultTypeExtension() const = 0;

  // Callee save registers to spill prior to native code (which may clobber them)
  virtual ArrayRef<const ManagedRegister> CalleeSaveRegisters() const = 0;

  // Spill mask values
  virtual uint32_t CoreSpillMask() const = 0;
  virtual uint32_t FpSpillMask() const = 0;

  // An extra scratch register live after the call
  virtual ManagedRegister ReturnScratchRegister() const = 0;

  // Iterator interface
  bool HasNext();
  virtual void Next();
  bool IsCurrentParamAReference();
  bool IsCurrentParamAFloatOrDouble();
  bool IsCurrentParamADouble();
  bool IsCurrentParamALong();
  bool IsCurrentParamALongOrDouble() {
    return IsCurrentParamALong() || IsCurrentParamADouble();
  }
  bool IsCurrentParamJniEnv();
  size_t CurrentParamSize() const;
  virtual bool IsCurrentParamInRegister() = 0;
  virtual bool IsCurrentParamOnStack() = 0;
  virtual ManagedRegister CurrentParamRegister() = 0;
  virtual FrameOffset CurrentParamStackOffset() = 0;

  // Iterator interface extension for JNI
  FrameOffset CurrentParamHandleScopeEntryOffset();

  // Position of the handle scope and its interior fields
  FrameOffset HandleScopeOffset() const {
    return FrameOffset(this->displacement_.Int32Value() + static_cast<size_t>(frame_pointer_size_));
    // Just above the Method* reference.
  }

  FrameOffset HandleScopeLinkOffset() const {
    return FrameOffset(HandleScopeOffset().Int32Value() +
                       HandleScope::LinkOffset(frame_pointer_size_));
  }

  FrameOffset HandleScopeNumRefsOffset() const {
    return FrameOffset(HandleScopeOffset().Int32Value() +
                       HandleScope::NumberOfReferencesOffset(frame_pointer_size_));
  }

  FrameOffset HandleReferencesOffset() const {
    return FrameOffset(HandleScopeOffset().Int32Value() +
                       HandleScope::ReferencesOffset(frame_pointer_size_));
  }
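  // For example, on a 64-bit target (frame_pointer_size_ == PointerSize::k64)
  // the handle scope starts 8 bytes above displacement_, immediately above the
  // Method* slot, and the link/number-of-references/references offsets above
  // are computed from that start.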

  virtual ~JniCallingConvention() {}

  bool IsCriticalNative() const {
    return is_critical_native_;
  }

  // Does the transition have a method pointer in the stack frame?
  bool SpillsMethod() const {
    // Exclude method pointer for @CriticalNative methods for optimization speed.
    return !IsCriticalNative();
  }

  // Hidden argument register, used to pass the method pointer for @CriticalNative calls.
  virtual ManagedRegister HiddenArgumentRegister() const = 0;

  // Whether to use a tail call (used only for @CriticalNative).
  virtual bool UseTailCall() const = 0;

  // Whether the return type is small. Used for RequiresSmallResultTypeExtension()
  // on architectures that require the sign/zero extension.
  bool HasSmallReturnType() const {
    Primitive::Type return_type = GetReturnType();
    return return_type == Primitive::kPrimByte ||
           return_type == Primitive::kPrimShort ||
           return_type == Primitive::kPrimBoolean ||
           return_type == Primitive::kPrimChar;
  }

 protected:
  // Named iterator positions
  enum IteratorPos {
    kJniEnv = 0,
    kObjectOrClass = 1
  };

  JniCallingConvention(bool is_static,
                       bool is_synchronized,
                       bool is_critical_native,
                       const char* shorty,
                       PointerSize frame_pointer_size)
      : CallingConvention(is_static, is_synchronized, shorty, frame_pointer_size),
        is_critical_native_(is_critical_native) {}

 protected:
  size_t NumberOfExtraArgumentsForJni() const;

  // Does the transition have a StackHandleScope?
  bool HasHandleScope() const {
    // Exclude HandleScope for @CriticalNative methods for optimization speed.
    return !IsCriticalNative();
  }

  // Does the transition have a local reference segment state?
  bool HasLocalReferenceSegmentState() const {
    // Exclude local reference segment states for @CriticalNative methods for optimization speed.
    return !IsCriticalNative();
  }

  // Does the transition back spill the return value in the stack frame?
  bool SpillsReturnValue() const {
    // Exclude return value for @CriticalNative methods for optimization speed.
    return !IsCriticalNative();
  }

  // Are there extra JNI arguments (JNIEnv* and maybe jclass)?
  bool HasExtraArgumentsForJni() const {
    // @CriticalNative JNI implementations exclude both JNIEnv* and the jclass/jobject parameters.
    return !IsCriticalNative();
  }

  // Has a JNIEnv* parameter implicitly?
  bool HasJniEnv() const {
    // Exclude "JNIEnv*" parameter for @CriticalNative methods.
    return HasExtraArgumentsForJni();
  }

  // Has a 'jclass' parameter implicitly?
  bool HasSelfClass() const;
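  // For a non-@CriticalNative method the JNI transition prepends JNIEnv* and,
  // for static methods, the jclass of the declaring class, so there are two
  // extra JNI arguments for static methods, one for instance methods, and
  // none for @CriticalNative methods.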

  // Returns the position of itr_args_, fixed up by removing the offset of extra JNI arguments.
  unsigned int GetIteratorPositionWithinShorty() const;

  // Is the current argument (at the iterator) an extra argument for JNI?
  bool IsCurrentArgExtraForJni() const;

  const bool is_critical_native_;

 private:
  // Shorthand for switching on the switch value but only IF there are extra JNI arguments.
  //
  // Puts the case value into return_value.
  // * (switch_value == kJniEnv) => case_jni_env
  // * (switch_value == kObjectOrClass) => case_object_or_class
  //
  // Returns false otherwise (or if there are no extra JNI arguments).
  bool SwitchExtraJniArguments(size_t switch_value,
                               bool case_jni_env,
                               bool case_object_or_class,
                               /* out parameters */
                               bool* return_value) const;
};

}  // namespace art

#endif  // ART_COMPILER_JNI_QUICK_CALLING_CONVENTION_H_