/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_ART_METHOD_H_
#define ART_RUNTIME_ART_METHOD_H_

#include <cstddef>

#include <android-base/logging.h>
#include <jni.h>

#include "base/array_ref.h"
#include "base/bit_utils.h"
#include "base/casts.h"
#include "base/enums.h"
#include "base/macros.h"
#include "base/runtime_debug.h"
#include "dex/code_item_accessors.h"
#include "dex/dex_file_structs.h"
#include "dex/dex_instruction_iterator.h"
#include "dex/modifiers.h"
#include "dex/primitive.h"
#include "dex/signature.h"
#include "gc_root.h"
#include "obj_ptr.h"
#include "offsets.h"
#include "read_barrier_option.h"

namespace art {

class DexFile;
template<class T> class Handle;
class ImtConflictTable;
enum InvokeType : uint32_t;
union JValue;
class OatQuickMethodHeader;
class ProfilingInfo;
class ScopedObjectAccessAlreadyRunnable;
class ShadowFrame;

namespace mirror {
class Array;
class Class;
class ClassLoader;
class DexCache;
class IfTable;
class Object;
template <typename MirrorType> class ObjectArray;
class PointerArray;
class String;

template <typename T> struct NativeDexCachePair;
using MethodDexCachePair = NativeDexCachePair<ArtMethod>;
using MethodDexCacheType = std::atomic<MethodDexCachePair>;
}  // namespace mirror

class ArtMethod final {
 public:
  // Should the class state be checked on sensitive operations?
  DECLARE_RUNTIME_DEBUG_FLAG(kCheckDeclaringClassState);

  // The runtime dex_method_index is kDexNoIndex. To lower dependencies, we use this
  // constexpr, and ensure that the value is correct in art_method.cc.
  static constexpr uint32_t kRuntimeMethodDexMethodIndex = 0xFFFFFFFF;

  ArtMethod() : access_flags_(0), dex_code_item_offset_(0), dex_method_index_(0),
      method_index_(0), hotness_count_(0) { }

  ArtMethod(ArtMethod* src, PointerSize image_pointer_size) {
    CopyFrom(src, image_pointer_size);
  }

  static ArtMethod* FromReflectedMethod(const ScopedObjectAccessAlreadyRunnable& soa,
                                        jobject jlr_method)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ALWAYS_INLINE ObjPtr<mirror::Class> GetDeclaringClass() REQUIRES_SHARED(Locks::mutator_lock_);

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ALWAYS_INLINE ObjPtr<mirror::Class> GetDeclaringClassUnchecked()
      REQUIRES_SHARED(Locks::mutator_lock_);

  mirror::CompressedReference<mirror::Object>* GetDeclaringClassAddressWithoutBarrier() {
    return declaring_class_.AddressWithoutBarrier();
  }

  void SetDeclaringClass(ObjPtr<mirror::Class> new_declaring_class)
      REQUIRES_SHARED(Locks::mutator_lock_);

  bool CASDeclaringClass(ObjPtr<mirror::Class> expected_class, ObjPtr<mirror::Class> desired_class)
      REQUIRES_SHARED(Locks::mutator_lock_);

  static constexpr MemberOffset DeclaringClassOffset() {
    return MemberOffset(OFFSETOF_MEMBER(ArtMethod, declaring_class_));
  }

  uint32_t GetAccessFlags() {
    return access_flags_.load(std::memory_order_relaxed);
  }

  // This version should only be called when it's certain there is no
  // concurrency, so there is no need to guarantee atomicity. For example,
  // before the method is linked.
  void SetAccessFlags(uint32_t new_access_flags) {
    access_flags_.store(new_access_flags, std::memory_order_relaxed);
  }

  static constexpr MemberOffset AccessFlagsOffset() {
    return MemberOffset(OFFSETOF_MEMBER(ArtMethod, access_flags_));
  }

  // Approximate what kind of method call would be used for this method.
  InvokeType GetInvokeType() REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns true if the method is declared public.
  bool IsPublic() {
    return (GetAccessFlags() & kAccPublic) != 0;
  }

  // Returns true if the method is declared private.
  bool IsPrivate() {
    return (GetAccessFlags() & kAccPrivate) != 0;
  }

  // Returns true if the method is declared static.
  bool IsStatic() {
    return (GetAccessFlags() & kAccStatic) != 0;
  }

  // Returns true if the method is a constructor according to access flags.
  bool IsConstructor() {
    return (GetAccessFlags() & kAccConstructor) != 0;
  }

  // Returns true if the method is a class initializer according to access flags.
  bool IsClassInitializer() {
    return IsConstructor() && IsStatic();
  }

  // Returns true if the method is static, private, or a constructor.
  bool IsDirect() {
    return IsDirect(GetAccessFlags());
  }

  static bool IsDirect(uint32_t access_flags) {
    constexpr uint32_t direct = kAccStatic | kAccPrivate | kAccConstructor;
    return (access_flags & direct) != 0;
  }

  // Returns true if the method is declared synchronized.
  bool IsSynchronized() {
    constexpr uint32_t synchronized_flags = kAccSynchronized | kAccDeclaredSynchronized;
    return (GetAccessFlags() & synchronized_flags) != 0;
  }

  bool IsFinal() {
    return (GetAccessFlags() & kAccFinal) != 0;
  }

  bool IsIntrinsic() {
    return (GetAccessFlags() & kAccIntrinsic) != 0;
  }

  ALWAYS_INLINE void SetIntrinsic(uint32_t intrinsic) REQUIRES_SHARED(Locks::mutator_lock_);

  uint32_t GetIntrinsic() {
    static const int kAccFlagsShift = CTZ(kAccIntrinsicBits);
    static_assert(IsPowerOfTwo((kAccIntrinsicBits >> kAccFlagsShift) + 1),
                  "kAccIntrinsicBits are not contiguous");
    static_assert((kAccIntrinsic & kAccIntrinsicBits) == 0,
                  "kAccIntrinsic overlaps kAccIntrinsicBits");
    DCHECK(IsIntrinsic());
    return (GetAccessFlags() & kAccIntrinsicBits) >> kAccFlagsShift;
  }
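  // Illustrative sketch only (the real constants live in dex/modifiers.h): if
  // kAccIntrinsicBits were 0x7fff0000, then kAccFlagsShift would be
  // CTZ(0x7fff0000) == 16, and an intrinsic with ordinal 5 would be encoded in
  // the access flags as kAccIntrinsic | (5 << 16).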

  void SetNotIntrinsic() REQUIRES_SHARED(Locks::mutator_lock_);

  bool IsCopied() {
    static_assert((kAccCopied & (kAccIntrinsic | kAccIntrinsicBits)) == 0,
                  "kAccCopied conflicts with intrinsic modifier");
    const bool copied = (GetAccessFlags() & kAccCopied) != 0;
    // (IsMiranda() || IsDefaultConflicting()) implies copied
    DCHECK(!(IsMiranda() || IsDefaultConflicting()) || copied)
        << "Miranda or default-conflict methods must always be copied.";
    return copied;
  }

  bool IsMiranda() {
    // The kAccMiranda flag value is used with a different meaning for native methods,
    // so we need to check the kAccNative flag as well.
    return (GetAccessFlags() & (kAccNative | kAccMiranda)) == kAccMiranda;
  }
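  // Note on the mask-and-compare idiom used above and in several predicates below:
  // (GetAccessFlags() & (kAccNative | kAccMiranda)) == kAccMiranda holds only when
  // kAccMiranda is set AND kAccNative is clear, i.e. two bits are tested with a
  // single load and compare.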

  // Returns true if invoking this method will not throw an AbstractMethodError or
  // IncompatibleClassChangeError.
  bool IsInvokable() {
    return !IsAbstract() && !IsDefaultConflicting();
  }

  bool IsCompilable() {
    if (IsIntrinsic()) {
      // kAccCompileDontBother overlaps with kAccIntrinsicBits.
      return true;
    }
    return (GetAccessFlags() & kAccCompileDontBother) == 0;
  }

  void SetDontCompile() {
    AddAccessFlags(kAccCompileDontBother);
  }

  // A default conflict method is a special sentinel method that stands for a conflict between
  // multiple default methods. It cannot be invoked, throwing an IncompatibleClassChangeError if one
  // attempts to do so.
  bool IsDefaultConflicting() {
    if (IsIntrinsic()) {
      return false;
    }
    return (GetAccessFlags() & kAccDefaultConflict) != 0u;
  }

  // This is set by the class linker.
  bool IsDefault() {
    static_assert((kAccDefault & (kAccIntrinsic | kAccIntrinsicBits)) == 0,
                  "kAccDefault conflicts with intrinsic modifier");
    return (GetAccessFlags() & kAccDefault) != 0;
  }

  bool IsObsolete() {
    return (GetAccessFlags() & kAccObsoleteMethod) != 0;
  }

  void SetIsObsolete() {
    AddAccessFlags(kAccObsoleteMethod);
  }

  bool IsNative() {
    return (GetAccessFlags() & kAccNative) != 0;
  }

  // Checks to see if the method was annotated with @dalvik.annotation.optimization.FastNative.
  bool IsFastNative() {
    // The presence of the annotation is checked by ClassLinker and recorded in access flags.
    // The kAccFastNative flag value is used with a different meaning for non-native methods,
    // so we need to check the kAccNative flag as well.
    constexpr uint32_t mask = kAccFastNative | kAccNative;
    return (GetAccessFlags() & mask) == mask;
  }

  // Checks to see if the method was annotated with @dalvik.annotation.optimization.CriticalNative.
  bool IsCriticalNative() {
    // The presence of the annotation is checked by ClassLinker and recorded in access flags.
    // The kAccCriticalNative flag value is used with a different meaning for non-native methods,
    // so we need to check the kAccNative flag as well.
    constexpr uint32_t mask = kAccCriticalNative | kAccNative;
    return (GetAccessFlags() & mask) == mask;
  }

  bool IsAbstract() {
    return (GetAccessFlags() & kAccAbstract) != 0;
  }

  bool IsSynthetic() {
    return (GetAccessFlags() & kAccSynthetic) != 0;
  }

  bool IsVarargs() {
    return (GetAccessFlags() & kAccVarargs) != 0;
  }

  bool IsProxyMethod() REQUIRES_SHARED(Locks::mutator_lock_);

  bool IsPolymorphicSignature() REQUIRES_SHARED(Locks::mutator_lock_);

  bool UseFastInterpreterToInterpreterInvoke() {
    // The bit is applicable only if the method is not intrinsic.
    constexpr uint32_t mask = kAccFastInterpreterToInterpreterInvoke | kAccIntrinsic;
    return (GetAccessFlags() & mask) == kAccFastInterpreterToInterpreterInvoke;
  }

  void SetFastInterpreterToInterpreterInvokeFlag() {
    DCHECK(!IsIntrinsic());
    AddAccessFlags(kAccFastInterpreterToInterpreterInvoke);
  }

  void ClearFastInterpreterToInterpreterInvokeFlag() {
    if (!IsIntrinsic()) {
      ClearAccessFlags(kAccFastInterpreterToInterpreterInvoke);
    }
  }

  bool SkipAccessChecks() {
    // The kAccSkipAccessChecks flag value is used with a different meaning for native methods,
    // so we need to check the kAccNative flag as well.
    return (GetAccessFlags() & (kAccSkipAccessChecks | kAccNative)) == kAccSkipAccessChecks;
  }

  void SetSkipAccessChecks() {
    // SkipAccessChecks() is applicable only to non-native methods.
    DCHECK(!IsNative());
    AddAccessFlags(kAccSkipAccessChecks);
  }

  bool PreviouslyWarm() {
    if (IsIntrinsic()) {
      // kAccPreviouslyWarm overlaps with kAccIntrinsicBits.
      return true;
    }
    return (GetAccessFlags() & kAccPreviouslyWarm) != 0;
  }

  void SetPreviouslyWarm() {
    if (IsIntrinsic()) {
      // kAccPreviouslyWarm overlaps with kAccIntrinsicBits.
      return;
    }
    AddAccessFlags(kAccPreviouslyWarm);
  }

  // Should this method be run in the interpreter and count locks (e.g., failed structured-
  // locking verification)?
  bool MustCountLocks() {
    if (IsIntrinsic()) {
      return false;
    }
    return (GetAccessFlags() & kAccMustCountLocks) != 0;
  }

  void SetMustCountLocks() {
    AddAccessFlags(kAccMustCountLocks);
  }

  // Returns true if this method could be overridden by a default method.
  bool IsOverridableByDefaultMethod() REQUIRES_SHARED(Locks::mutator_lock_);

  bool CheckIncompatibleClassChange(InvokeType type) REQUIRES_SHARED(Locks::mutator_lock_);

  // Throws the error that would result from trying to invoke this method (i.e.
  // IncompatibleClassChangeError or AbstractMethodError). Only call if !IsInvokable().
  void ThrowInvocationTimeError() REQUIRES_SHARED(Locks::mutator_lock_);

  uint16_t GetMethodIndex() REQUIRES_SHARED(Locks::mutator_lock_);

  // Doesn't do erroneous / unresolved class checks.
  uint16_t GetMethodIndexDuringLinking() REQUIRES_SHARED(Locks::mutator_lock_);

  size_t GetVtableIndex() REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetMethodIndex();
  }

  void SetMethodIndex(uint16_t new_method_index) REQUIRES_SHARED(Locks::mutator_lock_) {
    // Not called within a transaction.
    method_index_ = new_method_index;
  }

  static constexpr MemberOffset DexMethodIndexOffset() {
    return MemberOffset(OFFSETOF_MEMBER(ArtMethod, dex_method_index_));
  }

  static constexpr MemberOffset MethodIndexOffset() {
    return MemberOffset(OFFSETOF_MEMBER(ArtMethod, method_index_));
  }

  uint32_t GetCodeItemOffset() {
    return dex_code_item_offset_;
  }

  void SetCodeItemOffset(uint32_t new_code_off) {
    // Not called within a transaction.
    dex_code_item_offset_ = new_code_off;
  }

  // Number of 32-bit registers required to hold all the arguments.
  static size_t NumArgRegisters(const char* shorty);

  ALWAYS_INLINE uint32_t GetDexMethodIndex() {
    return dex_method_index_;
  }

  void SetDexMethodIndex(uint32_t new_idx) {
    // Not called within a transaction.
    dex_method_index_ = new_idx;
  }

  // Lookup the Class from the type index into this method's dex cache.
  ObjPtr<mirror::Class> LookupResolvedClassFromTypeIndex(dex::TypeIndex type_idx)
      REQUIRES_SHARED(Locks::mutator_lock_);
  // Resolve the Class from the type index into this method's dex cache.
  ObjPtr<mirror::Class> ResolveClassFromTypeIndex(dex::TypeIndex type_idx)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns true if this method has the same name and signature as the other method.
  bool HasSameNameAndSignature(ArtMethod* other) REQUIRES_SHARED(Locks::mutator_lock_);

  // Find the method that this method overrides.
  ArtMethod* FindOverriddenMethod(PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Find the method index for this method within other_dexfile. If this method isn't present then
  // return dex::kDexNoIndex. The name_and_signature_idx MUST refer to a MethodId with the same
  // name and signature in the other_dexfile, such as the method index used to resolve this method
  // in the other_dexfile.
  uint32_t FindDexMethodIndexInOtherDexFile(const DexFile& other_dexfile,
                                            uint32_t name_and_signature_idx)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void Invoke(Thread* self, uint32_t* args, uint32_t args_size, JValue* result, const char* shorty)
      REQUIRES_SHARED(Locks::mutator_lock_);

  const void* GetEntryPointFromQuickCompiledCode() {
    return GetEntryPointFromQuickCompiledCodePtrSize(kRuntimePointerSize);
  }
  ALWAYS_INLINE const void* GetEntryPointFromQuickCompiledCodePtrSize(PointerSize pointer_size) {
    return GetNativePointer<const void*>(
        EntryPointFromQuickCompiledCodeOffset(pointer_size), pointer_size);
  }

  void SetEntryPointFromQuickCompiledCode(const void* entry_point_from_quick_compiled_code) {
    SetEntryPointFromQuickCompiledCodePtrSize(entry_point_from_quick_compiled_code,
                                              kRuntimePointerSize);
  }
  ALWAYS_INLINE void SetEntryPointFromQuickCompiledCodePtrSize(
      const void* entry_point_from_quick_compiled_code, PointerSize pointer_size) {
    SetNativePointer(EntryPointFromQuickCompiledCodeOffset(pointer_size),
                     entry_point_from_quick_compiled_code,
                     pointer_size);
    // We might want to invoke compiled code, so don't use the fast path.
    ClearFastInterpreterToInterpreterInvokeFlag();
  }

  // Registers the native method and returns the new entry point. NB: the returned entry point
  // might be different from the native_method argument if some MethodCallback modifies it.
  const void* RegisterNative(const void* native_method)
      REQUIRES_SHARED(Locks::mutator_lock_) WARN_UNUSED;

  void UnregisterNative() REQUIRES_SHARED(Locks::mutator_lock_);

  static constexpr MemberOffset DataOffset(PointerSize pointer_size) {
    return MemberOffset(PtrSizedFieldsOffset(pointer_size) + OFFSETOF_MEMBER(
        PtrSizedFields, data_) / sizeof(void*) * static_cast<size_t>(pointer_size));
  }
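  // The arithmetic above first turns the native offset of data_ within PtrSizedFields
  // into a field index (offset / sizeof(void*)), then rescales it by the *target*
  // pointer size. E.g., data_ is field index 0, so DataOffset() is simply
  // PtrSizedFieldsOffset(pointer_size); entry_point_from_quick_compiled_code_ is field
  // index 1, so its offset (computed below) adds one further pointer_size.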

  static constexpr MemberOffset EntryPointFromJniOffset(PointerSize pointer_size) {
    return DataOffset(pointer_size);
  }

  static constexpr MemberOffset EntryPointFromQuickCompiledCodeOffset(PointerSize pointer_size) {
    return MemberOffset(PtrSizedFieldsOffset(pointer_size) + OFFSETOF_MEMBER(
        PtrSizedFields, entry_point_from_quick_compiled_code_) / sizeof(void*)
            * static_cast<size_t>(pointer_size));
  }

  ImtConflictTable* GetImtConflictTable(PointerSize pointer_size) {
    DCHECK(IsRuntimeMethod());
    return reinterpret_cast<ImtConflictTable*>(GetDataPtrSize(pointer_size));
  }

  ALWAYS_INLINE void SetImtConflictTable(ImtConflictTable* table, PointerSize pointer_size) {
    DCHECK(IsRuntimeMethod());
    SetDataPtrSize(table, pointer_size);
  }

  ProfilingInfo* GetProfilingInfo(PointerSize pointer_size) REQUIRES_SHARED(Locks::mutator_lock_) {
    if (UNLIKELY(IsNative() || IsProxyMethod() || !IsInvokable())) {
      return nullptr;
    }
    return reinterpret_cast<ProfilingInfo*>(GetDataPtrSize(pointer_size));
  }

  ALWAYS_INLINE void SetProfilingInfo(ProfilingInfo* info) {
    SetDataPtrSize(info, kRuntimePointerSize);
  }

  ALWAYS_INLINE void SetProfilingInfoPtrSize(ProfilingInfo* info, PointerSize pointer_size) {
    SetDataPtrSize(info, pointer_size);
  }

  static MemberOffset ProfilingInfoOffset() {
    DCHECK(IsImagePointerSize(kRuntimePointerSize));
    return DataOffset(kRuntimePointerSize);
  }

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ALWAYS_INLINE bool HasSingleImplementation() REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE void SetHasSingleImplementation(bool single_impl) {
    DCHECK(!IsIntrinsic()) << "conflict with intrinsic bits";
    if (single_impl) {
      AddAccessFlags(kAccSingleImplementation);
    } else {
      ClearAccessFlags(kAccSingleImplementation);
    }
  }

  // Takes a method and returns a 'canonical' one if the method is default (and therefore
  // potentially copied from some other class). For example, this ensures that the debugger does not
  // get confused as to which method we are in.
  ArtMethod* GetCanonicalMethod(PointerSize pointer_size = kRuntimePointerSize)
      REQUIRES_SHARED(Locks::mutator_lock_);

  ArtMethod* GetSingleImplementation(PointerSize pointer_size);

  ALWAYS_INLINE void SetSingleImplementation(ArtMethod* method, PointerSize pointer_size) {
    DCHECK(!IsNative());
    // Non-abstract method's single implementation is just itself.
    DCHECK(IsAbstract());
    SetDataPtrSize(method, pointer_size);
  }

  void* GetEntryPointFromJni() {
    DCHECK(IsNative());
    return GetEntryPointFromJniPtrSize(kRuntimePointerSize);
  }

  ALWAYS_INLINE void* GetEntryPointFromJniPtrSize(PointerSize pointer_size) {
    return GetDataPtrSize(pointer_size);
  }

  void SetEntryPointFromJni(const void* entrypoint) {
    DCHECK(IsNative());
    SetEntryPointFromJniPtrSize(entrypoint, kRuntimePointerSize);
  }

  ALWAYS_INLINE void SetEntryPointFromJniPtrSize(const void* entrypoint, PointerSize pointer_size) {
    SetDataPtrSize(entrypoint, pointer_size);
  }

  ALWAYS_INLINE void* GetDataPtrSize(PointerSize pointer_size) {
    DCHECK(IsImagePointerSize(pointer_size));
    return GetNativePointer<void*>(DataOffset(pointer_size), pointer_size);
  }

  ALWAYS_INLINE void SetDataPtrSize(const void* data, PointerSize pointer_size) {
    DCHECK(IsImagePointerSize(pointer_size));
    SetNativePointer(DataOffset(pointer_size), data, pointer_size);
  }

  // Is this a CalleeSaveMethod or ResolutionMethod, and therefore a method that does not adhere
  // to the normal conventions of managed code? Returns false for Proxy methods.
  ALWAYS_INLINE bool IsRuntimeMethod() {
    return dex_method_index_ == kRuntimeMethodDexMethodIndex;
  }

  // Is this a hand-crafted method used for something like describing callee saves?
  bool IsCalleeSaveMethod() REQUIRES_SHARED(Locks::mutator_lock_);

  bool IsResolutionMethod() REQUIRES_SHARED(Locks::mutator_lock_);

  bool IsImtUnimplementedMethod() REQUIRES_SHARED(Locks::mutator_lock_);

  // Find the catch block for the given exception type and dex_pc. When a catch block is found,
  // the out parameter has_no_move_exception indicates whether the found catch block is
  // responsible for clearing the exception or whether a move-exception instruction is present.
  uint32_t FindCatchBlock(Handle<mirror::Class> exception_type, uint32_t dex_pc,
                          bool* has_no_move_exception)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // NO_THREAD_SAFETY_ANALYSIS since we don't know what the callback requires.
  template<ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename RootVisitorType>
  void VisitRoots(RootVisitorType& visitor, PointerSize pointer_size) NO_THREAD_SAFETY_ANALYSIS;

  const DexFile* GetDexFile() REQUIRES_SHARED(Locks::mutator_lock_);

  const char* GetDeclaringClassDescriptor() REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE const char* GetShorty() REQUIRES_SHARED(Locks::mutator_lock_);

  const char* GetShorty(uint32_t* out_length) REQUIRES_SHARED(Locks::mutator_lock_);

  const Signature GetSignature() REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE const char* GetName() REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE std::string_view GetNameView() REQUIRES_SHARED(Locks::mutator_lock_);

  ObjPtr<mirror::String> ResolveNameString() REQUIRES_SHARED(Locks::mutator_lock_);

  const dex::CodeItem* GetCodeItem() REQUIRES_SHARED(Locks::mutator_lock_);

  bool IsResolvedTypeIdx(dex::TypeIndex type_idx) REQUIRES_SHARED(Locks::mutator_lock_);

  int32_t GetLineNumFromDexPC(uint32_t dex_pc) REQUIRES_SHARED(Locks::mutator_lock_);

  const dex::ProtoId& GetPrototype() REQUIRES_SHARED(Locks::mutator_lock_);

  const dex::TypeList* GetParameterTypeList() REQUIRES_SHARED(Locks::mutator_lock_);

  const char* GetDeclaringClassSourceFile() REQUIRES_SHARED(Locks::mutator_lock_);

  uint16_t GetClassDefIndex() REQUIRES_SHARED(Locks::mutator_lock_);

  const dex::ClassDef& GetClassDef() REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE size_t GetNumberOfParameters() REQUIRES_SHARED(Locks::mutator_lock_);

  const char* GetReturnTypeDescriptor() REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE Primitive::Type GetReturnTypePrimitive() REQUIRES_SHARED(Locks::mutator_lock_);

  const char* GetTypeDescriptorFromTypeIdx(dex::TypeIndex type_idx)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Lookup return type.
  ObjPtr<mirror::Class> LookupResolvedReturnType() REQUIRES_SHARED(Locks::mutator_lock_);
  // Resolve return type. May cause thread suspension due to GetClassFromTypeIdx
  // calling ResolveType; this has caused a large number of bugs at call sites.
  ObjPtr<mirror::Class> ResolveReturnType() REQUIRES_SHARED(Locks::mutator_lock_);

  ObjPtr<mirror::ClassLoader> GetClassLoader() REQUIRES_SHARED(Locks::mutator_lock_);

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ObjPtr<mirror::DexCache> GetDexCache() REQUIRES_SHARED(Locks::mutator_lock_);
  ObjPtr<mirror::DexCache> GetObsoleteDexCache() REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE ArtMethod* GetInterfaceMethodForProxyUnchecked(PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);
  ALWAYS_INLINE ArtMethod* GetInterfaceMethodIfProxy(PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  ArtMethod* GetNonObsoleteMethod() REQUIRES_SHARED(Locks::mutator_lock_);

  // May cause thread suspension due to class resolution.
  bool EqualParameters(Handle<mirror::ObjectArray<mirror::Class>> params)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Size of an instance of this native class.
  static size_t Size(PointerSize pointer_size) {
    return PtrSizedFieldsOffset(pointer_size) +
        (sizeof(PtrSizedFields) / sizeof(void*)) * static_cast<size_t>(pointer_size);
  }

  // Alignment of an instance of this native class.
  static size_t Alignment(PointerSize pointer_size) {
    // The ArtMethod alignment is the same as image pointer size. This differs from
    // alignof(ArtMethod) if cross-compiling with pointer_size != sizeof(void*).
    return static_cast<size_t>(pointer_size);
  }
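  // Worked example, assuming GcRoot<mirror::Class> is a 4-byte compressed reference:
  // the fields up to and including the hotness_count_/imt_index_ union occupy
  // 4 + 4 + 4 + 4 + 2 + 2 = 20 bytes, so PtrSizedFieldsOffset() is RoundUp(20, 4) == 20
  // for 32-bit targets and RoundUp(20, 8) == 24 for 64-bit targets, giving
  // Size(PointerSize::k32) == 20 + 2 * 4 == 28 and Size(PointerSize::k64) == 24 + 2 * 8 == 40.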

  void CopyFrom(ArtMethod* src, PointerSize image_pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE void SetCounter(int16_t hotness_count) REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE uint16_t GetCounter() REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE uint32_t GetImtIndex() REQUIRES_SHARED(Locks::mutator_lock_);

  void CalculateAndSetImtIndex() REQUIRES_SHARED(Locks::mutator_lock_);

  static constexpr MemberOffset HotnessCountOffset() {
    return MemberOffset(OFFSETOF_MEMBER(ArtMethod, hotness_count_));
  }

  ArrayRef<const uint8_t> GetQuickenedInfo() REQUIRES_SHARED(Locks::mutator_lock_);
  uint16_t GetIndexFromQuickening(uint32_t dex_pc) REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns the method header for the compiled code containing 'pc'. Note that runtime
  // methods will return null for this method, as they are not oat-based.
  const OatQuickMethodHeader* GetOatQuickMethodHeader(uintptr_t pc)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Get compiled code for the method, return null if no code exists.
  const void* GetOatMethodQuickCode(PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns whether the method has any compiled code, JIT or AOT.
  bool HasAnyCompiledCode() REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns a human-readable signature for 'm'. Something like "a.b.C.m" or
  // "a.b.C.m(II)V" (depending on the value of 'with_signature').
  static std::string PrettyMethod(ArtMethod* m, bool with_signature = true)
      REQUIRES_SHARED(Locks::mutator_lock_);
  std::string PrettyMethod(bool with_signature = true)
      REQUIRES_SHARED(Locks::mutator_lock_);
  // Returns the short JNI native function name for this method (no argument signature),
  // as used when the native method is not overloaded.
  std::string JniShortName()
      REQUIRES_SHARED(Locks::mutator_lock_);
  // Returns the long JNI native function name for this method (including the mangled
  // argument signature), as used when the native method is overloaded.
  std::string JniLongName()
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Update heap objects and non-entrypoint pointers by the passed in visitor for image relocation.
  // Does not use read barrier.
  template <typename Visitor>
  ALWAYS_INLINE void UpdateObjectsForImageRelocation(const Visitor& visitor)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Update entry points by passing them through the visitor.
  template <typename Visitor>
  ALWAYS_INLINE void UpdateEntrypoints(const Visitor& visitor, PointerSize pointer_size);

  // Visit the individual members of an ArtMethod. Used by imgdiag.
  // As imgdiag does not support mixing instruction sets or pointer sizes (e.g., using imgdiag32
  // to inspect 64-bit images, etc.), we can go beneath the accessors directly to the class members.
  template <typename VisitorFunc>
  void VisitMembers(VisitorFunc& visitor) {
    DCHECK(IsImagePointerSize(kRuntimePointerSize));
    visitor(this, &declaring_class_, "declaring_class_");
    visitor(this, &access_flags_, "access_flags_");
    visitor(this, &dex_code_item_offset_, "dex_code_item_offset_");
    visitor(this, &dex_method_index_, "dex_method_index_");
    visitor(this, &method_index_, "method_index_");
    visitor(this, &hotness_count_, "hotness_count_");
    visitor(this, &ptr_sized_fields_.data_, "ptr_sized_fields_.data_");
    visitor(this,
            &ptr_sized_fields_.entry_point_from_quick_compiled_code_,
            "ptr_sized_fields_.entry_point_from_quick_compiled_code_");
  }
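  // A visitor is any callable invocable as visitor(method, &member, name). A minimal
  // (hypothetical) sketch:
  //   auto logger = [](ArtMethod* m, const auto* member, const char* name) { /* ... */ };
  //   method->VisitMembers(logger);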

  // Returns the dex instructions of the code item for the art method. Returns an empty array for
  // the null code item case.
  ALWAYS_INLINE CodeItemInstructionAccessor DexInstructions()
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns the dex code item data section of the DexFile for the art method.
  ALWAYS_INLINE CodeItemDataAccessor DexInstructionData()
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns the dex code item debug info section of the DexFile for the art method.
  ALWAYS_INLINE CodeItemDebugInfoAccessor DexInstructionDebugInfo()
      REQUIRES_SHARED(Locks::mutator_lock_);

  GcRoot<mirror::Class>& DeclaringClassRoot() {
    return declaring_class_;
  }

 protected:
  // Field order required by test "ValidateFieldOrderOfJavaCppUnionClasses".
  // The class we are a part of.
  GcRoot<mirror::Class> declaring_class_;

  // Access flags; low 16 bits are defined by spec.
  // Getting and setting these flags needs to be atomic when concurrency is
  // possible, e.g. after this method's class is linked, such as when setting
  // the verifier flags or the single-implementation flag.
  std::atomic<std::uint32_t> access_flags_;

  /* Dex file fields. The defining dex file is available via declaring_class_->dex_cache_ */

  // Offset to the CodeItem.
  uint32_t dex_code_item_offset_;

  // Index into method_ids of the dex file associated with this method.
  uint32_t dex_method_index_;

  /* End of dex file fields. */

  // Entry within a dispatch table for this method. For static/direct methods the index is into
  // the declaringClass.directMethods, for virtual methods into the vtable, and for interface
  // methods into the ifTable.
  uint16_t method_index_;

  union {
    // Non-abstract methods: The hotness we measure for this method. Not atomic,
    // as we allow missing increments: if the method is hot, we will see it eventually.
    uint16_t hotness_count_;
    // Abstract methods: IMT index (bitwise negated) or zero if it was not cached.
    // The negation is needed to distinguish zero index and missing cached entry.
    uint16_t imt_index_;
  };
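  // For example, a cached IMT index of 0 is stored in imt_index_ as
  // static_cast<uint16_t>(~0) == 0xFFFF, so a raw value of 0 unambiguously means
  // "not cached".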

  // Fake padding field gets inserted here.

  // Must be the last fields in the method.
  struct PtrSizedFields {
    // Depending on the method type, the data is
    //   - native method: pointer to the JNI function registered to this method
    //                    or a function to resolve the JNI function,
    //   - conflict method: ImtConflictTable,
    //   - abstract/interface method: the single-implementation if any,
    //   - proxy method: the original interface method or constructor,
    //   - other methods: the profiling data.
    void* data_;

    // Method dispatch from quick compiled code invokes this pointer which may cause bridging into
    // the interpreter.
    void* entry_point_from_quick_compiled_code_;
  } ptr_sized_fields_;

 private:
  uint16_t FindObsoleteDexClassDefIndex() REQUIRES_SHARED(Locks::mutator_lock_);

  static constexpr size_t PtrSizedFieldsOffset(PointerSize pointer_size) {
    // Round up to pointer size for padding field. Tested in art_method.cc.
    return RoundUp(offsetof(ArtMethod, hotness_count_) + sizeof(hotness_count_),
                   static_cast<size_t>(pointer_size));
  }

  // Compare given pointer size to the image pointer size.
  static bool IsImagePointerSize(PointerSize pointer_size);

  dex::TypeIndex GetReturnTypeIndex() REQUIRES_SHARED(Locks::mutator_lock_);

  template<typename T>
  ALWAYS_INLINE T GetNativePointer(MemberOffset offset, PointerSize pointer_size) const {
    static_assert(std::is_pointer<T>::value, "T must be a pointer type");
    const auto addr = reinterpret_cast<uintptr_t>(this) + offset.Uint32Value();
    if (pointer_size == PointerSize::k32) {
      return reinterpret_cast<T>(*reinterpret_cast<const uint32_t*>(addr));
    } else {
      auto v = *reinterpret_cast<const uint64_t*>(addr);
      return reinterpret_cast<T>(dchecked_integral_cast<uintptr_t>(v));
    }
  }

  template<typename T>
  ALWAYS_INLINE void SetNativePointer(MemberOffset offset, T new_value, PointerSize pointer_size) {
    static_assert(std::is_pointer<T>::value, "T must be a pointer type");
    const auto addr = reinterpret_cast<uintptr_t>(this) + offset.Uint32Value();
    if (pointer_size == PointerSize::k32) {
      uintptr_t ptr = reinterpret_cast<uintptr_t>(new_value);
      *reinterpret_cast<uint32_t*>(addr) = dchecked_integral_cast<uint32_t>(ptr);
    } else {
      *reinterpret_cast<uint64_t*>(addr) = reinterpret_cast<uintptr_t>(new_value);
    }
  }
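  // These accessors exist because an ArtMethod may live in an image compiled for a
  // pointer size different from the current process (e.g. a 64-bit dex2oat writing a
  // 32-bit boot image), so pointer-sized slots are read and written as uint32_t or
  // uint64_t chosen at runtime, with dchecked_integral_cast guarding against truncation.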

  static inline bool IsValidIntrinsicUpdate(uint32_t modifier) {
    return (((modifier & kAccIntrinsic) == kAccIntrinsic) &&
            (((modifier & ~(kAccIntrinsic | kAccIntrinsicBits)) == 0)));
  }

  static inline bool OverlapsIntrinsicBits(uint32_t modifier) {
    return (modifier & kAccIntrinsicBits) != 0;
  }

  // This setter guarantees atomicity.
  void AddAccessFlags(uint32_t flag) {
    DCHECK(!IsIntrinsic() ||
           !OverlapsIntrinsicBits(flag) ||
           IsValidIntrinsicUpdate(flag));
    uint32_t old_access_flags;
    uint32_t new_access_flags;
    do {
      old_access_flags = access_flags_.load(std::memory_order_relaxed);
      new_access_flags = old_access_flags | flag;
    } while (!access_flags_.compare_exchange_weak(old_access_flags, new_access_flags));
  }
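  // The loop retries until the OR takes effect; compare_exchange_weak may also fail
  // spuriously, in which case the flags are simply reloaded. A sketch of an equivalent
  // atomic formulation would be access_flags_.fetch_or(flag) here and
  // access_flags_.fetch_and(~flag) in ClearAccessFlags() below.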

  // This setter guarantees atomicity.
  void ClearAccessFlags(uint32_t flag) {
    DCHECK(!IsIntrinsic() || !OverlapsIntrinsicBits(flag) || IsValidIntrinsicUpdate(flag));
    uint32_t old_access_flags;
    uint32_t new_access_flags;
    do {
      old_access_flags = access_flags_.load(std::memory_order_relaxed);
      new_access_flags = old_access_flags & ~flag;
    } while (!access_flags_.compare_exchange_weak(old_access_flags, new_access_flags));
  }

  // Used by GetName and GetNameView to share common code.
  const char* GetRuntimeMethodName() REQUIRES_SHARED(Locks::mutator_lock_);

  DISALLOW_COPY_AND_ASSIGN(ArtMethod);  // Need to use CopyFrom to deal with 32 vs 64 bits.
};

class MethodCallback {
 public:
  virtual ~MethodCallback() {}

  virtual void RegisterNativeMethod(ArtMethod* method,
                                    const void* original_implementation,
                                    /*out*/void** new_implementation)
      REQUIRES_SHARED(Locks::mutator_lock_) = 0;
};

}  // namespace art

#endif  // ART_RUNTIME_ART_METHOD_H_