/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_ART_METHOD_H_
#define ART_RUNTIME_ART_METHOD_H_

#include <cstddef>

#include <android-base/logging.h>

#include "base/array_ref.h"
#include "base/bit_utils.h"
#include "base/casts.h"
#include "base/enums.h"
#include "base/iteration_range.h"
#include "base/macros.h"
#include "base/runtime_debug.h"
#include "dex/code_item_accessors.h"
#include "dex/dex_file.h"
#include "dex/dex_instruction_iterator.h"
#include "dex/modifiers.h"
#include "dex/primitive.h"
#include "gc_root.h"
#include "obj_ptr.h"
#include "offsets.h"
#include "read_barrier_option.h"

namespace art {

template<class T> class Handle;
class ImtConflictTable;
enum InvokeType : uint32_t;
union JValue;
class OatQuickMethodHeader;
class ProfilingInfo;
class ScopedObjectAccessAlreadyRunnable;
class StringPiece;
class ShadowFrame;

namespace mirror {
class Array;
class Class;
class ClassLoader;
class DexCache;
class IfTable;
class Object;
template <typename MirrorType> class ObjectArray;
class PointerArray;
class String;

template <typename T> struct NativeDexCachePair;
using MethodDexCachePair = NativeDexCachePair<ArtMethod>;
using MethodDexCacheType = std::atomic<MethodDexCachePair>;
}  // namespace mirror

class ArtMethod FINAL {
 public:
  // Should the class state be checked on sensitive operations?
  DECLARE_RUNTIME_DEBUG_FLAG(kCheckDeclaringClassState);

  // The runtime dex_method_index is kDexNoIndex. To lower dependencies, we use this
  // constexpr value here and verify that it is correct in art_method.cc.
  static constexpr uint32_t kRuntimeMethodDexMethodIndex = 0xFFFFFFFF;

  ArtMethod() : access_flags_(0), dex_code_item_offset_(0), dex_method_index_(0),
      method_index_(0), hotness_count_(0) { }

  ArtMethod(ArtMethod* src, PointerSize image_pointer_size) {
    CopyFrom(src, image_pointer_size);
  }

  static ArtMethod* FromReflectedMethod(const ScopedObjectAccessAlreadyRunnable& soa,
                                        jobject jlr_method)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ALWAYS_INLINE mirror::Class* GetDeclaringClass() REQUIRES_SHARED(Locks::mutator_lock_);

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ALWAYS_INLINE mirror::Class* GetDeclaringClassUnchecked()
      REQUIRES_SHARED(Locks::mutator_lock_);

  mirror::CompressedReference<mirror::Object>* GetDeclaringClassAddressWithoutBarrier() {
    return declaring_class_.AddressWithoutBarrier();
  }

  void SetDeclaringClass(ObjPtr<mirror::Class> new_declaring_class)
      REQUIRES_SHARED(Locks::mutator_lock_);

  bool CASDeclaringClass(mirror::Class* expected_class, mirror::Class* desired_class)
      REQUIRES_SHARED(Locks::mutator_lock_);

  static MemberOffset DeclaringClassOffset() {
    return MemberOffset(OFFSETOF_MEMBER(ArtMethod, declaring_class_));
  }

  // Note: GetAccessFlags acquires the mutator lock in debug mode to check that it is not called for
  // a proxy method.
  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  uint32_t GetAccessFlags() {
    if (kCheckDeclaringClassState) {
      GetAccessFlagsDCheck<kReadBarrierOption>();
    }
    return access_flags_.load(std::memory_order_relaxed);
  }
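  // Note that the relaxed load above yields an atomic snapshot of the flag bits without
  // imposing any ordering with other fields; updates that race with it are performed via the
  // CAS loops in AddAccessFlags() and ClearAccessFlags() below.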

  // This version should only be called when it's certain there is no
  // concurrency so there is no need to guarantee atomicity. For example,
  // before the method is linked.
  void SetAccessFlags(uint32_t new_access_flags) {
    access_flags_.store(new_access_flags, std::memory_order_relaxed);
  }

  static MemberOffset AccessFlagsOffset() {
    return MemberOffset(OFFSETOF_MEMBER(ArtMethod, access_flags_));
  }

  // Approximate what kind of method call would be used for this method.
  InvokeType GetInvokeType() REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns true if the method is declared public.
  bool IsPublic() {
    return (GetAccessFlags() & kAccPublic) != 0;
  }

  // Returns true if the method is declared private.
  bool IsPrivate() {
    return (GetAccessFlags() & kAccPrivate) != 0;
  }

  // Returns true if the method is declared static.
  bool IsStatic() {
    return (GetAccessFlags() & kAccStatic) != 0;
  }

  // Returns true if the method is a constructor according to access flags.
  bool IsConstructor() {
    return (GetAccessFlags() & kAccConstructor) != 0;
  }

  // Returns true if the method is a class initializer according to access flags.
  bool IsClassInitializer() {
    return IsConstructor() && IsStatic();
  }

  // Returns true if the method is static, private, or a constructor.
  bool IsDirect() {
    return IsDirect(GetAccessFlags());
  }

  static bool IsDirect(uint32_t access_flags) {
    constexpr uint32_t direct = kAccStatic | kAccPrivate | kAccConstructor;
    return (access_flags & direct) != 0;
  }
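  // For example, a private instance method, a static method, or any constructor is direct
  // (it can be dispatched without a vtable lookup); a public instance method is not.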

  // Returns true if the method is declared synchronized.
  bool IsSynchronized() {
    constexpr uint32_t synchronized_flags = kAccSynchronized | kAccDeclaredSynchronized;
    return (GetAccessFlags() & synchronized_flags) != 0;
  }

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  bool IsFinal() {
    return (GetAccessFlags<kReadBarrierOption>() & kAccFinal) != 0;
  }

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  bool IsIntrinsic() {
    return (GetAccessFlags<kReadBarrierOption>() & kAccIntrinsic) != 0;
  }

  ALWAYS_INLINE void SetIntrinsic(uint32_t intrinsic) REQUIRES_SHARED(Locks::mutator_lock_);

  uint32_t GetIntrinsic() {
    static const int kAccFlagsShift = CTZ(kAccIntrinsicBits);
    static_assert(IsPowerOfTwo((kAccIntrinsicBits >> kAccFlagsShift) + 1),
                  "kAccIntrinsicBits are not contiguous");
    static_assert((kAccIntrinsic & kAccIntrinsicBits) == 0,
                  "kAccIntrinsic overlaps kAccIntrinsicBits");
    DCHECK(IsIntrinsic());
    return (GetAccessFlags() & kAccIntrinsicBits) >> kAccFlagsShift;
  }
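  // CTZ (count trailing zeros) locates the lowest set bit of the kAccIntrinsicBits mask, so
  // the masked flag bits can be shifted down to recover the intrinsic ordinal as a plain
  // integer; the first static_assert above guarantees the mask is one contiguous run of bits.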

  void SetNotIntrinsic() REQUIRES_SHARED(Locks::mutator_lock_);

  bool IsCopied() {
    static_assert((kAccCopied & (kAccIntrinsic | kAccIntrinsicBits)) == 0,
                  "kAccCopied conflicts with intrinsic modifier");
    const bool copied = (GetAccessFlags() & kAccCopied) != 0;
    // (IsMiranda() || IsDefaultConflicting()) implies copied
    DCHECK(!(IsMiranda() || IsDefaultConflicting()) || copied)
        << "Miranda or default-conflict methods must always be copied.";
    return copied;
  }

  bool IsMiranda() {
    // The kAccMiranda flag value is used with a different meaning for native methods,
    // so we need to check the kAccNative flag as well.
    return (GetAccessFlags() & (kAccNative | kAccMiranda)) == kAccMiranda;
  }
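  // (A Miranda method is a synthetic abstract method added for an interface method that an
  // abstract class declares it implements but does not define; the synthetic entry keeps
  // vtable and interface-table layout consistent.)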

  // Returns true if invoking this method will not throw an AbstractMethodError or
  // IncompatibleClassChangeError.
  bool IsInvokable() {
    return !IsAbstract() && !IsDefaultConflicting();
  }

  bool IsCompilable() {
    if (IsIntrinsic()) {
      // kAccCompileDontBother overlaps with kAccIntrinsicBits.
      return true;
    }
    return (GetAccessFlags() & kAccCompileDontBother) == 0;
  }

  void SetDontCompile() {
    AddAccessFlags(kAccCompileDontBother);
  }

  // A default conflict method is a special sentinel method that stands for a conflict between
  // multiple default methods. It cannot be invoked, throwing an IncompatibleClassChangeError if one
  // attempts to do so.
  bool IsDefaultConflicting() {
    if (IsIntrinsic()) {
      return false;
    }
    return (GetAccessFlags() & kAccDefaultConflict) != 0u;
  }

  // This is set by the class linker.
  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  bool IsDefault() {
    static_assert((kAccDefault & (kAccIntrinsic | kAccIntrinsicBits)) == 0,
                  "kAccDefault conflicts with intrinsic modifier");
    return (GetAccessFlags<kReadBarrierOption>() & kAccDefault) != 0;
  }

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  bool IsObsolete() {
    return (GetAccessFlags<kReadBarrierOption>() & kAccObsoleteMethod) != 0;
  }

  void SetIsObsolete() {
    AddAccessFlags(kAccObsoleteMethod);
  }

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  bool IsNative() {
    return (GetAccessFlags<kReadBarrierOption>() & kAccNative) != 0;
  }

  // Checks to see if the method was annotated with @dalvik.annotation.optimization.FastNative.
  bool IsFastNative() {
    // The presence of the annotation is checked by ClassLinker and recorded in access flags.
    // The kAccFastNative flag value is used with a different meaning for non-native methods,
    // so we need to check the kAccNative flag as well.
    constexpr uint32_t mask = kAccFastNative | kAccNative;
    return (GetAccessFlags() & mask) == mask;
  }

  // Checks to see if the method was annotated with @dalvik.annotation.optimization.CriticalNative.
  bool IsCriticalNative() {
    // The presence of the annotation is checked by ClassLinker and recorded in access flags.
    // The kAccCriticalNative flag value is used with a different meaning for non-native methods,
    // so we need to check the kAccNative flag as well.
    constexpr uint32_t mask = kAccCriticalNative | kAccNative;
    return (GetAccessFlags() & mask) == mask;
  }

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  bool IsAbstract() {
    return (GetAccessFlags<kReadBarrierOption>() & kAccAbstract) != 0;
  }

  bool IsSynthetic() {
    return (GetAccessFlags() & kAccSynthetic) != 0;
  }

  bool IsVarargs() {
    return (GetAccessFlags() & kAccVarargs) != 0;
  }

  bool IsProxyMethod() REQUIRES_SHARED(Locks::mutator_lock_);

  bool IsPolymorphicSignature() REQUIRES_SHARED(Locks::mutator_lock_);

  bool SkipAccessChecks() {
    // The kAccSkipAccessChecks flag value is used with a different meaning for native methods,
    // so we need to check the kAccNative flag as well.
    return (GetAccessFlags() & (kAccSkipAccessChecks | kAccNative)) == kAccSkipAccessChecks;
  }

  void SetSkipAccessChecks() {
    // SkipAccessChecks() is applicable only to non-native methods.
    DCHECK(!IsNative<kWithoutReadBarrier>());
    AddAccessFlags(kAccSkipAccessChecks);
  }

  bool PreviouslyWarm() {
    if (IsIntrinsic()) {
      // kAccPreviouslyWarm overlaps with kAccIntrinsicBits.
      return true;
    }
    return (GetAccessFlags() & kAccPreviouslyWarm) != 0;
  }

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  void SetPreviouslyWarm() {
    if (IsIntrinsic<kReadBarrierOption>()) {
      // kAccPreviouslyWarm overlaps with kAccIntrinsicBits.
      return;
    }
    AddAccessFlags<kReadBarrierOption>(kAccPreviouslyWarm);
  }

  // Should this method be run in the interpreter and count locks (e.g., failed structured-
  // locking verification)?
  bool MustCountLocks() {
    if (IsIntrinsic()) {
      return false;
    }
    return (GetAccessFlags() & kAccMustCountLocks) != 0;
  }

  void SetMustCountLocks() {
    AddAccessFlags(kAccMustCountLocks);
  }

  HiddenApiAccessFlags::ApiList GetHiddenApiAccessFlags() REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns true if this method could be overridden by a default method.
  bool IsOverridableByDefaultMethod() REQUIRES_SHARED(Locks::mutator_lock_);

  bool CheckIncompatibleClassChange(InvokeType type) REQUIRES_SHARED(Locks::mutator_lock_);

  // Throws the error that would result from trying to invoke this method (i.e.
  // IncompatibleClassChangeError or AbstractMethodError). Only call this if !IsInvokable().
  void ThrowInvocationTimeError() REQUIRES_SHARED(Locks::mutator_lock_);

  uint16_t GetMethodIndex() REQUIRES_SHARED(Locks::mutator_lock_);

  // Doesn't do erroneous / unresolved class checks.
  uint16_t GetMethodIndexDuringLinking() REQUIRES_SHARED(Locks::mutator_lock_);

  size_t GetVtableIndex() REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetMethodIndex();
  }

  void SetMethodIndex(uint16_t new_method_index) REQUIRES_SHARED(Locks::mutator_lock_) {
    // Not called within a transaction.
    method_index_ = new_method_index;
  }

  static MemberOffset DexMethodIndexOffset() {
    return MemberOffset(OFFSETOF_MEMBER(ArtMethod, dex_method_index_));
  }

  static MemberOffset MethodIndexOffset() {
    return MemberOffset(OFFSETOF_MEMBER(ArtMethod, method_index_));
  }

  uint32_t GetCodeItemOffset() {
    return dex_code_item_offset_;
  }

  void SetCodeItemOffset(uint32_t new_code_off) {
    // Not called within a transaction.
    dex_code_item_offset_ = new_code_off;
  }

  // Number of 32-bit registers that would be required to hold all the arguments.
  static size_t NumArgRegisters(const StringPiece& shorty);

  ALWAYS_INLINE uint32_t GetDexMethodIndexUnchecked() {
    return dex_method_index_;
  }
  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ALWAYS_INLINE uint32_t GetDexMethodIndex() REQUIRES_SHARED(Locks::mutator_lock_);

  void SetDexMethodIndex(uint32_t new_idx) {
    // Not called within a transaction.
    dex_method_index_ = new_idx;
  }

  // Lookup the Class* from the type index into this method's dex cache.
  ObjPtr<mirror::Class> LookupResolvedClassFromTypeIndex(dex::TypeIndex type_idx)
      REQUIRES_SHARED(Locks::mutator_lock_);
  // Resolve the Class* from the type index into this method's dex cache.
  ObjPtr<mirror::Class> ResolveClassFromTypeIndex(dex::TypeIndex type_idx)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns true if this method has the same name and signature as the other method.
  bool HasSameNameAndSignature(ArtMethod* other) REQUIRES_SHARED(Locks::mutator_lock_);

  // Find the method that this method overrides.
  ArtMethod* FindOverriddenMethod(PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Find the method index for this method within other_dexfile. If this method isn't present then
  // return dex::kDexNoIndex. The name_and_signature_idx MUST refer to a MethodId with the same
  // name and signature in the other_dexfile, such as the method index used to resolve this method
  // in the other_dexfile.
  uint32_t FindDexMethodIndexInOtherDexFile(const DexFile& other_dexfile,
                                            uint32_t name_and_signature_idx)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void Invoke(Thread* self, uint32_t* args, uint32_t args_size, JValue* result, const char* shorty)
      REQUIRES_SHARED(Locks::mutator_lock_);

  const void* GetEntryPointFromQuickCompiledCode() {
    return GetEntryPointFromQuickCompiledCodePtrSize(kRuntimePointerSize);
  }
  ALWAYS_INLINE const void* GetEntryPointFromQuickCompiledCodePtrSize(PointerSize pointer_size) {
    return GetNativePointer<const void*>(
        EntryPointFromQuickCompiledCodeOffset(pointer_size), pointer_size);
  }

  void SetEntryPointFromQuickCompiledCode(const void* entry_point_from_quick_compiled_code) {
    SetEntryPointFromQuickCompiledCodePtrSize(entry_point_from_quick_compiled_code,
                                              kRuntimePointerSize);
  }
  ALWAYS_INLINE void SetEntryPointFromQuickCompiledCodePtrSize(
      const void* entry_point_from_quick_compiled_code, PointerSize pointer_size) {
    SetNativePointer(EntryPointFromQuickCompiledCodeOffset(pointer_size),
                     entry_point_from_quick_compiled_code,
                     pointer_size);
  }

  // Registers the native method and returns the new entry point. NB: the returned entry point
  // might be different from the native_method argument if some MethodCallback modifies it.
  const void* RegisterNative(const void* native_method)
      REQUIRES_SHARED(Locks::mutator_lock_) WARN_UNUSED;

  void UnregisterNative() REQUIRES_SHARED(Locks::mutator_lock_);

  static MemberOffset DataOffset(PointerSize pointer_size) {
    return MemberOffset(PtrSizedFieldsOffset(pointer_size) + OFFSETOF_MEMBER(
        PtrSizedFields, data_) / sizeof(void*) * static_cast<size_t>(pointer_size));
  }
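  // The OFFSETOF_MEMBER value above is measured with the compiler's native pointer width, so
  // dividing by sizeof(void*) yields the field's index within PtrSizedFields; multiplying that
  // index by the requested pointer_size rescales the offset for an image whose pointer width
  // differs from the compiling host's.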

  static MemberOffset EntryPointFromJniOffset(PointerSize pointer_size) {
    return DataOffset(pointer_size);
  }

  static MemberOffset EntryPointFromQuickCompiledCodeOffset(PointerSize pointer_size) {
    return MemberOffset(PtrSizedFieldsOffset(pointer_size) + OFFSETOF_MEMBER(
        PtrSizedFields, entry_point_from_quick_compiled_code_) / sizeof(void*)
            * static_cast<size_t>(pointer_size));
  }

  ImtConflictTable* GetImtConflictTable(PointerSize pointer_size) {
    DCHECK(IsRuntimeMethod());
    return reinterpret_cast<ImtConflictTable*>(GetDataPtrSize(pointer_size));
  }

  ALWAYS_INLINE void SetImtConflictTable(ImtConflictTable* table, PointerSize pointer_size) {
    DCHECK(IsRuntimeMethod());
    SetDataPtrSize(table, pointer_size);
  }

  ProfilingInfo* GetProfilingInfo(PointerSize pointer_size) REQUIRES_SHARED(Locks::mutator_lock_) {
    // Don't do a read barrier in the DCHECK() inside GetAccessFlags() called by IsNative(),
    // as GetProfilingInfo is called in places where the declaring class is treated as a weak
    // reference (accessing it with a read barrier would either prevent unloading the class,
    // or crash the runtime if the GC wants to unload it).
    if (UNLIKELY(IsNative<kWithoutReadBarrier>()) || UNLIKELY(IsProxyMethod())) {
      return nullptr;
    }
    return reinterpret_cast<ProfilingInfo*>(GetDataPtrSize(pointer_size));
  }

  ALWAYS_INLINE void SetProfilingInfo(ProfilingInfo* info) {
    SetDataPtrSize(info, kRuntimePointerSize);
  }

  ALWAYS_INLINE void SetProfilingInfoPtrSize(ProfilingInfo* info, PointerSize pointer_size) {
    SetDataPtrSize(info, pointer_size);
  }

  static MemberOffset ProfilingInfoOffset() {
    DCHECK(IsImagePointerSize(kRuntimePointerSize));
    return DataOffset(kRuntimePointerSize);
  }

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ALWAYS_INLINE bool HasSingleImplementation() REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE void SetHasSingleImplementation(bool single_impl) {
    DCHECK(!IsIntrinsic()) << "conflict with intrinsic bits";
    if (single_impl) {
      AddAccessFlags(kAccSingleImplementation);
    } else {
      ClearAccessFlags(kAccSingleImplementation);
    }
  }

  // Takes a method and returns a 'canonical' one if the method is default (and therefore
  // potentially copied from some other class). For example, this ensures that the debugger does not
  // get confused as to which method we are in.
  ArtMethod* GetCanonicalMethod(PointerSize pointer_size = kRuntimePointerSize)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ArtMethod* GetSingleImplementation(PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ALWAYS_INLINE void SetSingleImplementation(ArtMethod* method, PointerSize pointer_size) {
    DCHECK(!IsNative<kReadBarrierOption>());
    // Non-abstract method's single implementation is just itself.
    DCHECK(IsAbstract<kReadBarrierOption>());
    SetDataPtrSize(method, pointer_size);
  }

  void* GetEntryPointFromJni() {
    DCHECK(IsNative());
    return GetEntryPointFromJniPtrSize(kRuntimePointerSize);
  }

  ALWAYS_INLINE void* GetEntryPointFromJniPtrSize(PointerSize pointer_size) {
    return GetDataPtrSize(pointer_size);
  }

  void SetEntryPointFromJni(const void* entrypoint) {
    DCHECK(IsNative());
    SetEntryPointFromJniPtrSize(entrypoint, kRuntimePointerSize);
  }

  ALWAYS_INLINE void SetEntryPointFromJniPtrSize(const void* entrypoint, PointerSize pointer_size) {
    SetDataPtrSize(entrypoint, pointer_size);
  }

  ALWAYS_INLINE void* GetDataPtrSize(PointerSize pointer_size) {
    DCHECK(IsImagePointerSize(pointer_size));
    return GetNativePointer<void*>(DataOffset(pointer_size), pointer_size);
  }

  ALWAYS_INLINE void SetDataPtrSize(const void* data, PointerSize pointer_size) {
    DCHECK(IsImagePointerSize(pointer_size));
    SetNativePointer(DataOffset(pointer_size), data, pointer_size);
  }
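  // Note that data_ is multiplexed by method kind (see PtrSizedFields below); accessors such
  // as GetImtConflictTable(), GetProfilingInfo() and GetEntryPointFromJni() are responsible
  // for checking that the method is of the matching kind before interpreting the pointer.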

  // Is this a CalleeSaveMethod or ResolutionMethod, and therefore does not adhere to the normal
  // conventions for a method of managed code? Returns false for Proxy methods.
  ALWAYS_INLINE bool IsRuntimeMethod() {
    return dex_method_index_ == kRuntimeMethodDexMethodIndex;
  }

  // Is this a hand crafted method used for something like describing callee saves?
  bool IsCalleeSaveMethod() REQUIRES_SHARED(Locks::mutator_lock_);

  bool IsResolutionMethod() REQUIRES_SHARED(Locks::mutator_lock_);

  bool IsImtUnimplementedMethod() REQUIRES_SHARED(Locks::mutator_lock_);

  // Find the catch block for the given exception type and dex_pc. When a catch block is found,
  // 'has_no_move_exception' indicates whether the found catch block is responsible for clearing
  // the exception or whether a move-exception instruction is present.
  uint32_t FindCatchBlock(Handle<mirror::Class> exception_type, uint32_t dex_pc,
                          bool* has_no_move_exception)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // NO_THREAD_SAFETY_ANALYSIS since we don't know what the callback requires.
  template<ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename RootVisitorType>
  void VisitRoots(RootVisitorType& visitor, PointerSize pointer_size) NO_THREAD_SAFETY_ANALYSIS;

  const DexFile* GetDexFile() REQUIRES_SHARED(Locks::mutator_lock_);

  const char* GetDeclaringClassDescriptor() REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE const char* GetShorty() REQUIRES_SHARED(Locks::mutator_lock_);

  const char* GetShorty(uint32_t* out_length) REQUIRES_SHARED(Locks::mutator_lock_);

  const Signature GetSignature() REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE const char* GetName() REQUIRES_SHARED(Locks::mutator_lock_);

  ObjPtr<mirror::String> GetNameAsString(Thread* self) REQUIRES_SHARED(Locks::mutator_lock_);

  const DexFile::CodeItem* GetCodeItem() REQUIRES_SHARED(Locks::mutator_lock_);

  bool IsResolvedTypeIdx(dex::TypeIndex type_idx) REQUIRES_SHARED(Locks::mutator_lock_);

  int32_t GetLineNumFromDexPC(uint32_t dex_pc) REQUIRES_SHARED(Locks::mutator_lock_);

  const DexFile::ProtoId& GetPrototype() REQUIRES_SHARED(Locks::mutator_lock_);

  const DexFile::TypeList* GetParameterTypeList() REQUIRES_SHARED(Locks::mutator_lock_);

  const char* GetDeclaringClassSourceFile() REQUIRES_SHARED(Locks::mutator_lock_);

  uint16_t GetClassDefIndex() REQUIRES_SHARED(Locks::mutator_lock_);

  const DexFile::ClassDef& GetClassDef() REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE size_t GetNumberOfParameters() REQUIRES_SHARED(Locks::mutator_lock_);

  const char* GetReturnTypeDescriptor() REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE Primitive::Type GetReturnTypePrimitive() REQUIRES_SHARED(Locks::mutator_lock_);

  const char* GetTypeDescriptorFromTypeIdx(dex::TypeIndex type_idx)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Lookup return type.
  ObjPtr<mirror::Class> LookupResolvedReturnType() REQUIRES_SHARED(Locks::mutator_lock_);
  // Resolve return type. May cause thread suspension because GetClassFromTypeIdx calls
  // ResolveType; this has caused a large number of bugs at call sites.
  ObjPtr<mirror::Class> ResolveReturnType() REQUIRES_SHARED(Locks::mutator_lock_);

  mirror::ClassLoader* GetClassLoader() REQUIRES_SHARED(Locks::mutator_lock_);

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  mirror::DexCache* GetDexCache() REQUIRES_SHARED(Locks::mutator_lock_);
  mirror::DexCache* GetObsoleteDexCache() REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE ArtMethod* GetInterfaceMethodForProxyUnchecked(PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);
  ALWAYS_INLINE ArtMethod* GetInterfaceMethodIfProxy(PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  ArtMethod* GetNonObsoleteMethod() REQUIRES_SHARED(Locks::mutator_lock_);

  // May cause thread suspension due to class resolution.
  bool EqualParameters(Handle<mirror::ObjectArray<mirror::Class>> params)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Size of an instance of this native class.
  static size_t Size(PointerSize pointer_size) {
    return PtrSizedFieldsOffset(pointer_size) +
        (sizeof(PtrSizedFields) / sizeof(void*)) * static_cast<size_t>(pointer_size);
  }
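  // As in DataOffset() above, sizeof(PtrSizedFields) is rescaled from the native pointer
  // width to the target pointer_size, so image writer and runtime agree on the size of an
  // ArtMethod even when cross-compiling for a different pointer width.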

  // Alignment of an instance of this native class.
  static size_t Alignment(PointerSize pointer_size) {
    // The ArtMethod alignment is the same as image pointer size. This differs from
    // alignof(ArtMethod) if cross-compiling with pointer_size != sizeof(void*).
    return static_cast<size_t>(pointer_size);
  }

  void CopyFrom(ArtMethod* src, PointerSize image_pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Note, hotness_count_ updates are non-atomic, but the count doesn't need to be precise. Also,
  // given that the counter is only 16 bits wide we can expect wrap-around in some
  // situations. Consumers of hotness_count_ must be able to deal with that.
  uint16_t IncrementCounter() {
    return ++hotness_count_;
  }

  void ClearCounter() {
    hotness_count_ = 0;
  }

  void SetCounter(int16_t hotness_count) {
    hotness_count_ = hotness_count;
  }

  uint16_t GetCounter() const {
    return hotness_count_;
  }

  static MemberOffset HotnessCountOffset() {
    return MemberOffset(OFFSETOF_MEMBER(ArtMethod, hotness_count_));
  }

  ArrayRef<const uint8_t> GetQuickenedInfo() REQUIRES_SHARED(Locks::mutator_lock_);
  uint16_t GetIndexFromQuickening(uint32_t dex_pc) REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns the method header for the compiled code containing 'pc'. Note that runtime
  // methods return null from this method, as they are not oat based.
  const OatQuickMethodHeader* GetOatQuickMethodHeader(uintptr_t pc)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Get compiled code for the method, return null if no code exists.
  const void* GetOatMethodQuickCode(PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns whether the method has any compiled code, JIT or AOT.
  bool HasAnyCompiledCode() REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns a human-readable signature for 'm'. Something like "a.b.C.m" or
  // "a.b.C.m(II)V" (depending on the value of 'with_signature').
  static std::string PrettyMethod(ArtMethod* m, bool with_signature = true)
      REQUIRES_SHARED(Locks::mutator_lock_);
  std::string PrettyMethod(bool with_signature = true)
      REQUIRES_SHARED(Locks::mutator_lock_);
  // Returns the JNI native function name for the non-overloaded method 'm'.
  std::string JniShortName()
      REQUIRES_SHARED(Locks::mutator_lock_);
  // Returns the JNI native function name for the overloaded method 'm'.
  std::string JniLongName()
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Update heap objects and non-entrypoint pointers by the passed in visitor for image relocation.
  // Does not use read barrier.
  template <typename Visitor>
  ALWAYS_INLINE void UpdateObjectsForImageRelocation(const Visitor& visitor)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Update entry points by passing them through the visitor.
  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename Visitor>
  ALWAYS_INLINE void UpdateEntrypoints(const Visitor& visitor, PointerSize pointer_size);

  // Visit the individual members of an ArtMethod.  Used by imgdiag.
  // As imgdiag does not support mixing instruction sets or pointer sizes (e.g., using imgdiag32
  // to inspect 64-bit images, etc.), we can go beneath the accessors directly to the class members.
  template <typename VisitorFunc>
  void VisitMembers(VisitorFunc& visitor) {
    DCHECK(IsImagePointerSize(kRuntimePointerSize));
    visitor(this, &declaring_class_, "declaring_class_");
    visitor(this, &access_flags_, "access_flags_");
    visitor(this, &dex_code_item_offset_, "dex_code_item_offset_");
    visitor(this, &dex_method_index_, "dex_method_index_");
    visitor(this, &method_index_, "method_index_");
    visitor(this, &hotness_count_, "hotness_count_");
    visitor(this, &ptr_sized_fields_.data_, "ptr_sized_fields_.data_");
    visitor(this,
            &ptr_sized_fields_.entry_point_from_quick_compiled_code_,
            "ptr_sized_fields_.entry_point_from_quick_compiled_code_");
  }

  // Returns the dex instructions of the code item for the art method. Returns an empty array for
  // the null code item case.
  ALWAYS_INLINE CodeItemInstructionAccessor DexInstructions()
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns the dex code item data section of the DexFile for the art method.
  ALWAYS_INLINE CodeItemDataAccessor DexInstructionData()
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns the dex code item debug info section of the DexFile for the art method.
  ALWAYS_INLINE CodeItemDebugInfoAccessor DexInstructionDebugInfo()
      REQUIRES_SHARED(Locks::mutator_lock_);

 protected:
  // Field order required by test "ValidateFieldOrderOfJavaCppUnionClasses".
  // The class we are a part of.
  GcRoot<mirror::Class> declaring_class_;

  // Access flags; low 16 bits are defined by spec.
  // Getting and setting this flag needs to be atomic when concurrency is
  // possible, e.g. after this method's class is linked. Such as when setting
  // verifier flags and single-implementation flag.
  std::atomic<std::uint32_t> access_flags_;

  /* Dex file fields. The defining dex file is available via declaring_class_->dex_cache_ */

  // Offset to the CodeItem.
  uint32_t dex_code_item_offset_;

  // Index into method_ids of the dex file associated with this method.
  uint32_t dex_method_index_;

  /* End of dex file fields. */

  // Entry within a dispatch table for this method. For static/direct methods the index is into
  // the declaringClass.directMethods, for virtual methods the vtable and for interface methods the
  // ifTable.
  uint16_t method_index_;

  // The hotness we measure for this method. Not atomic, as we allow
  // missing increments: if the method is hot, we will see it eventually.
  uint16_t hotness_count_;

  // Fake padding field gets inserted here.

  // Must be the last fields in the method.
  struct PtrSizedFields {
    // Depending on the method type, the data is
    //   - native method: pointer to the JNI function registered to this method
    //                    or a function to resolve the JNI function,
    //   - conflict method: ImtConflictTable,
    //   - abstract/interface method: the single-implementation if any,
    //   - proxy method: the original interface method or constructor,
    //   - other methods: the profiling data.
    void* data_;

    // Method dispatch from quick compiled code invokes this pointer which may cause bridging into
    // the interpreter.
    void* entry_point_from_quick_compiled_code_;
  } ptr_sized_fields_;

 private:
  uint16_t FindObsoleteDexClassDefIndex() REQUIRES_SHARED(Locks::mutator_lock_);

  static constexpr size_t PtrSizedFieldsOffset(PointerSize pointer_size) {
    // Round up to pointer size for padding field. Tested in art_method.cc.
    return RoundUp(offsetof(ArtMethod, hotness_count_) + sizeof(hotness_count_),
                   static_cast<size_t>(pointer_size));
  }

  // Compare given pointer size to the image pointer size.
  static bool IsImagePointerSize(PointerSize pointer_size);

  dex::TypeIndex GetReturnTypeIndex() REQUIRES_SHARED(Locks::mutator_lock_);

  template<typename T>
  ALWAYS_INLINE T GetNativePointer(MemberOffset offset, PointerSize pointer_size) const {
    static_assert(std::is_pointer<T>::value, "T must be a pointer type");
    const auto addr = reinterpret_cast<uintptr_t>(this) + offset.Uint32Value();
    if (pointer_size == PointerSize::k32) {
      return reinterpret_cast<T>(*reinterpret_cast<const uint32_t*>(addr));
    } else {
      auto v = *reinterpret_cast<const uint64_t*>(addr);
      return reinterpret_cast<T>(dchecked_integral_cast<uintptr_t>(v));
    }
  }
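  // Reads a field whose width is the image pointer size rather than the native pointer size:
  // 32-bit images store the value as a uint32_t, 64-bit images as a uint64_t that is then
  // range-checked by dchecked_integral_cast when narrowed to the host's uintptr_t.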

  template<typename T>
  ALWAYS_INLINE void SetNativePointer(MemberOffset offset, T new_value, PointerSize pointer_size) {
    static_assert(std::is_pointer<T>::value, "T must be a pointer type");
    const auto addr = reinterpret_cast<uintptr_t>(this) + offset.Uint32Value();
    if (pointer_size == PointerSize::k32) {
      uintptr_t ptr = reinterpret_cast<uintptr_t>(new_value);
      *reinterpret_cast<uint32_t*>(addr) = dchecked_integral_cast<uint32_t>(ptr);
    } else {
      *reinterpret_cast<uint64_t*>(addr) = reinterpret_cast<uintptr_t>(new_value);
    }
  }

  template <ReadBarrierOption kReadBarrierOption> void GetAccessFlagsDCheck();

  static inline bool IsValidIntrinsicUpdate(uint32_t modifier) {
    return (((modifier & kAccIntrinsic) == kAccIntrinsic) &&
            (((modifier & ~(kAccIntrinsic | kAccIntrinsicBits)) == 0)));
  }

  static inline bool OverlapsIntrinsicBits(uint32_t modifier) {
    return (modifier & kAccIntrinsicBits) != 0;
  }

  // This setter guarantees atomicity.
  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  void AddAccessFlags(uint32_t flag) {
    DCHECK(!IsIntrinsic<kReadBarrierOption>() ||
           !OverlapsIntrinsicBits(flag) ||
           IsValidIntrinsicUpdate(flag));
    uint32_t old_access_flags;
    uint32_t new_access_flags;
    do {
      old_access_flags = access_flags_.load(std::memory_order_relaxed);
      new_access_flags = old_access_flags | flag;
    } while (!access_flags_.compare_exchange_weak(old_access_flags, new_access_flags));
  }
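  // The compare-exchange loop retries whenever another thread changed the flags between the
  // load and the CAS (or on a spurious compare_exchange_weak failure), so concurrent
  // AddAccessFlags()/ClearAccessFlags() calls never lose each other's updates.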

  // This setter guarantees atomicity.
  void ClearAccessFlags(uint32_t flag) {
    DCHECK(!IsIntrinsic() || !OverlapsIntrinsicBits(flag) || IsValidIntrinsicUpdate(flag));
    uint32_t old_access_flags;
    uint32_t new_access_flags;
    do {
      old_access_flags = access_flags_.load(std::memory_order_relaxed);
      new_access_flags = old_access_flags & ~flag;
    } while (!access_flags_.compare_exchange_weak(old_access_flags, new_access_flags));
  }

  DISALLOW_COPY_AND_ASSIGN(ArtMethod);  // Need to use CopyFrom to deal with 32 vs 64 bits.
};

class MethodCallback {
 public:
  virtual ~MethodCallback() {}

  virtual void RegisterNativeMethod(ArtMethod* method,
                                    const void* original_implementation,
                                    /*out*/void** new_implementation)
      REQUIRES_SHARED(Locks::mutator_lock_) = 0;
};
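
// A minimal sketch of implementing MethodCallback (illustrative only; the class name below is
// hypothetical, and how callbacks are registered with the runtime is outside this header):
//
//   class LoggingMethodCallback : public MethodCallback {
//    public:
//     void RegisterNativeMethod(ArtMethod* method,
//                               const void* original_implementation,
//                               /*out*/void** new_implementation)
//         REQUIRES_SHARED(Locks::mutator_lock_) override {
//       // Keep the original implementation; a real callback could substitute a wrapper here,
//       // which RegisterNative() would then return as the new entry point.
//       *new_implementation = const_cast<void*>(original_implementation);
//     }
//   };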

}  // namespace art

#endif  // ART_RUNTIME_ART_METHOD_H_