/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_ART_METHOD_H_
#define ART_RUNTIME_ART_METHOD_H_

#include <cstddef>

#include "base/bit_utils.h"
#include "base/casts.h"
#include "base/enums.h"
#include "dex_file.h"
#include "gc_root.h"
#include "invoke_type.h"
#include "method_reference.h"
#include "modifiers.h"
#include "mirror/dex_cache.h"
#include "mirror/object.h"
#include "obj_ptr.h"
#include "read_barrier_option.h"
#include "utils.h"

namespace art {

template<class T> class Handle;
class ImtConflictTable;
union JValue;
class OatQuickMethodHeader;
class ProfilingInfo;
class ScopedObjectAccessAlreadyRunnable;
class StringPiece;
class ShadowFrame;

namespace mirror {
class Array;
class Class;
class IfTable;
class PointerArray;
}  // namespace mirror

class ArtMethod FINAL {
 public:
  static constexpr bool kCheckDeclaringClassState = kIsDebugBuild;

  // The runtime dex_method_index is kDexNoIndex. To lower dependencies, we use this
  // constexpr, and ensure that the value is correct in art_method.cc.
  static constexpr uint32_t kRuntimeMethodDexMethodIndex = 0xFFFFFFFF;

  ArtMethod() : access_flags_(0), dex_code_item_offset_(0), dex_method_index_(0),
      method_index_(0), hotness_count_(0) { }

  ArtMethod(ArtMethod* src, PointerSize image_pointer_size) {
    CopyFrom(src, image_pointer_size);
  }

  static ArtMethod* FromReflectedMethod(const ScopedObjectAccessAlreadyRunnable& soa,
                                        jobject jlr_method)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ALWAYS_INLINE mirror::Class* GetDeclaringClass() REQUIRES_SHARED(Locks::mutator_lock_);

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ALWAYS_INLINE mirror::Class* GetDeclaringClassUnchecked()
      REQUIRES_SHARED(Locks::mutator_lock_);

  mirror::CompressedReference<mirror::Object>* GetDeclaringClassAddressWithoutBarrier() {
    return declaring_class_.AddressWithoutBarrier();
  }

  void SetDeclaringClass(ObjPtr<mirror::Class> new_declaring_class)
      REQUIRES_SHARED(Locks::mutator_lock_);

  bool CASDeclaringClass(mirror::Class* expected_class, mirror::Class* desired_class)
      REQUIRES_SHARED(Locks::mutator_lock_);

  static MemberOffset DeclaringClassOffset() {
    return MemberOffset(OFFSETOF_MEMBER(ArtMethod, declaring_class_));
  }

  // Note: GetAccessFlags acquires the mutator lock in debug mode to check that it is not called for
  // a proxy method.
  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  uint32_t GetAccessFlags() {
    if (kCheckDeclaringClassState) {
      GetAccessFlagsDCheck<kReadBarrierOption>();
    }
    return access_flags_.load(std::memory_order_relaxed);
  }
  // This version should only be called when it is certain there is no
  // concurrency, so there is no need to guarantee atomicity. For example,
  // before the method is linked.
  void SetAccessFlags(uint32_t new_access_flags) {
    access_flags_.store(new_access_flags, std::memory_order_relaxed);
  }

  // This setter guarantees atomicity.
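  // Note: compare_exchange_weak may fail spuriously even when the current value equals
  // the expected one, hence the retry loop.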
  void AddAccessFlags(uint32_t flag) {
    uint32_t old_access_flags;
    uint32_t new_access_flags;
    do {
      old_access_flags = access_flags_.load(std::memory_order_relaxed);
      new_access_flags = old_access_flags | flag;
    } while (!access_flags_.compare_exchange_weak(old_access_flags, new_access_flags));
  }

  // This setter guarantees atomicity.
  void ClearAccessFlags(uint32_t flag) {
    uint32_t old_access_flags;
    uint32_t new_access_flags;
    do {
      old_access_flags = access_flags_.load(std::memory_order_relaxed);
      new_access_flags = old_access_flags & ~flag;
    } while (!access_flags_.compare_exchange_weak(old_access_flags, new_access_flags));
  }

  // Approximate what kind of method call would be used for this method.
  InvokeType GetInvokeType() REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns true if the method is declared public.
  bool IsPublic() {
    return (GetAccessFlags() & kAccPublic) != 0;
  }

  // Returns true if the method is declared private.
  bool IsPrivate() {
    return (GetAccessFlags() & kAccPrivate) != 0;
  }

  // Returns true if the method is declared static.
  bool IsStatic() {
    return (GetAccessFlags() & kAccStatic) != 0;
  }

  // Returns true if the method is a constructor according to access flags.
  bool IsConstructor() {
    return (GetAccessFlags() & kAccConstructor) != 0;
  }

  // Returns true if the method is a class initializer according to access flags.
  bool IsClassInitializer() {
    return IsConstructor() && IsStatic();
  }

  // Returns true if the method is static, private, or a constructor.
  bool IsDirect() {
    return IsDirect(GetAccessFlags());
  }

  static bool IsDirect(uint32_t access_flags) {
    constexpr uint32_t direct = kAccStatic | kAccPrivate | kAccConstructor;
    return (access_flags & direct) != 0;
  }

  // Returns true if the method is declared synchronized.
  bool IsSynchronized() {
    constexpr uint32_t synchronized_flags = kAccSynchronized | kAccDeclaredSynchronized;
    return (GetAccessFlags() & synchronized_flags) != 0;
  }

  bool IsFinal() {
    return (GetAccessFlags() & kAccFinal) != 0;
  }

  bool IsIntrinsic() {
    return (GetAccessFlags() & kAccIntrinsic) != 0;
  }

  ALWAYS_INLINE void SetIntrinsic(uint32_t intrinsic) REQUIRES_SHARED(Locks::mutator_lock_);

  uint32_t GetIntrinsic() {
    DCHECK(IsIntrinsic());
    return (GetAccessFlags() >> POPCOUNT(kAccFlagsNotUsedByIntrinsic)) & kAccMaxIntrinsic;
  }
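
  // For illustration (constants are defined in modifiers.h): the intrinsic ordinal is
  // packed into the flag bits that are repurposed for intrinsics, above the
  // POPCOUNT(kAccFlagsNotUsedByIntrinsic) bits that keep their normal meaning. If,
  // hypothetically, that popcount were 20, ordinal 5 would be stored as (5 << 20) and
  // recovered by the shift-and-mask in GetIntrinsic() above.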

  bool IsCopied() {
    static_assert((kAccCopied & kAccFlagsNotUsedByIntrinsic) == kAccCopied,
                  "kAccCopied conflicts with intrinsic modifier");
    const bool copied = (GetAccessFlags() & kAccCopied) != 0;
    // (IsMiranda() || IsDefaultConflicting()) implies copied
    DCHECK(!(IsMiranda() || IsDefaultConflicting()) || copied)
        << "Miranda or default-conflict methods must always be copied.";
    return copied;
  }

  bool IsMiranda() {
    static_assert((kAccMiranda & kAccFlagsNotUsedByIntrinsic) == kAccMiranda,
                  "kAccMiranda conflicts with intrinsic modifier");
    return (GetAccessFlags() & kAccMiranda) != 0;
  }

  // Returns true if invoking this method will not throw an AbstractMethodError or
  // IncompatibleClassChangeError.
  bool IsInvokable() {
    return !IsAbstract() && !IsDefaultConflicting();
  }

  bool IsCompilable() {
    if (IsIntrinsic()) {
      return true;
    }
    return (GetAccessFlags() & kAccCompileDontBother) == 0;
  }

  void SetDontCompile() {
    AddAccessFlags(kAccCompileDontBother);
  }

  // A default conflict method is a special sentinel method that stands for a conflict between
  // multiple default methods. It cannot be invoked, throwing an IncompatibleClassChangeError if one
  // attempts to do so.
  bool IsDefaultConflicting() {
    if (IsIntrinsic()) {
      return false;
    }
    return (GetAccessFlags() & kAccDefaultConflict) != 0u;
  }

  // This is set by the class linker.
  bool IsDefault() {
    static_assert((kAccDefault & kAccFlagsNotUsedByIntrinsic) == kAccDefault,
                  "kAccDefault conflicts with intrinsic modifier");
    return (GetAccessFlags() & kAccDefault) != 0;
  }

  bool IsObsolete() {
    return (GetAccessFlags() & kAccObsoleteMethod) != 0;
  }

  void SetIsObsolete() {
    AddAccessFlags(kAccObsoleteMethod);
  }

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  bool IsNative() {
    return (GetAccessFlags<kReadBarrierOption>() & kAccNative) != 0;
  }

  bool IsFastNative() {
    constexpr uint32_t mask = kAccFastNative | kAccNative;
    return (GetAccessFlags() & mask) == mask;
  }

  bool IsAbstract() {
    return (GetAccessFlags() & kAccAbstract) != 0;
  }

  bool IsSynthetic() {
    return (GetAccessFlags() & kAccSynthetic) != 0;
  }

  bool IsVarargs() {
    return (GetAccessFlags() & kAccVarargs) != 0;
  }

  bool IsProxyMethod() REQUIRES_SHARED(Locks::mutator_lock_);

  bool SkipAccessChecks() {
    return (GetAccessFlags() & kAccSkipAccessChecks) != 0;
  }

  void SetSkipAccessChecks() {
    AddAccessFlags(kAccSkipAccessChecks);
  }

  // Should this method be run in the interpreter and count locks (e.g., failed structured-
  // locking verification)?
  bool MustCountLocks() {
    if (IsIntrinsic()) {
      return false;
    }
    return (GetAccessFlags() & kAccMustCountLocks) != 0;
  }
  // Checks to see if the method was annotated with @dalvik.annotation.optimization.FastNative
  // -- independent of the kAccFastNative access flag.
  bool IsAnnotatedWithFastNative();

  // Checks to see if the method was annotated with @dalvik.annotation.optimization.CriticalNative
  // -- unrelated to the GC notion of "critical".
  bool IsAnnotatedWithCriticalNative();

  // Returns true if this method could be overridden by a default method.
  bool IsOverridableByDefaultMethod() REQUIRES_SHARED(Locks::mutator_lock_);

  bool CheckIncompatibleClassChange(InvokeType type) REQUIRES_SHARED(Locks::mutator_lock_);

  // Throws the error that would result from trying to invoke this method (i.e.
  // IncompatibleClassChangeError or AbstractMethodError). Only call if !IsInvokable().
  void ThrowInvocationTimeError() REQUIRES_SHARED(Locks::mutator_lock_);

  uint16_t GetMethodIndex() REQUIRES_SHARED(Locks::mutator_lock_);

  // Doesn't do erroneous / unresolved class checks.
  uint16_t GetMethodIndexDuringLinking() REQUIRES_SHARED(Locks::mutator_lock_);

  size_t GetVtableIndex() REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetMethodIndex();
  }

  void SetMethodIndex(uint16_t new_method_index) REQUIRES_SHARED(Locks::mutator_lock_) {
    // Not called within a transaction.
    method_index_ = new_method_index;
  }

  static MemberOffset DexMethodIndexOffset() {
    return OFFSET_OF_OBJECT_MEMBER(ArtMethod, dex_method_index_);
  }

  static MemberOffset MethodIndexOffset() {
    return OFFSET_OF_OBJECT_MEMBER(ArtMethod, method_index_);
  }

  uint32_t GetCodeItemOffset() {
    return dex_code_item_offset_;
  }

  void SetCodeItemOffset(uint32_t new_code_off) {
    // Not called within a transaction.
    dex_code_item_offset_ = new_code_off;
  }

  // Number of 32-bit registers that would be required to hold all the arguments.
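  // E.g. (illustrative): a shorty of "VIJ" (void return; one int and one long argument)
  // needs 1 + 2 = 3 registers, since longs and doubles occupy two registers each. The
  // implicit receiver of non-static methods is not part of the shorty.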
  static size_t NumArgRegisters(const StringPiece& shorty);

  ALWAYS_INLINE uint32_t GetDexMethodIndexUnchecked() {
    return dex_method_index_;
  }
  ALWAYS_INLINE uint32_t GetDexMethodIndex() REQUIRES_SHARED(Locks::mutator_lock_);

  void SetDexMethodIndex(uint32_t new_idx) {
    // Not called within a transaction.
    dex_method_index_ = new_idx;
  }

  ALWAYS_INLINE ArtMethod** GetDexCacheResolvedMethods(PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);
  ALWAYS_INLINE ArtMethod* GetDexCacheResolvedMethod(uint16_t method_index,
                                                     PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE void SetDexCacheResolvedMethod(uint16_t method_index,
                                               ArtMethod* new_method,
                                               PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);
  ALWAYS_INLINE void SetDexCacheResolvedMethods(ArtMethod** new_dex_cache_methods,
                                                PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);
  bool HasDexCacheResolvedMethods(PointerSize pointer_size) REQUIRES_SHARED(Locks::mutator_lock_);
  bool HasSameDexCacheResolvedMethods(ArtMethod* other, PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);
  bool HasSameDexCacheResolvedMethods(ArtMethod** other_cache, PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Get the Class* from the type index into this method's dex cache.
  mirror::Class* GetClassFromTypeIndex(dex::TypeIndex type_idx, bool resolve)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns true if this method has the same name and signature as the other method.
  bool HasSameNameAndSignature(ArtMethod* other) REQUIRES_SHARED(Locks::mutator_lock_);

  // Find the method that this method overrides.
  ArtMethod* FindOverriddenMethod(PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Find the method index for this method within other_dexfile. If this method isn't present then
  // return DexFile::kDexNoIndex. The name_and_signature_idx MUST refer to a MethodId with the same
  // name and signature in the other_dexfile, such as the method index used to resolve this method
  // in the other_dexfile.
  uint32_t FindDexMethodIndexInOtherDexFile(const DexFile& other_dexfile,
                                            uint32_t name_and_signature_idx)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void Invoke(Thread* self, uint32_t* args, uint32_t args_size, JValue* result, const char* shorty)
      REQUIRES_SHARED(Locks::mutator_lock_);

  const void* GetEntryPointFromQuickCompiledCode() {
    return GetEntryPointFromQuickCompiledCodePtrSize(kRuntimePointerSize);
  }
  ALWAYS_INLINE const void* GetEntryPointFromQuickCompiledCodePtrSize(PointerSize pointer_size) {
    return GetNativePointer<const void*>(
        EntryPointFromQuickCompiledCodeOffset(pointer_size), pointer_size);
  }

  void SetEntryPointFromQuickCompiledCode(const void* entry_point_from_quick_compiled_code) {
    SetEntryPointFromQuickCompiledCodePtrSize(entry_point_from_quick_compiled_code,
                                              kRuntimePointerSize);
  }
  ALWAYS_INLINE void SetEntryPointFromQuickCompiledCodePtrSize(
      const void* entry_point_from_quick_compiled_code, PointerSize pointer_size) {
    SetNativePointer(EntryPointFromQuickCompiledCodeOffset(pointer_size),
                     entry_point_from_quick_compiled_code,
                     pointer_size);
  }

  // Registers the native method and returns the new entry point. NB: the returned entry point
  // might differ from the native_method argument if some MethodCallback modifies it.
  const void* RegisterNative(const void* native_method, bool is_fast)
      REQUIRES_SHARED(Locks::mutator_lock_) WARN_UNUSED;

  void UnregisterNative() REQUIRES_SHARED(Locks::mutator_lock_);

  static MemberOffset DexCacheResolvedMethodsOffset(PointerSize pointer_size) {
    return MemberOffset(PtrSizedFieldsOffset(pointer_size) + OFFSETOF_MEMBER(
        PtrSizedFields, dex_cache_resolved_methods_) / sizeof(void*)
            * static_cast<size_t>(pointer_size));
  }

  static MemberOffset DataOffset(PointerSize pointer_size) {
    return MemberOffset(PtrSizedFieldsOffset(pointer_size) + OFFSETOF_MEMBER(
        PtrSizedFields, data_) / sizeof(void*) * static_cast<size_t>(pointer_size));
  }

  static MemberOffset EntryPointFromJniOffset(PointerSize pointer_size) {
    return DataOffset(pointer_size);
  }

  static MemberOffset EntryPointFromQuickCompiledCodeOffset(PointerSize pointer_size) {
    return MemberOffset(PtrSizedFieldsOffset(pointer_size) + OFFSETOF_MEMBER(
        PtrSizedFields, entry_point_from_quick_compiled_code_) / sizeof(void*)
            * static_cast<size_t>(pointer_size));
  }
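
  // Illustrative arithmetic (not normative): OFFSETOF_MEMBER yields the host offset, so
  // dividing by the host's sizeof(void*) gives the field's slot index within
  // PtrSizedFields, which is then rescaled by the target pointer size. E.g.,
  // entry_point_from_quick_compiled_code_ is slot 2, so its offset is
  // PtrSizedFieldsOffset(pointer_size) + 2 * pointer_size, independent of whether the
  // compiler process itself is 32-bit or 64-bit.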

  ImtConflictTable* GetImtConflictTable(PointerSize pointer_size) {
    DCHECK(IsRuntimeMethod());
    return reinterpret_cast<ImtConflictTable*>(GetDataPtrSize(pointer_size));
  }

  ALWAYS_INLINE void SetImtConflictTable(ImtConflictTable* table, PointerSize pointer_size) {
    DCHECK(IsRuntimeMethod());
    SetDataPtrSize(table, pointer_size);
  }

  ProfilingInfo* GetProfilingInfo(PointerSize pointer_size) {
    DCHECK(!IsNative());
    return reinterpret_cast<ProfilingInfo*>(GetDataPtrSize(pointer_size));
  }

  ALWAYS_INLINE void SetProfilingInfo(ProfilingInfo* info) {
    SetDataPtrSize(info, kRuntimePointerSize);
  }

  ALWAYS_INLINE void SetProfilingInfoPtrSize(ProfilingInfo* info, PointerSize pointer_size) {
    SetDataPtrSize(info, pointer_size);
  }

  static MemberOffset ProfilingInfoOffset() {
    DCHECK(IsImagePointerSize(kRuntimePointerSize));
    return DataOffset(kRuntimePointerSize);
  }

  ALWAYS_INLINE bool HasSingleImplementation() REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE void SetHasSingleImplementation(bool single_impl) {
    DCHECK(!IsIntrinsic()) << "conflict with intrinsic bits";
    if (single_impl) {
      AddAccessFlags(kAccSingleImplementation);
    } else {
      ClearAccessFlags(kAccSingleImplementation);
    }
  }

  ArtMethod* GetSingleImplementation(PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE void SetSingleImplementation(ArtMethod* method, PointerSize pointer_size) {
    DCHECK(!IsNative());
    DCHECK(IsAbstract());  // Non-abstract method's single implementation is just itself.
    SetDataPtrSize(method, pointer_size);
  }

  void* GetEntryPointFromJni() {
    DCHECK(IsNative());
    return GetEntryPointFromJniPtrSize(kRuntimePointerSize);
  }

  ALWAYS_INLINE void* GetEntryPointFromJniPtrSize(PointerSize pointer_size) {
    return GetDataPtrSize(pointer_size);
  }

  void SetEntryPointFromJni(const void* entrypoint) {
    DCHECK(IsNative());
    SetEntryPointFromJniPtrSize(entrypoint, kRuntimePointerSize);
  }

  ALWAYS_INLINE void SetEntryPointFromJniPtrSize(const void* entrypoint, PointerSize pointer_size) {
    SetDataPtrSize(entrypoint, pointer_size);
  }

  ALWAYS_INLINE void* GetDataPtrSize(PointerSize pointer_size) {
    DCHECK(IsImagePointerSize(pointer_size));
    return GetNativePointer<void*>(DataOffset(pointer_size), pointer_size);
  }

  ALWAYS_INLINE void SetDataPtrSize(const void* data, PointerSize pointer_size) {
    DCHECK(IsImagePointerSize(pointer_size));
    SetNativePointer(DataOffset(pointer_size), data, pointer_size);
  }

  // Is this a CalleeSaveMethod or ResolutionMethod, and therefore one that does not adhere to
  // normal conventions for a method of managed code? Returns false for proxy methods.
  ALWAYS_INLINE bool IsRuntimeMethod() {
    return dex_method_index_ == kRuntimeMethodDexMethodIndex;
  }

  // Is this a hand-crafted method used for something like describing callee saves?
  bool IsCalleeSaveMethod() REQUIRES_SHARED(Locks::mutator_lock_);

  bool IsResolutionMethod() REQUIRES_SHARED(Locks::mutator_lock_);

  bool IsImtUnimplementedMethod() REQUIRES_SHARED(Locks::mutator_lock_);

  MethodReference ToMethodReference() REQUIRES_SHARED(Locks::mutator_lock_) {
    return MethodReference(GetDexFile(), GetDexMethodIndex());
  }

  // Find the catch block for the given exception type and dex_pc. When a catch block is found,
  // indicates whether the found catch block is responsible for clearing the exception or whether
  // a move-exception instruction is present.
  uint32_t FindCatchBlock(Handle<mirror::Class> exception_type, uint32_t dex_pc,
                          bool* has_no_move_exception)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // NO_THREAD_SAFETY_ANALYSIS since we don't know what the callback requires.
  template<ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename RootVisitorType>
  void VisitRoots(RootVisitorType& visitor, PointerSize pointer_size) NO_THREAD_SAFETY_ANALYSIS;

  const DexFile* GetDexFile() REQUIRES_SHARED(Locks::mutator_lock_);

  const char* GetDeclaringClassDescriptor() REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE const char* GetShorty() REQUIRES_SHARED(Locks::mutator_lock_);

  const char* GetShorty(uint32_t* out_length) REQUIRES_SHARED(Locks::mutator_lock_);

  const Signature GetSignature() REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE const char* GetName() REQUIRES_SHARED(Locks::mutator_lock_);

  mirror::String* GetNameAsString(Thread* self) REQUIRES_SHARED(Locks::mutator_lock_);

  const DexFile::CodeItem* GetCodeItem() REQUIRES_SHARED(Locks::mutator_lock_);

  bool IsResolvedTypeIdx(dex::TypeIndex type_idx) REQUIRES_SHARED(Locks::mutator_lock_);

  int32_t GetLineNumFromDexPC(uint32_t dex_pc) REQUIRES_SHARED(Locks::mutator_lock_);

  const DexFile::ProtoId& GetPrototype() REQUIRES_SHARED(Locks::mutator_lock_);

  const DexFile::TypeList* GetParameterTypeList() REQUIRES_SHARED(Locks::mutator_lock_);

  const char* GetDeclaringClassSourceFile() REQUIRES_SHARED(Locks::mutator_lock_);

  uint16_t GetClassDefIndex() REQUIRES_SHARED(Locks::mutator_lock_);

  const DexFile::ClassDef& GetClassDef() REQUIRES_SHARED(Locks::mutator_lock_);

  const char* GetReturnTypeDescriptor() REQUIRES_SHARED(Locks::mutator_lock_);

  const char* GetTypeDescriptorFromTypeIdx(dex::TypeIndex type_idx)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // May cause thread suspension, because GetClassFromTypeIdx calls ResolveType; this has caused
  // a large number of bugs at call sites.
  mirror::Class* GetReturnType(bool resolve) REQUIRES_SHARED(Locks::mutator_lock_);

  mirror::ClassLoader* GetClassLoader() REQUIRES_SHARED(Locks::mutator_lock_);

  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  mirror::DexCache* GetDexCache() REQUIRES_SHARED(Locks::mutator_lock_);
  mirror::DexCache* GetObsoleteDexCache() REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE ArtMethod* GetInterfaceMethodIfProxy(PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  ArtMethod* GetNonObsoleteMethod() REQUIRES_SHARED(Locks::mutator_lock_);

  // May cause thread suspension due to class resolution.
  bool EqualParameters(Handle<mirror::ObjectArray<mirror::Class>> params)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Size of an instance of this native class.
  static size_t Size(PointerSize pointer_size) {
    return PtrSizedFieldsOffset(pointer_size) +
        (sizeof(PtrSizedFields) / sizeof(void*)) * static_cast<size_t>(pointer_size);
  }
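
  // Worked example (illustrative; the real values are checked by tests): assuming a
  // 4-byte GcRoot, the fixed-size fields below end at byte 20, and PtrSizedFields has
  // three pointer slots. So Size(PointerSize::k32) = RoundUp(20, 4) + 3 * 4 = 32 and
  // Size(PointerSize::k64) = RoundUp(20, 8) + 3 * 8 = 48.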

  // Alignment of an instance of this native class.
  static size_t Alignment(PointerSize pointer_size) {
    // The ArtMethod alignment is the same as image pointer size. This differs from
    // alignof(ArtMethod) if cross-compiling with pointer_size != sizeof(void*).
    return static_cast<size_t>(pointer_size);
  }

  void CopyFrom(ArtMethod* src, PointerSize image_pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Note: hotness_count_ updates are non-atomic, but the count does not need to be precise. Also,
  // given that the counter is only 16 bits wide, we can expect wrap-around in some
  // situations. Consumers of hotness_count_ must be able to deal with that.
  uint16_t IncrementCounter() {
    return ++hotness_count_;
  }

  void ClearCounter() {
    hotness_count_ = 0;
  }

  void SetCounter(int16_t hotness_count) {
    hotness_count_ = hotness_count;
  }

  uint16_t GetCounter() const {
    return hotness_count_;
  }

  const uint8_t* GetQuickenedInfo(PointerSize pointer_size) REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns the method header for the compiled code containing 'pc'. Note that runtime
  // methods return null here, as they are not oat-based.
  const OatQuickMethodHeader* GetOatQuickMethodHeader(uintptr_t pc)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Get compiled code for the method, return null if no code exists.
  const void* GetOatMethodQuickCode(PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns whether the method has any compiled code, JIT or AOT.
  bool HasAnyCompiledCode() REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns a human-readable signature for 'm'. Something like "a.b.C.m" or
  // "a.b.C.m(II)V" (depending on the value of 'with_signature').
  static std::string PrettyMethod(ArtMethod* m, bool with_signature = true)
      REQUIRES_SHARED(Locks::mutator_lock_);
  std::string PrettyMethod(bool with_signature = true)
      REQUIRES_SHARED(Locks::mutator_lock_);
  // Returns the JNI native function name for this method, as used for non-overloaded methods.
  std::string JniShortName()
      REQUIRES_SHARED(Locks::mutator_lock_);
  // Returns the JNI native function name for this method, as used for overloaded methods.
  std::string JniLongName()
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Update heap objects and non-entrypoint pointers by the passed in visitor for image relocation.
  // Does not use read barrier.
  template <typename Visitor>
  ALWAYS_INLINE void UpdateObjectsForImageRelocation(const Visitor& visitor,
                                                     PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Update entry points by passing them through the visitor.
  template <ReadBarrierOption kReadBarrierOption = kWithReadBarrier, typename Visitor>
  ALWAYS_INLINE void UpdateEntrypoints(const Visitor& visitor, PointerSize pointer_size);

 protected:
  // Field order required by test "ValidateFieldOrderOfJavaCppUnionClasses".
  // The class we are a part of.
  GcRoot<mirror::Class> declaring_class_;

  // Access flags; low 16 bits are defined by spec.
  // Getting and setting this field needs to be atomic whenever concurrency is possible,
  // e.g. after this method's class is linked, such as when setting the verifier flags or the
  // single-implementation flag.
  std::atomic<std::uint32_t> access_flags_;

  /* Dex file fields. The defining dex file is available via declaring_class_->dex_cache_ */

  // Offset to the CodeItem.
  uint32_t dex_code_item_offset_;

  // Index into method_ids of the dex file associated with this method.
  uint32_t dex_method_index_;

  /* End of dex file fields. */

  // Entry within a dispatch table for this method. For static/direct methods the index is into
  // the declaringClass.directMethods, for virtual methods the vtable and for interface methods the
  // ifTable.
  uint16_t method_index_;

  // The hotness we measure for this method. Managed by the interpreter. Not atomic, as we allow
  // missing increments: if the method is hot, we will see it eventually.
  uint16_t hotness_count_;

  // Fake padding field gets inserted here.

  // Must be the last fields in the method.
  struct PtrSizedFields {
    // Shortcuts to the declaring_class_->dex_cache_ member, for fast access from compiled code.
    ArtMethod** dex_cache_resolved_methods_;

    // Pointer to JNI function registered to this method, or a function to resolve the JNI function,
    // or the profiling data for non-native methods, or an ImtConflictTable, or the
    // single-implementation of an abstract/interface method.
    void* data_;

    // Method dispatch from quick compiled code invokes this pointer which may cause bridging into
    // the interpreter.
    void* entry_point_from_quick_compiled_code_;
  } ptr_sized_fields_;

 private:
  uint16_t FindObsoleteDexClassDefIndex() REQUIRES_SHARED(Locks::mutator_lock_);

  // If `lookup_in_resolved_boot_classes` is true, look up any of the
  // method's annotations' classes in the bootstrap class loader's
  // resolved types; otherwise, resolve them as a side effect.
  bool IsAnnotatedWith(jclass klass, uint32_t visibility, bool lookup_in_resolved_boot_classes);

  static constexpr size_t PtrSizedFieldsOffset(PointerSize pointer_size) {
    // Round up to pointer size for padding field. Tested in art_method.cc.
    return RoundUp(offsetof(ArtMethod, hotness_count_) + sizeof(hotness_count_),
                   static_cast<size_t>(pointer_size));
  }

  // Compare given pointer size to the image pointer size.
  static bool IsImagePointerSize(PointerSize pointer_size);

  template<typename T>
  ALWAYS_INLINE T GetNativePointer(MemberOffset offset, PointerSize pointer_size) const {
    static_assert(std::is_pointer<T>::value, "T must be a pointer type");
    const auto addr = reinterpret_cast<uintptr_t>(this) + offset.Uint32Value();
    if (pointer_size == PointerSize::k32) {
      return reinterpret_cast<T>(*reinterpret_cast<const uint32_t*>(addr));
    } else {
      auto v = *reinterpret_cast<const uint64_t*>(addr);
      return reinterpret_cast<T>(dchecked_integral_cast<uintptr_t>(v));
    }
  }
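
  // Note (explanatory): the slot is read at the width of the target image, which can
  // differ from the host's sizeof(void*) when cross-compiling. dchecked_integral_cast
  // flags 64-bit stored values that do not fit a 32-bit host uintptr_t.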

  template<typename T>
  ALWAYS_INLINE void SetNativePointer(MemberOffset offset, T new_value, PointerSize pointer_size) {
    static_assert(std::is_pointer<T>::value, "T must be a pointer type");
    const auto addr = reinterpret_cast<uintptr_t>(this) + offset.Uint32Value();
    if (pointer_size == PointerSize::k32) {
      uintptr_t ptr = reinterpret_cast<uintptr_t>(new_value);
      *reinterpret_cast<uint32_t*>(addr) = dchecked_integral_cast<uint32_t>(ptr);
    } else {
      *reinterpret_cast<uint64_t*>(addr) = reinterpret_cast<uintptr_t>(new_value);
    }
  }

  template <ReadBarrierOption kReadBarrierOption> void GetAccessFlagsDCheck();

  DISALLOW_COPY_AND_ASSIGN(ArtMethod);  // Need to use CopyFrom to deal with 32 vs 64 bits.
};

class MethodCallback {
 public:
  virtual ~MethodCallback() {}

  virtual void RegisterNativeMethod(ArtMethod* method,
                                    const void* original_implementation,
                                    /*out*/void** new_implementation)
      REQUIRES_SHARED(Locks::mutator_lock_) = 0;
};
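
// Illustrative sketch only (not part of this header): a pass-through callback that
// keeps the original implementation might look like
//
//   class PassThroughMethodCallback : public MethodCallback {
//    public:
//     void RegisterNativeMethod(ArtMethod* method ATTRIBUTE_UNUSED,
//                               const void* original_implementation,
//                               /*out*/void** new_implementation)
//         REQUIRES_SHARED(Locks::mutator_lock_) override {
//       // Leave the entry point unchanged; RegisterNative() installs whatever ends
//       // up in *new_implementation.
//       *new_implementation = const_cast<void*>(original_implementation);
//     }
//   };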

}  // namespace art

#endif  // ART_RUNTIME_ART_METHOD_H_