/*
 * Copyright (C) 2008 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16 
17 #include "dalvik_system_VMStack.h"
18 
19 #include <type_traits>
20 
21 #include "nativehelper/jni_macros.h"
22 
23 #include "art_method-inl.h"
24 #include "gc/task_processor.h"
25 #include "jni/jni_internal.h"
26 #include "mirror/class-inl.h"
27 #include "mirror/class_loader.h"
28 #include "mirror/object-inl.h"
29 #include "native_util.h"
30 #include "nth_caller_visitor.h"
31 #include "scoped_fast_native_object_access-inl.h"
32 #include "scoped_thread_state_change-inl.h"
33 #include "thread_list.h"
34 
35 namespace art {
36 
37 template <typename T,
38           typename ResultT =
39               typename std::result_of<T(Thread*, const ScopedFastNativeObjectAccess&)>::type>
GetThreadStack(const ScopedFastNativeObjectAccess & soa,jobject peer,T fn)40 static ResultT GetThreadStack(const ScopedFastNativeObjectAccess& soa,
41                               jobject peer,
42                               T fn)
43     REQUIRES_SHARED(Locks::mutator_lock_) {
44   ResultT trace = nullptr;
45   ObjPtr<mirror::Object> decoded_peer = soa.Decode<mirror::Object>(peer);
46   if (decoded_peer == soa.Self()->GetPeer()) {
47     trace = fn(soa.Self(), soa);
48   } else {
49     // Never allow suspending the heap task thread since it may deadlock if allocations are
50     // required for the stack trace.
51     Thread* heap_task_thread =
52         Runtime::Current()->GetHeap()->GetTaskProcessor()->GetRunningThread();
53     // heap_task_thread could be null if the daemons aren't yet started.
54     if (heap_task_thread != nullptr && decoded_peer == heap_task_thread->GetPeerFromOtherThread()) {
55       return nullptr;
56     }
57     // Suspend thread to build stack trace.
58     ScopedThreadSuspension sts(soa.Self(), kNative);
59     ThreadList* thread_list = Runtime::Current()->GetThreadList();
60     bool timed_out;
61     Thread* thread = thread_list->SuspendThreadByPeer(peer,
62                                                       SuspendReason::kInternal,
63                                                       &timed_out);
64     if (thread != nullptr) {
65       // Must be runnable to create returned array.
66       {
67         ScopedObjectAccess soa2(soa.Self());
68         trace = fn(thread, soa);
69       }
70       // Restart suspended thread.
71       bool resumed = thread_list->Resume(thread, SuspendReason::kInternal);
72       DCHECK(resumed);
73     } else if (timed_out) {
74       LOG(ERROR) << "Trying to get thread's stack failed as the thread failed to suspend within a "
75           "generous timeout.";
76     }
77   }
78   return trace;
79 }
80 
VMStack_fillStackTraceElements(JNIEnv * env,jclass,jobject javaThread,jobjectArray javaSteArray)81 static jint VMStack_fillStackTraceElements(JNIEnv* env, jclass, jobject javaThread,
82                                            jobjectArray javaSteArray) {
83   ScopedFastNativeObjectAccess soa(env);
84   auto fn = [](Thread* thread, const ScopedFastNativeObjectAccess& soaa)
85       REQUIRES_SHARED(Locks::mutator_lock_) -> jobject {
86     return thread->CreateInternalStackTrace(soaa);
87   };
88   jobject trace = GetThreadStack(soa, javaThread, fn);
89   if (trace == nullptr) {
90     return 0;
91   }
92   int32_t depth;
93   Thread::InternalStackTraceToStackTraceElementArray(soa, trace, javaSteArray, &depth);
94   return depth;
95 }
96 
97 // Returns the defining class loader of the caller's caller.
VMStack_getCallingClassLoader(JNIEnv * env,jclass)98 static jobject VMStack_getCallingClassLoader(JNIEnv* env, jclass) {
99   ScopedFastNativeObjectAccess soa(env);
100   NthCallerVisitor visitor(soa.Self(), 2);
101   visitor.WalkStack();
102   if (UNLIKELY(visitor.caller == nullptr)) {
103     // The caller is an attached native thread.
104     return nullptr;
105   }
106   return soa.AddLocalReference<jobject>(visitor.caller->GetDeclaringClass()->GetClassLoader());
107 }
108 
VMStack_getClosestUserClassLoader(JNIEnv * env,jclass)109 static jobject VMStack_getClosestUserClassLoader(JNIEnv* env, jclass) {
110   struct ClosestUserClassLoaderVisitor : public StackVisitor {
111     explicit ClosestUserClassLoaderVisitor(Thread* thread)
112       : StackVisitor(thread, nullptr, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
113         class_loader(nullptr) {}
114 
115     bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
116       DCHECK(class_loader == nullptr);
117       ObjPtr<mirror::Class> c = GetMethod()->GetDeclaringClass();
118       // c is null for runtime methods.
119       if (c != nullptr) {
120         ObjPtr<mirror::Object> cl = c->GetClassLoader();
121         if (cl != nullptr) {
122           class_loader = cl;
123           return false;
124         }
125       }
126       return true;
127     }
128 
129     ObjPtr<mirror::Object> class_loader;
130   };
131   ScopedFastNativeObjectAccess soa(env);
132   ClosestUserClassLoaderVisitor visitor(soa.Self());
133   visitor.WalkStack();
134   return soa.AddLocalReference<jobject>(visitor.class_loader);
135 }
136 
137 // Returns the class of the caller's caller's caller.
VMStack_getStackClass2(JNIEnv * env,jclass)138 static jclass VMStack_getStackClass2(JNIEnv* env, jclass) {
139   ScopedFastNativeObjectAccess soa(env);
140   NthCallerVisitor visitor(soa.Self(), 3);
141   visitor.WalkStack();
142   if (UNLIKELY(visitor.caller == nullptr)) {
143     // The caller is an attached native thread.
144     return nullptr;
145   }
146   return soa.AddLocalReference<jclass>(visitor.caller->GetDeclaringClass());
147 }
148 
VMStack_getThreadStackTrace(JNIEnv * env,jclass,jobject javaThread)149 static jobjectArray VMStack_getThreadStackTrace(JNIEnv* env, jclass, jobject javaThread) {
150   ScopedFastNativeObjectAccess soa(env);
151   auto fn = [](Thread* thread, const ScopedFastNativeObjectAccess& soaa)
152      REQUIRES_SHARED(Locks::mutator_lock_) -> jobject {
153     return thread->CreateInternalStackTrace(soaa);
154   };
155   jobject trace = GetThreadStack(soa, javaThread, fn);
156   if (trace == nullptr) {
157     return nullptr;
158   }
159   return Thread::InternalStackTraceToStackTraceElementArray(soa, trace);
160 }
161 
VMStack_getAnnotatedThreadStackTrace(JNIEnv * env,jclass,jobject javaThread)162 static jobjectArray VMStack_getAnnotatedThreadStackTrace(JNIEnv* env, jclass, jobject javaThread) {
163   ScopedFastNativeObjectAccess soa(env);
164   auto fn = [](Thread* thread, const ScopedFastNativeObjectAccess& soaa)
165       REQUIRES_SHARED(Locks::mutator_lock_) -> jobjectArray {
166     return thread->CreateAnnotatedStackTrace(soaa);
167   };
168   return GetThreadStack(soa, javaThread, fn);
169 }
170 
171 static JNINativeMethod gMethods[] = {
172   FAST_NATIVE_METHOD(VMStack, fillStackTraceElements, "(Ljava/lang/Thread;[Ljava/lang/StackTraceElement;)I"),
173   FAST_NATIVE_METHOD(VMStack, getCallingClassLoader, "()Ljava/lang/ClassLoader;"),
174   FAST_NATIVE_METHOD(VMStack, getClosestUserClassLoader, "()Ljava/lang/ClassLoader;"),
175   FAST_NATIVE_METHOD(VMStack, getStackClass2, "()Ljava/lang/Class;"),
176   FAST_NATIVE_METHOD(VMStack, getThreadStackTrace, "(Ljava/lang/Thread;)[Ljava/lang/StackTraceElement;"),
177   FAST_NATIVE_METHOD(VMStack, getAnnotatedThreadStackTrace, "(Ljava/lang/Thread;)[Ldalvik/system/AnnotatedStackTraceElement;"),
178 };
179 
register_dalvik_system_VMStack(JNIEnv * env)180 void register_dalvik_system_VMStack(JNIEnv* env) {
181   REGISTER_NATIVE_METHODS("dalvik/system/VMStack");
182 }
183 
184 }  // namespace art
185