1 /* Copyright (C) 2016 The Android Open Source Project
2  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
3  *
4  * This file implements interfaces from the file jvmti.h. This implementation
5  * is licensed under the same terms as the file jvmti.h.  The
6  * copyright and license information for the file jvmti.h follows.
7  *
8  * Copyright (c) 2003, 2011, Oracle and/or its affiliates. All rights reserved.
9  * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
10  *
11  * This code is free software; you can redistribute it and/or modify it
12  * under the terms of the GNU General Public License version 2 only, as
13  * published by the Free Software Foundation.  Oracle designates this
14  * particular file as subject to the "Classpath" exception as provided
15  * by Oracle in the LICENSE file that accompanied this code.
16  *
17  * This code is distributed in the hope that it will be useful, but WITHOUT
18  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
19  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
20  * version 2 for more details (a copy is included in the LICENSE file that
21  * accompanied this code).
22  *
23  * You should have received a copy of the GNU General Public License version
24  * 2 along with this work; if not, write to the Free Software Foundation,
25  * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
26  *
27  * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
28  * or visit www.oracle.com if you need additional information or have any
29  * questions.
30  */
31 
32 #include <android-base/thread_annotations.h>
33 
34 #include "alloc_manager.h"
35 #include "base/locks.h"
36 #include "base/mutex.h"
37 #include "events-inl.h"
38 
39 #include <array>
40 #include <functional>
41 #include <sys/time.h>
42 
43 #include "arch/context.h"
44 #include "art_field-inl.h"
45 #include "art_jvmti.h"
46 #include "art_method-inl.h"
47 #include "base/mutex.h"
48 #include "deopt_manager.h"
49 #include "dex/dex_file_types.h"
50 #include "events.h"
51 #include "gc/allocation_listener.h"
52 #include "gc/gc_pause_listener.h"
53 #include "gc/heap.h"
54 #include "gc/scoped_gc_critical_section.h"
55 #include "handle_scope-inl.h"
56 #include "indirect_reference_table.h"
57 #include "instrumentation.h"
58 #include "interpreter/shadow_frame.h"
59 #include "jni/jni_env_ext-inl.h"
60 #include "jni/jni_internal.h"
61 #include "jvalue-inl.h"
62 #include "jvalue.h"
63 #include "jvmti.h"
64 #include "mirror/class.h"
65 #include "mirror/object-inl.h"
66 #include "monitor-inl.h"
67 #include "nativehelper/scoped_local_ref.h"
68 #include "reflective_handle.h"
69 #include "reflective_handle_scope-inl.h"
70 #include "runtime.h"
71 #include "scoped_thread_state_change-inl.h"
72 #include "scoped_thread_state_change.h"
73 #include "stack.h"
74 #include "thread.h"
75 #include "thread-inl.h"
76 #include "thread_list.h"
77 #include "ti_phase.h"
78 #include "ti_thread.h"
79 #include "well_known_classes.h"
80 
81 namespace openjdkjvmti {
82 
CopyExtensionsFrom(const ArtJvmtiEventCallbacks * cb)83 void ArtJvmtiEventCallbacks::CopyExtensionsFrom(const ArtJvmtiEventCallbacks* cb) {
84   if (art::kIsDebugBuild) {
85     ArtJvmtiEventCallbacks clean;
86     DCHECK_EQ(memcmp(&clean, this, sizeof(clean)), 0)
87         << "CopyExtensionsFrom called with initialized eventsCallbacks!";
88   }
89   if (cb != nullptr) {
90     memcpy(this, cb, sizeof(*this));
91   } else {
92     memset(this, 0, sizeof(*this));
93   }
94 }
95 
Set(jint index,jvmtiExtensionEvent cb)96 jvmtiError ArtJvmtiEventCallbacks::Set(jint index, jvmtiExtensionEvent cb) {
97   switch (index) {
98     case static_cast<jint>(ArtJvmtiEvent::kObsoleteObjectCreated):
99       ObsoleteObjectCreated = reinterpret_cast<ArtJvmtiEventObsoleteObjectCreated>(cb);
100       return OK;
101     case static_cast<jint>(ArtJvmtiEvent::kDdmPublishChunk):
102       DdmPublishChunk = reinterpret_cast<ArtJvmtiEventDdmPublishChunk>(cb);
103       return OK;
104     case static_cast<jint>(ArtJvmtiEvent::kStructuralDexFileLoadHook):
105       StructuralDexFileLoadHook = reinterpret_cast<ArtJvmtiEventStructuralDexFileLoadHook>(cb);
106       return OK;
107     default:
108       return ERR(ILLEGAL_ARGUMENT);
109   }
110 }
111 
112 
IsExtensionEvent(jint e)113 bool IsExtensionEvent(jint e) {
114   return e >= static_cast<jint>(ArtJvmtiEvent::kMinEventTypeVal) &&
115       e <= static_cast<jint>(ArtJvmtiEvent::kMaxEventTypeVal) &&
116       IsExtensionEvent(static_cast<ArtJvmtiEvent>(e));
117 }
118 
IsExtensionEvent(ArtJvmtiEvent e)119 bool IsExtensionEvent(ArtJvmtiEvent e) {
120   switch (e) {
121     case ArtJvmtiEvent::kDdmPublishChunk:
122     case ArtJvmtiEvent::kObsoleteObjectCreated:
123     case ArtJvmtiEvent::kStructuralDexFileLoadHook:
124       return true;
125     default:
126       return false;
127   }
128 }
129 
IsEnabledAnywhere(ArtJvmtiEvent event)130 bool EventMasks::IsEnabledAnywhere(ArtJvmtiEvent event) {
131   return global_event_mask.Test(event) || unioned_thread_event_mask.Test(event);
132 }
133 
GetEventMask(art::Thread * thread)134 EventMask& EventMasks::GetEventMask(art::Thread* thread) {
135   if (thread == nullptr) {
136     return global_event_mask;
137   }
138 
139   for (auto& pair : thread_event_masks) {
140     const UniqueThread& unique_thread = pair.first;
141     if (unique_thread.first == thread &&
142         unique_thread.second == static_cast<uint32_t>(thread->GetTid())) {
143       return pair.second;
144     }
145   }
146 
147   // TODO: Remove old UniqueThread with the same pointer, if exists.
148 
149   thread_event_masks.emplace_back(UniqueThread(thread, thread->GetTid()), EventMask());
150   return thread_event_masks.back().second;
151 }
152 
GetEventMaskOrNull(art::Thread * thread)153 EventMask* EventMasks::GetEventMaskOrNull(art::Thread* thread) {
154   if (thread == nullptr) {
155     return &global_event_mask;
156   }
157 
158   for (auto& pair : thread_event_masks) {
159     const UniqueThread& unique_thread = pair.first;
160     if (unique_thread.first == thread &&
161         unique_thread.second == static_cast<uint32_t>(thread->GetTid())) {
162       return &pair.second;
163     }
164   }
165 
166   return nullptr;
167 }
168 
169 
// Sets |event| in the mask for |thread| (the global mask when |thread| is null). The caller
// must hold |env|'s event_info_mutex_ exclusively; |this| must be |env|'s own mask set.
void EventMasks::EnableEvent(ArtJvmTiEnv* env, art::Thread* thread, ArtJvmtiEvent event) {
  DCHECK_EQ(&env->event_masks, this);
  env->event_info_mutex_.AssertExclusiveHeld(art::Thread::Current());
  DCHECK(EventMask::EventIsInRange(event));
  GetEventMask(thread).Set(event);
  // A per-thread enable also sets the bit in the union of all per-thread masks, which backs
  // the IsEnabledAnywhere fast path.
  if (thread != nullptr) {
    unioned_thread_event_mask.Set(event, true);
  }
}
179 
DisableEvent(ArtJvmTiEnv * env,art::Thread * thread,ArtJvmtiEvent event)180 void EventMasks::DisableEvent(ArtJvmTiEnv* env, art::Thread* thread, ArtJvmtiEvent event) {
181   DCHECK_EQ(&env->event_masks, this);
182   env->event_info_mutex_.AssertExclusiveHeld(art::Thread::Current());
183   DCHECK(EventMask::EventIsInRange(event));
184   GetEventMask(thread).Set(event, false);
185   if (thread != nullptr) {
186     // Regenerate union for the event.
187     bool union_value = false;
188     for (auto& pair : thread_event_masks) {
189       union_value |= pair.second.Test(event);
190       if (union_value) {
191         break;
192       }
193     }
194     unioned_thread_event_mask.Set(event, union_value);
195   }
196 }
197 
HandleChangedCapabilities(const jvmtiCapabilities & caps,bool caps_added)198 void EventMasks::HandleChangedCapabilities(const jvmtiCapabilities& caps, bool caps_added) {
199   if (UNLIKELY(caps.can_retransform_classes == 1)) {
200     // If we are giving this env the retransform classes cap we need to switch all events of
201     // NonTransformable to Transformable and vice versa.
202     ArtJvmtiEvent to_remove = caps_added ? ArtJvmtiEvent::kClassFileLoadHookNonRetransformable
203                                          : ArtJvmtiEvent::kClassFileLoadHookRetransformable;
204     ArtJvmtiEvent to_add = caps_added ? ArtJvmtiEvent::kClassFileLoadHookRetransformable
205                                       : ArtJvmtiEvent::kClassFileLoadHookNonRetransformable;
206     if (global_event_mask.Test(to_remove)) {
207       CHECK(!global_event_mask.Test(to_add));
208       global_event_mask.Set(to_remove, false);
209       global_event_mask.Set(to_add, true);
210     }
211 
212     if (unioned_thread_event_mask.Test(to_remove)) {
213       CHECK(!unioned_thread_event_mask.Test(to_add));
214       unioned_thread_event_mask.Set(to_remove, false);
215       unioned_thread_event_mask.Set(to_add, true);
216     }
217     for (auto thread_mask : thread_event_masks) {
218       if (thread_mask.second.Test(to_remove)) {
219         CHECK(!thread_mask.second.Test(to_add));
220         thread_mask.second.Set(to_remove, false);
221         thread_mask.second.Set(to_add, true);
222       }
223     }
224   }
225 }
226 
RegisterArtJvmTiEnv(ArtJvmTiEnv * env)227 void EventHandler::RegisterArtJvmTiEnv(ArtJvmTiEnv* env) {
228   art::WriterMutexLock mu(art::Thread::Current(), envs_lock_);
229   envs.push_back(env);
230 }
231 
// Removes |env| from the global env list and recalculates the global event masks, since
// events only |env| had enabled may now be disabled everywhere.
void EventHandler::RemoveArtJvmTiEnv(ArtJvmTiEnv* env) {
  art::WriterMutexLock mu(art::Thread::Current(), envs_lock_);
  // NOTE(review): a previous comment here claimed entries are replaced with nullptr instead
  // of erased; the code below actually erases the entry while holding envs_lock_ for writing.
  auto it = std::find(envs.begin(), envs.end(), env);
  if (it != envs.end()) {
    envs.erase(it);
    // Re-derive the global enabled state for every event now that this env is gone.
    for (size_t i = static_cast<size_t>(ArtJvmtiEvent::kMinEventTypeVal);
         i <= static_cast<size_t>(ArtJvmtiEvent::kMaxEventTypeVal);
         ++i) {
      RecalculateGlobalEventMaskLocked(static_cast<ArtJvmtiEvent>(i));
    }
  }
}
246 
IsThreadControllable(ArtJvmtiEvent event)247 static bool IsThreadControllable(ArtJvmtiEvent event) {
248   switch (event) {
249     case ArtJvmtiEvent::kVmInit:
250     case ArtJvmtiEvent::kVmStart:
251     case ArtJvmtiEvent::kVmDeath:
252     case ArtJvmtiEvent::kThreadStart:
253     case ArtJvmtiEvent::kCompiledMethodLoad:
254     case ArtJvmtiEvent::kCompiledMethodUnload:
255     case ArtJvmtiEvent::kDynamicCodeGenerated:
256     case ArtJvmtiEvent::kDataDumpRequest:
257     case ArtJvmtiEvent::kObsoleteObjectCreated:
258       return false;
259 
260     default:
261       return true;
262   }
263 }
264 
265 template<typename Type>
AddLocalRef(art::JNIEnvExt * e,art::ObjPtr<art::mirror::Object> obj)266 static Type AddLocalRef(art::JNIEnvExt* e, art::ObjPtr<art::mirror::Object> obj)
267     REQUIRES_SHARED(art::Locks::mutator_lock_) {
268   return (obj == nullptr) ? nullptr : e->AddLocalReference<Type>(obj);
269 }
270 
// Dispatches |kEvent| through |handler|, automatically supplying the JNIEnv and a local
// reference to the current thread's Java peer as the leading callback arguments; |args| are
// forwarded after them.
template<ArtJvmtiEvent kEvent, typename ...Args>
static void RunEventCallback(EventHandler* handler,
                             art::Thread* self,
                             art::JNIEnvExt* jnienv,
                             Args... args)
    REQUIRES_SHARED(art::Locks::mutator_lock_) {
  // Scoped so the thread local ref is released as soon as the dispatch returns.
  ScopedLocalRef<jthread> thread_jni(jnienv, AddLocalRef<jthread>(jnienv, self->GetPeer()));
  handler->DispatchEvent<kEvent>(self,
                                 static_cast<JNIEnv*>(jnienv),
                                 thread_jni.get(),
                                 args...);
}
283 
SetupDdmTracking(art::DdmCallback * listener,bool enable)284 static void SetupDdmTracking(art::DdmCallback* listener, bool enable) {
285   art::ScopedObjectAccess soa(art::Thread::Current());
286   if (enable) {
287     art::Runtime::Current()->GetRuntimeCallbacks()->AddDdmCallback(listener);
288   } else {
289     art::Runtime::Current()->GetRuntimeCallbacks()->RemoveDdmCallback(listener);
290   }
291 }
292 
// Forwards DDM chunk publications from the runtime to any JVMTI env that has enabled the
// kDdmPublishChunk extension event.
class JvmtiDdmChunkListener : public art::DdmCallback {
 public:
  explicit JvmtiDdmChunkListener(EventHandler* handler) : handler_(handler) {}

  // Invoked by the runtime when a DDM chunk of |type| is published; re-dispatches the raw
  // payload bytes to interested agents.
  void DdmPublishChunk(uint32_t type, const art::ArrayRef<const uint8_t>& data)
      override REQUIRES_SHARED(art::Locks::mutator_lock_) {
    // Cheap anywhere-enabled check first so disabled agents cost nothing per chunk.
    if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kDdmPublishChunk)) {
      art::Thread* self = art::Thread::Current();
      handler_->DispatchEvent<ArtJvmtiEvent::kDdmPublishChunk>(
          self,
          static_cast<JNIEnv*>(self->GetJniEnv()),
          static_cast<jint>(type),
          static_cast<jint>(data.size()),
          reinterpret_cast<const jbyte*>(data.data()));
    }
  }

 private:
  EventHandler* handler_;  // Not owned; never deleted by this class.

  DISALLOW_COPY_AND_ASSIGN(JvmtiDdmChunkListener);
};
315 
// Reports tracked heap allocations as JVMTI VMObjectAlloc events.
class JvmtiEventAllocationListener : public AllocationManager::AllocationCallback {
 public:
  explicit JvmtiEventAllocationListener(EventHandler* handler) : handler_(handler) {}

  // Called for an allocation of |byte_count| bytes. |obj| points at the caller's reference
  // to the newly allocated object.
  void ObjectAllocated(art::Thread* self, art::ObjPtr<art::mirror::Object>* obj, size_t byte_count)
      override REQUIRES_SHARED(art::Locks::mutator_lock_) {
    DCHECK_EQ(self, art::Thread::Current());

    if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kVmObjectAlloc)) {
      // Wrap the out-pointer in a handle — presumably so the reference stays valid (and is
      // written back) if the agent callback causes a suspension/GC; confirm against
      // StackHandleScope::NewHandleWrapper semantics.
      art::StackHandleScope<1> hs(self);
      auto h = hs.NewHandleWrapper(obj);
      // jvmtiEventVMObjectAlloc parameters:
      //      jvmtiEnv *jvmti_env,
      //      JNIEnv* jni_env,
      //      jthread thread,
      //      jobject object,
      //      jclass object_klass,
      //      jlong size
      art::JNIEnvExt* jni_env = self->GetJniEnv();
      ScopedLocalRef<jobject> object(
          jni_env, jni_env->AddLocalReference<jobject>(*obj));
      ScopedLocalRef<jclass> klass(
          jni_env, jni_env->AddLocalReference<jclass>(obj->Ptr()->GetClass()));

      RunEventCallback<ArtJvmtiEvent::kVmObjectAlloc>(handler_,
                                                      self,
                                                      jni_env,
                                                      object.get(),
                                                      klass.get(),
                                                      static_cast<jlong>(byte_count));
    }
  }

 private:
  EventHandler* handler_;  // Not owned; never deleted by this class.
};
352 
SetupObjectAllocationTracking(bool enable)353 static void SetupObjectAllocationTracking(bool enable) {
354   // We must not hold the mutator lock here, but if we're in FastJNI, for example, we might. For
355   // now, do a workaround: (possibly) acquire and release.
356   art::ScopedObjectAccess soa(art::Thread::Current());
357   if (enable) {
358     AllocationManager::Get()->EnableAllocationCallback(soa.Self());
359   } else {
360     AllocationManager::Get()->DisableAllocationCallback(soa.Self());
361   }
362 }
363 
// Translates runtime monitor callbacks into the JVMTI monitor-contention and Object#wait
// event family.
class JvmtiMonitorListener : public art::MonitorCallback {
 public:
  explicit JvmtiMonitorListener(EventHandler* handler) : handler_(handler) {}

  // A thread is about to block acquiring monitor |m| -> MONITOR_CONTENDED_ENTER.
  void MonitorContendedLocking(art::Monitor* m)
      override REQUIRES_SHARED(art::Locks::mutator_lock_) {
    if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMonitorContendedEnter)) {
      art::Thread* self = art::Thread::Current();
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      ScopedLocalRef<jobject> mon(jnienv, AddLocalRef<jobject>(jnienv, m->GetObject()));
      RunEventCallback<ArtJvmtiEvent::kMonitorContendedEnter>(
          handler_,
          self,
          jnienv,
          mon.get());
    }
  }

  // The previously contended monitor |m| has now been acquired -> MONITOR_CONTENDED_ENTERED.
  void MonitorContendedLocked(art::Monitor* m)
      override REQUIRES_SHARED(art::Locks::mutator_lock_) {
    if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMonitorContendedEntered)) {
      art::Thread* self = art::Thread::Current();
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      ScopedLocalRef<jobject> mon(jnienv, AddLocalRef<jobject>(jnienv, m->GetObject()));
      RunEventCallback<ArtJvmtiEvent::kMonitorContendedEntered>(
          handler_,
          self,
          jnienv,
          mon.get());
    }
  }

  // A thread is about to sleep in Object#wait on |obj| -> MONITOR_WAIT. |timeout| is
  // forwarded to the agent unchanged.
  void ObjectWaitStart(art::Handle<art::mirror::Object> obj, int64_t timeout)
      override REQUIRES_SHARED(art::Locks::mutator_lock_) {
    if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMonitorWait)) {
      art::Thread* self = art::Thread::Current();
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      ScopedLocalRef<jobject> mon(jnienv, AddLocalRef<jobject>(jnienv, obj.Get()));
      RunEventCallback<ArtJvmtiEvent::kMonitorWait>(
          handler_,
          self,
          jnienv,
          mon.get(),
          static_cast<jlong>(timeout));
    }
  }


  // Our interpretation of the spec is that the JVMTI_EVENT_MONITOR_WAITED will be sent immediately
  // after a thread has woken up from a sleep caused by a call to Object#wait. If the thread will
  // never go to sleep (due to not having the lock, having bad arguments, or having an exception
  // propagated from JVMTI_EVENT_MONITOR_WAIT) we will not send this event.
  //
  // This does not fully match the RI semantics. Specifically, we will not send the
  // JVMTI_EVENT_MONITOR_WAITED event in one situation where the RI would, there was an exception in
  // the JVMTI_EVENT_MONITOR_WAIT event but otherwise the call was fine. In that case the RI would
  // send this event and return without going to sleep.
  //
  // See b/65558434 for more discussion.
  void MonitorWaitFinished(art::Monitor* m, bool timeout)
      override REQUIRES_SHARED(art::Locks::mutator_lock_) {
    if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMonitorWaited)) {
      art::Thread* self = art::Thread::Current();
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      ScopedLocalRef<jobject> mon(jnienv, AddLocalRef<jobject>(jnienv, m->GetObject()));
      RunEventCallback<ArtJvmtiEvent::kMonitorWaited>(
          handler_,
          self,
          jnienv,
          mon.get(),
          static_cast<jboolean>(timeout));
    }
  }

 private:
  EventHandler* handler_;  // Not owned; never deleted by this class.
};
441 
// Reports LockSupport park()/unpark() activity using the same JVMTI events as Object#wait:
// park start is surfaced as MONITOR_WAIT and park finish as MONITOR_WAITED, with the thread's
// parkBlocker (or the thread's own peer) standing in for the monitor object.
class JvmtiParkListener : public art::ParkCallback {
 public:
  explicit JvmtiParkListener(EventHandler* handler) : handler_(handler) {}

  // Called just before the thread parks. Per the conversion below, |timeout| is in
  // nanoseconds for a relative park and a millisecond wall-clock deadline (compared against
  // gettimeofday) for an absolute park.
  void ThreadParkStart(bool is_absolute, int64_t timeout)
      override REQUIRES_SHARED(art::Locks::mutator_lock_) {
    if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMonitorWait)) {
      art::Thread* self = art::Thread::Current();
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      // Report java.lang.Thread.parkBlocker as the "monitor", falling back to the thread's
      // own peer object when no blocker is set.
      art::ArtField* parkBlockerField = art::jni::DecodeArtField(
          art::WellKnownClasses::java_lang_Thread_parkBlocker);
      art::ObjPtr<art::mirror::Object> blocker_obj = parkBlockerField->GetObj(self->GetPeer());
      if (blocker_obj.IsNull()) {
        blocker_obj = self->GetPeer();
      }
      int64_t timeout_ms;
      if (!is_absolute) {
        if (timeout == 0) {
          // A zero relative park waits indefinitely; MONITOR_WAIT encodes that as 0.
          timeout_ms = 0;
        } else {
          timeout_ms = timeout / 1000000;
          if (timeout_ms == 0) {
            // If we were instructed to park for a nonzero number of nanoseconds, but not enough
            // to be a full millisecond, round up to 1 ms. A nonzero park() call will return
            // soon, but a 0 wait or park call will wait indefinitely.
            timeout_ms = 1;
          }
        }
      } else {
        struct timeval tv;
        gettimeofday(&tv, (struct timezone *) nullptr);
        int64_t now = tv.tv_sec * 1000LL + tv.tv_usec / 1000;
        if (now < timeout) {
          timeout_ms = timeout - now;
        } else {
          // Waiting for 0 ms is an indefinite wait; parking until a time in
          // the past or the current time will return immediately, so emulate
          // the shortest possible wait event.
          timeout_ms = 1;
        }
      }
      ScopedLocalRef<jobject> blocker(jnienv, AddLocalRef<jobject>(jnienv, blocker_obj.Ptr()));
      RunEventCallback<ArtJvmtiEvent::kMonitorWait>(
          handler_,
          self,
          jnienv,
          blocker.get(),
          static_cast<jlong>(timeout_ms));
    }
  }


  // Our interpretation of the spec is that the JVMTI_EVENT_MONITOR_WAITED will be sent immediately
  // after a thread has woken up from a sleep caused by a call to Object#wait. If the thread will
  // never go to sleep (due to not having the lock, having bad arguments, or having an exception
  // propagated from JVMTI_EVENT_MONITOR_WAIT) we will not send this event.
  //
  // This does not fully match the RI semantics. Specifically, we will not send the
  // JVMTI_EVENT_MONITOR_WAITED event in one situation where the RI would, there was an exception in
  // the JVMTI_EVENT_MONITOR_WAIT event but otherwise the call was fine. In that case the RI would
  // send this event and return without going to sleep.
  //
  // See b/65558434 for more discussion.
  void ThreadParkFinished(bool timeout) override REQUIRES_SHARED(art::Locks::mutator_lock_) {
    if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMonitorWaited)) {
      art::Thread* self = art::Thread::Current();
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      // Re-read the parkBlocker (or fall back to the thread peer) to report as the monitor.
      art::ArtField* parkBlockerField = art::jni::DecodeArtField(
          art::WellKnownClasses::java_lang_Thread_parkBlocker);
      art::ObjPtr<art::mirror::Object> blocker_obj = parkBlockerField->GetObj(self->GetPeer());
      if (blocker_obj.IsNull()) {
        blocker_obj = self->GetPeer();
      }
      ScopedLocalRef<jobject> blocker(jnienv, AddLocalRef<jobject>(jnienv, blocker_obj.Ptr()));
      RunEventCallback<ArtJvmtiEvent::kMonitorWaited>(
          handler_,
          self,
          jnienv,
          blocker.get(),
          static_cast<jboolean>(timeout));
    }
  }

 private:
  EventHandler* handler_;  // Not owned; never deleted by this class.
};
528 
SetupMonitorListener(art::MonitorCallback * monitor_listener,art::ParkCallback * park_listener,bool enable)529 static void SetupMonitorListener(art::MonitorCallback* monitor_listener, art::ParkCallback* park_listener, bool enable) {
530   // We must not hold the mutator lock here, but if we're in FastJNI, for example, we might. For
531   // now, do a workaround: (possibly) acquire and release.
532   art::ScopedObjectAccess soa(art::Thread::Current());
533   if (enable) {
534     art::Runtime::Current()->GetRuntimeCallbacks()->AddMonitorCallback(monitor_listener);
535     art::Runtime::Current()->GetRuntimeCallbacks()->AddParkCallback(park_listener);
536   } else {
537     art::Runtime::Current()->GetRuntimeCallbacks()->RemoveMonitorCallback(monitor_listener);
538     art::Runtime::Current()->GetRuntimeCallbacks()->RemoveParkCallback(park_listener);
539   }
540 }
541 
// Report GC pauses (see spec) as GARBAGE_COLLECTION_START and GARBAGE_COLLECTION_END.
class JvmtiGcPauseListener : public art::gc::GcPauseListener {
 public:
  explicit JvmtiGcPauseListener(EventHandler* handler)
      : handler_(handler),
        start_enabled_(false),
        finish_enabled_(false) {}

  // Invoked by the heap at the beginning of a GC pause.
  void StartPause() override {
    handler_->DispatchEvent<ArtJvmtiEvent::kGarbageCollectionStart>(art::Thread::Current());
  }

  // Invoked by the heap at the end of a GC pause.
  void EndPause() override {
    handler_->DispatchEvent<ArtJvmtiEvent::kGarbageCollectionFinish>(art::Thread::Current());
  }

  // True while at least one of the two GC events has a subscriber; used by
  // SetupGcPauseTracking to decide whether this listener should be installed on the heap.
  bool IsEnabled() {
    return start_enabled_ || finish_enabled_;
  }

  void SetStartEnabled(bool e) {
    start_enabled_ = e;
  }

  void SetFinishEnabled(bool e) {
    finish_enabled_ = e;
  }

 private:
  EventHandler* handler_;  // Not owned; never deleted by this class.
  bool start_enabled_;     // GARBAGE_COLLECTION_START requested anywhere.
  bool finish_enabled_;    // GARBAGE_COLLECTION_FINISH requested anywhere.
};
575 
SetupGcPauseTracking(JvmtiGcPauseListener * listener,ArtJvmtiEvent event,bool enable)576 static void SetupGcPauseTracking(JvmtiGcPauseListener* listener, ArtJvmtiEvent event, bool enable) {
577   bool old_state = listener->IsEnabled();
578 
579   if (event == ArtJvmtiEvent::kGarbageCollectionStart) {
580     listener->SetStartEnabled(enable);
581   } else {
582     listener->SetFinishEnabled(enable);
583   }
584 
585   bool new_state = listener->IsEnabled();
586 
587   if (old_state != new_state) {
588     if (new_state) {
589       art::Runtime::Current()->GetHeap()->SetGcPauseListener(listener);
590     } else {
591       art::Runtime::Current()->GetHeap()->RemoveGcPauseListener();
592     }
593   }
594 }
595 
596 class JvmtiMethodTraceListener final : public art::instrumentation::InstrumentationListener {
597  public:
  // |handler| receives the dispatched events; non_standard_exits_lock_ guards the pending
  // ForceEarlyReturn return-value map consulted by the MethodExited callbacks below.
  explicit JvmtiMethodTraceListener(EventHandler* handler)
      : event_handler_(handler),
        non_standard_exits_lock_("JVMTI NonStandard Exits list lock",
                                 art::LockLevel::kGenericBottomLock) {}
602 
  // Records a pending ForceEarlyReturn value for |frame| so the next MethodExited callback on
  // that frame reports it instead of the method's real result. Object values are pinned with
  // a new global ref; a value previously recorded for the same frame is replaced.
  void AddDelayedNonStandardExitEvent(const art::ShadowFrame* frame, bool is_object, jvalue val)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
          REQUIRES(art::Locks::user_code_suspension_lock_, art::Locks::thread_list_lock_) {
    art::Thread* self = art::Thread::Current();
    jobject to_cleanup = nullptr;
    jobject new_val = is_object ? self->GetJniEnv()->NewGlobalRef(val.l) : nullptr;
    {
      art::MutexLock mu(self, non_standard_exits_lock_);
      NonStandardExitEventInfo saved{ nullptr, { .j = 0 } };
      if (is_object) {
        saved.return_val_obj_ = new_val;
        saved.return_val_.l = saved.return_val_obj_;
      } else {
        // 64-bit slot covers all primitive return kinds.
        saved.return_val_.j = val.j;
      }
      // only objects need cleanup.
      if (UNLIKELY(is_object && non_standard_exits_.find(frame) != non_standard_exits_.end())) {
        to_cleanup = non_standard_exits_.find(frame)->second.return_val_obj_;
      }
      non_standard_exits_.insert_or_assign(frame, saved);
    }
    // Release the displaced global ref (if any) after dropping the lock; DeleteGlobalRef
    // tolerates null.
    self->GetJniEnv()->DeleteGlobalRef(to_cleanup);
  }
626 
  // Call-back for when a method is entered: dispatches JVMTI MethodEntry. Runtime (internal)
  // methods are never reported.
  void MethodEntered(art::Thread* self,
                     art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
                     art::ArtMethod* method,
                     uint32_t dex_pc ATTRIBUTE_UNUSED)
      REQUIRES_SHARED(art::Locks::mutator_lock_) override {
    if (!method->IsRuntimeMethod() &&
        event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMethodEntry)) {
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      RunEventCallback<ArtJvmtiEvent::kMethodEntry>(event_handler_,
                                                    self,
                                                    jnienv,
                                                    art::jni::EncodeArtMethod(method));
    }
  }
642 
  // TODO Maybe try to combine this with below using templates?
  // Callback for when a method is exited with a reference return value. Dispatches JVMTI
  // MethodExit, first substituting any pending ForceEarlyReturn value recorded for this
  // frame by AddDelayedNonStandardExitEvent.
  void MethodExited(art::Thread* self,
                    art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
                    art::ArtMethod* method,
                    uint32_t dex_pc ATTRIBUTE_UNUSED,
                    art::instrumentation::OptionalFrame frame,
                    art::MutableHandle<art::mirror::Object>& return_value)
      REQUIRES_SHARED(art::Locks::mutator_lock_) override {
    if (method->IsRuntimeMethod()) {
      return;
    }
    // If a ForceEarlyReturn value was queued for this exact shadow frame, replace the real
    // return value with it before reporting the exit.
    if (frame.has_value() && UNLIKELY(event_handler_->IsEventEnabledAnywhere(
                                 ArtJvmtiEvent::kForceEarlyReturnUpdateReturnValue))) {
      DCHECK(!frame->get().GetSkipMethodExitEvents());
      bool has_return = false;
      jobject ret_val = nullptr;
      {
        art::MutexLock mu(self, non_standard_exits_lock_);
        const art::ShadowFrame* sframe = &frame.value().get();
        const auto it = non_standard_exits_.find(sframe);
        if (it != non_standard_exits_.end()) {
          ret_val = it->second.return_val_obj_;
          non_standard_exits_.erase(it);
          has_return = true;
        }
      }
      if (has_return) {
        return_value.Assign(self->DecodeJObject(ret_val));
        ScopedLocalRef<jthread> thr(self->GetJniEnv(),
                                    self->GetJniEnv()->NewLocalRef(self->GetPeer()));
        // NOTE(review): the thread drops to native before releasing the pinning global ref
        // and disabling the internal event — presumably because SetInternalEvent may need to
        // suspend threads; confirm against EventHandler::SetInternalEvent.
        art::ScopedThreadSuspension sts(self, art::ThreadState::kNative);
        self->GetJniEnv()->DeleteGlobalRef(ret_val);
        event_handler_->SetInternalEvent(
            thr.get(), ArtJvmtiEvent::kForceEarlyReturnUpdateReturnValue, JVMTI_DISABLE);
      }
    }
    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMethodExit)) {
      // This overload must only be used for methods whose declared return type is a
      // reference (kPrimNot); primitives go through the JValue overload below.
      DCHECK_EQ(
          method->GetInterfaceMethodIfProxy(art::kRuntimePointerSize)->GetReturnTypePrimitive(),
          art::Primitive::kPrimNot) << method->PrettyMethod();
      DCHECK(!self->IsExceptionPending());
      jvalue val;
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      ScopedLocalRef<jobject> return_jobj(jnienv, AddLocalRef<jobject>(jnienv, return_value.Get()));
      val.l = return_jobj.get();
      RunEventCallback<ArtJvmtiEvent::kMethodExit>(
          event_handler_,
          self,
          jnienv,
          art::jni::EncodeArtMethod(method),
          /*was_popped_by_exception=*/ static_cast<jboolean>(JNI_FALSE),
          val);
    }
  }
698 
699   // Call-back for when a method is exited.
  void MethodExited(art::Thread* self,
                    art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
                    art::ArtMethod* method,
                    uint32_t dex_pc ATTRIBUTE_UNUSED,
                    art::instrumentation::OptionalFrame frame,
                    art::JValue& return_value) REQUIRES_SHARED(art::Locks::mutator_lock_) override {
    // Primitive-return variant of MethodExited (the DCHECK below asserts the
    // return type is not kPrimNot); the object-return case is handled by the
    // other overload.
    if (frame.has_value() &&
        UNLIKELY(event_handler_->IsEventEnabledAnywhere(
            ArtJvmtiEvent::kForceEarlyReturnUpdateReturnValue))) {
      DCHECK(!frame->get().GetSkipMethodExitEvents());
      bool has_return = false;
      {
        // Consume (find + erase) any pending ForceEarlyReturn value registered
        // against this exact shadow frame, overwriting the method's natural
        // return value. The lock scope is kept minimal; the event toggle below
        // must happen outside it.
        art::MutexLock mu(self, non_standard_exits_lock_);
        const art::ShadowFrame* sframe = &frame.value().get();
        const auto it = non_standard_exits_.find(sframe);
        if (it != non_standard_exits_.end()) {
          return_value.SetJ(it->second.return_val_.j);
          non_standard_exits_.erase(it);
          has_return = true;
        }
      }
      if (has_return) {
        // The thread peer local-ref must be created before dropping to kNative.
        // NOTE(review): the transition to kNative before SetInternalEvent is
        // presumably because it may suspend or take higher-level locks —
        // confirm before reordering.
        ScopedLocalRef<jthread> thr(self->GetJniEnv(),
                                    self->GetJniEnv()->NewLocalRef(self->GetPeer()));
        art::ScopedThreadSuspension sts(self, art::ThreadState::kNative);
        event_handler_->SetInternalEvent(
            thr.get(), ArtJvmtiEvent::kForceEarlyReturnUpdateReturnValue, JVMTI_DISABLE);
      }
    }
    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMethodExit)) {
      DCHECK_NE(
          method->GetInterfaceMethodIfProxy(art::kRuntimePointerSize)->GetReturnTypePrimitive(),
          art::Primitive::kPrimNot) << method->PrettyMethod();
      DCHECK(!self->IsExceptionPending()) << self->GetException()->Dump();
      jvalue val;
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      // 64bit integer is the largest value in the union so we should be fine simply copying it into
      // the union.
      val.j = return_value.GetJ();
      RunEventCallback<ArtJvmtiEvent::kMethodExit>(
          event_handler_,
          self,
          jnienv,
          art::jni::EncodeArtMethod(method),
          /*was_popped_by_exception=*/ static_cast<jboolean>(JNI_FALSE),
          val);
    }
  }
748 
749   // Call-back for when a method is popped due to an exception throw. A method will either cause a
750   // MethodExited call-back or a MethodUnwind call-back when its activation is removed.
  void MethodUnwind(art::Thread* self,
                    art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
                    art::ArtMethod* method,
                    uint32_t dex_pc ATTRIBUTE_UNUSED)
      REQUIRES_SHARED(art::Locks::mutator_lock_) override {
    // Reported to agents as a MethodExit event with was_popped_by_exception
    // set, per the JVMTI spec; there is no separate "unwind" jvmti event.
    if (!method->IsRuntimeMethod() &&
        event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMethodExit)) {
      jvalue val;
      // Just set this to 0xffffffffffffffff so it's not uninitialized.
      val.j = static_cast<jlong>(-1);
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      // Stash and clear the in-flight exception so the agent callback runs
      // without a pending exception; it is restored below unless the callback
      // itself left a new one pending.
      art::StackHandleScope<1> hs(self);
      art::Handle<art::mirror::Throwable> old_exception(hs.NewHandle(self->GetException()));
      CHECK(!old_exception.IsNull());
      self->ClearException();
      RunEventCallback<ArtJvmtiEvent::kMethodExit>(
          event_handler_,
          self,
          jnienv,
          art::jni::EncodeArtMethod(method),
          /*was_popped_by_exception=*/ static_cast<jboolean>(JNI_TRUE),
          val);
      // Match RI behavior of just throwing away original exception if a new one is thrown.
      if (LIKELY(!self->IsExceptionPending())) {
        self->SetException(old_exception.Get());
      }
    }
  }
779 
780   // Call-back for when the dex pc moves in a method.
DexPcMoved(art::Thread * self,art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,art::ArtMethod * method,uint32_t new_dex_pc)781   void DexPcMoved(art::Thread* self,
782                   art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
783                   art::ArtMethod* method,
784                   uint32_t new_dex_pc)
785       REQUIRES_SHARED(art::Locks::mutator_lock_) override {
786     DCHECK(!method->IsRuntimeMethod());
787     // Default methods might be copied to multiple classes. We need to get the canonical version of
788     // this method so that we can check for breakpoints correctly.
789     // TODO We should maybe do this on other events to ensure that we are consistent WRT default
790     // methods. This could interact with obsolete methods if we ever let interface redefinition
791     // happen though.
792     method = method->GetCanonicalMethod();
793     art::JNIEnvExt* jnienv = self->GetJniEnv();
794     jmethodID jmethod = art::jni::EncodeArtMethod(method);
795     jlocation location = static_cast<jlocation>(new_dex_pc);
796     // Step event is reported first according to the spec.
797     if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kSingleStep)) {
798       RunEventCallback<ArtJvmtiEvent::kSingleStep>(event_handler_, self, jnienv, jmethod, location);
799     }
800     // Next we do the Breakpoint events. The Dispatch code will filter the individual
801     if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kBreakpoint)) {
802       RunEventCallback<ArtJvmtiEvent::kBreakpoint>(event_handler_, self, jnienv, jmethod, location);
803     }
804   }
805 
806   // Call-back for when we read from a field.
FieldRead(art::Thread * self,art::Handle<art::mirror::Object> this_object,art::ArtMethod * method_p,uint32_t dex_pc,art::ArtField * field_p)807   void FieldRead(art::Thread* self,
808                  art::Handle<art::mirror::Object> this_object,
809                  art::ArtMethod* method_p,
810                  uint32_t dex_pc,
811                  art::ArtField* field_p)
812       REQUIRES_SHARED(art::Locks::mutator_lock_) override {
813     if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kFieldAccess)) {
814       art::StackReflectiveHandleScope<1, 1> rhs(self);
815       art::ReflectiveHandle<art::ArtField> field(rhs.NewHandle(field_p));
816       art::ReflectiveHandle<art::ArtMethod> method(rhs.NewHandle(method_p));
817       art::JNIEnvExt* jnienv = self->GetJniEnv();
818       // DCHECK(!self->IsExceptionPending());
819       ScopedLocalRef<jobject> this_ref(jnienv, AddLocalRef<jobject>(jnienv, this_object.Get()));
820       ScopedLocalRef<jobject> fklass(jnienv,
821                                      AddLocalRef<jobject>(jnienv,
822                                                           field->GetDeclaringClass().Ptr()));
823       RunEventCallback<ArtJvmtiEvent::kFieldAccess>(event_handler_,
824                                                     self,
825                                                     jnienv,
826                                                     art::jni::EncodeArtMethod(method),
827                                                     static_cast<jlocation>(dex_pc),
828                                                     static_cast<jclass>(fklass.get()),
829                                                     this_ref.get(),
830                                                     art::jni::EncodeArtField(field));
831     }
832   }
833 
FieldWritten(art::Thread * self,art::Handle<art::mirror::Object> this_object,art::ArtMethod * method_p,uint32_t dex_pc,art::ArtField * field_p,art::Handle<art::mirror::Object> new_val)834   void FieldWritten(art::Thread* self,
835                     art::Handle<art::mirror::Object> this_object,
836                     art::ArtMethod* method_p,
837                     uint32_t dex_pc,
838                     art::ArtField* field_p,
839                     art::Handle<art::mirror::Object> new_val)
840       REQUIRES_SHARED(art::Locks::mutator_lock_) override {
841     if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kFieldModification)) {
842       art::JNIEnvExt* jnienv = self->GetJniEnv();
843       art::StackReflectiveHandleScope<1, 1> rhs(self);
844       art::ReflectiveHandle<art::ArtField> field(rhs.NewHandle(field_p));
845       art::ReflectiveHandle<art::ArtMethod> method(rhs.NewHandle(method_p));
846       // DCHECK(!self->IsExceptionPending());
847       ScopedLocalRef<jobject> this_ref(jnienv, AddLocalRef<jobject>(jnienv, this_object.Get()));
848       ScopedLocalRef<jobject> fklass(jnienv,
849                                      AddLocalRef<jobject>(jnienv,
850                                                           field->GetDeclaringClass().Ptr()));
851       ScopedLocalRef<jobject> fval(jnienv, AddLocalRef<jobject>(jnienv, new_val.Get()));
852       jvalue val;
853       val.l = fval.get();
854       RunEventCallback<ArtJvmtiEvent::kFieldModification>(
855           event_handler_,
856           self,
857           jnienv,
858           art::jni::EncodeArtMethod(method),
859           static_cast<jlocation>(dex_pc),
860           static_cast<jclass>(fklass.get()),
861           field->IsStatic() ? nullptr :  this_ref.get(),
862           art::jni::EncodeArtField(field),
863           'L',  // type_char
864           val);
865     }
866   }
867 
868   // Call-back for when we write into a field.
FieldWritten(art::Thread * self,art::Handle<art::mirror::Object> this_object,art::ArtMethod * method_p,uint32_t dex_pc,art::ArtField * field_p,const art::JValue & field_value)869   void FieldWritten(art::Thread* self,
870                     art::Handle<art::mirror::Object> this_object,
871                     art::ArtMethod* method_p,
872                     uint32_t dex_pc,
873                     art::ArtField* field_p,
874                     const art::JValue& field_value)
875       REQUIRES_SHARED(art::Locks::mutator_lock_) override {
876     if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kFieldModification)) {
877       art::JNIEnvExt* jnienv = self->GetJniEnv();
878       art::StackReflectiveHandleScope<1, 1> rhs(self);
879       art::ReflectiveHandle<art::ArtField> field(rhs.NewHandle(field_p));
880       art::ReflectiveHandle<art::ArtMethod> method(rhs.NewHandle(method_p));
881       DCHECK(!self->IsExceptionPending());
882       ScopedLocalRef<jobject> this_ref(jnienv, AddLocalRef<jobject>(jnienv, this_object.Get()));
883       ScopedLocalRef<jobject> fklass(jnienv,
884                                      AddLocalRef<jobject>(jnienv,
885                                                           field->GetDeclaringClass().Ptr()));
886       char type_char = art::Primitive::Descriptor(field->GetTypeAsPrimitiveType())[0];
887       jvalue val;
888       // 64bit integer is the largest value in the union so we should be fine simply copying it into
889       // the union.
890       val.j = field_value.GetJ();
891       RunEventCallback<ArtJvmtiEvent::kFieldModification>(
892           event_handler_,
893           self,
894           jnienv,
895           art::jni::EncodeArtMethod(method),
896           static_cast<jlocation>(dex_pc),
897           static_cast<jclass>(fklass.get()),
898           field->IsStatic() ? nullptr :  this_ref.get(),  // nb static field modification get given
899                                                           // the class as this_object for some
900                                                           // reason.
901           art::jni::EncodeArtField(field),
902           type_char,
903           val);
904     }
905   }
906 
WatchedFramePop(art::Thread * self,const art::ShadowFrame & frame)907   void WatchedFramePop(art::Thread* self, const art::ShadowFrame& frame)
908       REQUIRES_SHARED(art::Locks::mutator_lock_) override {
909       art::JNIEnvExt* jnienv = self->GetJniEnv();
910     // Remove the force-interpreter added by the WatchFrame.
911     {
912       art::MutexLock mu(self, *art::Locks::thread_list_lock_);
913       CHECK_GT(self->ForceInterpreterCount(), 0u);
914       self->DecrementForceInterpreterCount();
915     }
916     jboolean is_exception_pending = self->IsExceptionPending();
917     RunEventCallback<ArtJvmtiEvent::kFramePop>(
918         event_handler_,
919         self,
920         jnienv,
921         art::jni::EncodeArtMethod(frame.GetMethod()),
922         is_exception_pending,
923         &frame);
924   }
925 
  static void FindCatchMethodsFromThrow(art::Thread* self,
                                        art::Handle<art::mirror::Throwable> exception,
                                        /*out*/ art::ArtMethod** out_method,
                                        /*out*/ uint32_t* dex_pc)
      REQUIRES_SHARED(art::Locks::mutator_lock_) {
    // Finds the location where this exception will most likely be caught. We ignore intervening
    // native frames (which could catch the exception) and return the closest java frame with a
    // compatible catch statement.
    // If no catch is found, *out_method stays nullptr and *dex_pc stays 0 (set
    // below before the walk).
    class CatchLocationFinder final : public art::StackVisitor {
     public:
      CatchLocationFinder(art::Thread* target,
                          art::Handle<art::mirror::Class> exception_class,
                          art::Context* context,
                          /*out*/ art::ArtMethod** out_catch_method,
                          /*out*/ uint32_t* out_catch_pc)
          REQUIRES_SHARED(art::Locks::mutator_lock_)
        : StackVisitor(target, context, art::StackVisitor::StackWalkKind::kIncludeInlinedFrames),
          exception_class_(exception_class),
          catch_method_ptr_(out_catch_method),
          catch_dex_pc_ptr_(out_catch_pc) {}

      // Returns true to continue walking, false once a catch block is found.
      bool VisitFrame() override REQUIRES_SHARED(art::Locks::mutator_lock_) {
        art::ArtMethod* method = GetMethod();
        DCHECK(method != nullptr);
        if (method->IsRuntimeMethod()) {
          return true;
        }

        if (!method->IsNative()) {
          uint32_t cur_dex_pc = GetDexPc();
          if (cur_dex_pc == art::dex::kDexNoIndex) {
            // This frame looks opaque. Just keep on going.
            return true;
          }
          bool has_no_move_exception = false;
          uint32_t found_dex_pc = method->FindCatchBlock(
              exception_class_, cur_dex_pc, &has_no_move_exception);
          if (found_dex_pc != art::dex::kDexNoIndex) {
            // We found the catch. Store the result and return.
            *catch_method_ptr_ = method;
            *catch_dex_pc_ptr_ = found_dex_pc;
            return false;
          }
        }
        return true;
      }

     private:
      art::Handle<art::mirror::Class> exception_class_;
      art::ArtMethod** catch_method_ptr_;
      uint32_t* catch_dex_pc_ptr_;

      DISALLOW_COPY_AND_ASSIGN(CatchLocationFinder);
    };

    art::StackHandleScope<1> hs(self);
    // Defaults reported when the walk finds no handler.
    *out_method = nullptr;
    *dex_pc = 0;
    std::unique_ptr<art::Context> context(art::Context::Create());

    CatchLocationFinder clf(self,
                            hs.NewHandle(exception->GetClass()),
                            context.get(),
                            /*out*/ out_method,
                            /*out*/ dex_pc);
    clf.WalkStack(/* include_transitions= */ false);
  }
993 
994   // Call-back when an exception is thrown.
ExceptionThrown(art::Thread * self,art::Handle<art::mirror::Throwable> exception_object)995   void ExceptionThrown(art::Thread* self, art::Handle<art::mirror::Throwable> exception_object)
996       REQUIRES_SHARED(art::Locks::mutator_lock_) override {
997     DCHECK(self->IsExceptionThrownByCurrentMethod(exception_object.Get()));
998     // The instrumentation events get rid of this for us.
999     DCHECK(!self->IsExceptionPending());
1000     if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kException)) {
1001       art::JNIEnvExt* jnienv = self->GetJniEnv();
1002       art::ArtMethod* catch_method;
1003       uint32_t catch_pc;
1004       FindCatchMethodsFromThrow(self, exception_object, &catch_method, &catch_pc);
1005       uint32_t dex_pc = 0;
1006       art::ArtMethod* method = self->GetCurrentMethod(&dex_pc,
1007                                                       /* check_suspended= */ true,
1008                                                       /* abort_on_error= */ art::kIsDebugBuild);
1009       ScopedLocalRef<jobject> exception(jnienv,
1010                                         AddLocalRef<jobject>(jnienv, exception_object.Get()));
1011       RunEventCallback<ArtJvmtiEvent::kException>(
1012           event_handler_,
1013           self,
1014           jnienv,
1015           art::jni::EncodeArtMethod(method),
1016           static_cast<jlocation>(dex_pc),
1017           exception.get(),
1018           art::jni::EncodeArtMethod(catch_method),
1019           static_cast<jlocation>(catch_pc));
1020     }
1021     return;
1022   }
1023 
1024   // Call-back when an exception is handled.
ExceptionHandled(art::Thread * self,art::Handle<art::mirror::Throwable> exception_object)1025   void ExceptionHandled(art::Thread* self, art::Handle<art::mirror::Throwable> exception_object)
1026       REQUIRES_SHARED(art::Locks::mutator_lock_) override {
1027     // Since the exception has already been handled there shouldn't be one pending.
1028     DCHECK(!self->IsExceptionPending());
1029     if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kExceptionCatch)) {
1030       art::JNIEnvExt* jnienv = self->GetJniEnv();
1031       uint32_t dex_pc;
1032       art::ArtMethod* method = self->GetCurrentMethod(&dex_pc,
1033                                                       /* check_suspended= */ true,
1034                                                       /* abort_on_error= */ art::kIsDebugBuild);
1035       ScopedLocalRef<jobject> exception(jnienv,
1036                                         AddLocalRef<jobject>(jnienv, exception_object.Get()));
1037       RunEventCallback<ArtJvmtiEvent::kExceptionCatch>(
1038           event_handler_,
1039           self,
1040           jnienv,
1041           art::jni::EncodeArtMethod(method),
1042           static_cast<jlocation>(dex_pc),
1043           exception.get());
1044     }
1045     return;
1046   }
1047 
1048   // Call-back for when we execute a branch.
Branch(art::Thread * self ATTRIBUTE_UNUSED,art::ArtMethod * method ATTRIBUTE_UNUSED,uint32_t dex_pc ATTRIBUTE_UNUSED,int32_t dex_pc_offset ATTRIBUTE_UNUSED)1049   void Branch(art::Thread* self ATTRIBUTE_UNUSED,
1050               art::ArtMethod* method ATTRIBUTE_UNUSED,
1051               uint32_t dex_pc ATTRIBUTE_UNUSED,
1052               int32_t dex_pc_offset ATTRIBUTE_UNUSED)
1053       REQUIRES_SHARED(art::Locks::mutator_lock_) override {
1054     return;
1055   }
1056 
 private:
  // Bookkeeping for a pending ForceEarlyReturn registered on one shadow frame.
  struct NonStandardExitEventInfo {
    // if non-null is a GlobalReference to the returned value.
    jobject return_val_obj_;
    // The return-value to be passed to the MethodExit event.
    jvalue return_val_;
  };

  EventHandler* const event_handler_;

  // Guards non_standard_exits_ below; must be acquired before
  // instrument_entrypoints_lock_ per the annotation.
  mutable art::Mutex non_standard_exits_lock_
      ACQUIRED_BEFORE(art::Locks::instrument_entrypoints_lock_);

  // Pending forced-return values keyed by shadow frame; entries are consumed
  // (erased) by the MethodExited callbacks above.
  std::unordered_map<const art::ShadowFrame*, NonStandardExitEventInfo> non_standard_exits_
      GUARDED_BY(non_standard_exits_lock_);
};
1073 
GetInstrumentationEventsFor(ArtJvmtiEvent event)1074 uint32_t EventHandler::GetInstrumentationEventsFor(ArtJvmtiEvent event) {
1075   switch (event) {
1076     case ArtJvmtiEvent::kMethodEntry:
1077       return art::instrumentation::Instrumentation::kMethodEntered;
1078     case ArtJvmtiEvent::kForceEarlyReturnUpdateReturnValue:
1079       // TODO We want to do this but supporting only having a single one is difficult.
1080       // return art::instrumentation::Instrumentation::kMethodExited;
1081     case ArtJvmtiEvent::kMethodExit: {
1082       DCHECK(event == ArtJvmtiEvent::kMethodExit ||
1083             event == ArtJvmtiEvent::kForceEarlyReturnUpdateReturnValue)
1084           << "event = " << static_cast<uint32_t>(event);
1085       ArtJvmtiEvent other = event == ArtJvmtiEvent::kMethodExit
1086                                 ? ArtJvmtiEvent::kForceEarlyReturnUpdateReturnValue
1087                                 : ArtJvmtiEvent::kMethodExit;
1088       if (LIKELY(!IsEventEnabledAnywhere(other))) {
1089         return art::instrumentation::Instrumentation::kMethodExited |
1090                art::instrumentation::Instrumentation::kMethodUnwind;
1091       } else {
1092         // The event needs to be kept around/is already enabled by the other jvmti event that uses
1093         // the same instrumentation event.
1094         return 0u;
1095       }
1096     }
1097     case ArtJvmtiEvent::kFieldModification:
1098       return art::instrumentation::Instrumentation::kFieldWritten;
1099     case ArtJvmtiEvent::kFieldAccess:
1100       return art::instrumentation::Instrumentation::kFieldRead;
1101     case ArtJvmtiEvent::kBreakpoint:
1102     case ArtJvmtiEvent::kSingleStep: {
1103       // Need to skip adding the listeners if the event is breakpoint/single-step since those events
1104       // share the same art-instrumentation underlying event. We need to give them their own deopt
1105       // request though so the test waits until here.
1106       DCHECK(event == ArtJvmtiEvent::kBreakpoint || event == ArtJvmtiEvent::kSingleStep);
1107       ArtJvmtiEvent other = event == ArtJvmtiEvent::kBreakpoint ? ArtJvmtiEvent::kSingleStep
1108                                                                 : ArtJvmtiEvent::kBreakpoint;
1109       if (LIKELY(!IsEventEnabledAnywhere(other))) {
1110         return art::instrumentation::Instrumentation::kDexPcMoved;
1111       } else {
1112         // The event needs to be kept around/is already enabled by the other jvmti event that uses
1113         // the same instrumentation event.
1114         return 0u;
1115       }
1116     }
1117     case ArtJvmtiEvent::kFramePop:
1118       return art::instrumentation::Instrumentation::kWatchedFramePop;
1119     case ArtJvmtiEvent::kException:
1120       return art::instrumentation::Instrumentation::kExceptionThrown;
1121     case ArtJvmtiEvent::kExceptionCatch:
1122       return art::instrumentation::Instrumentation::kExceptionHandled;
1123     default:
1124       LOG(FATAL) << "Unknown event ";
1125       UNREACHABLE();
1126   }
1127 }
1128 
// The amount of deoptimization a jvmti event requires before it can be
// delivered reliably; computed per-event by GetDeoptRequirement below.
enum class DeoptRequirement {
  // No deoptimization work required.
  kNone,
  // Limited/no deopt required.
  kLimited,
  // A single thread must be put into interpret only.
  kThread,
  // All methods and all threads deopted.
  kFull,
};
1139 
// Returns how much deoptimization enabling |event| requires. |thread| is the
// thread the event is being (un)set on, or nullptr for a global setting.
// The switch is deliberately exhaustive with no default so the compiler flags
// any newly added ArtJvmtiEvent value.
static DeoptRequirement GetDeoptRequirement(ArtJvmtiEvent event, jthread thread) {
  switch (event) {
    case ArtJvmtiEvent::kBreakpoint:
    case ArtJvmtiEvent::kException:
      return DeoptRequirement::kLimited;
    // TODO MethodEntry is needed due to inconsistencies between the interpreter and the trampoline
    // in how to handle exceptions.
    case ArtJvmtiEvent::kMethodEntry:
    case ArtJvmtiEvent::kExceptionCatch:
      return DeoptRequirement::kFull;
    // These can be scoped to a single thread when one is given; a null thread
    // means the event applies everywhere and needs full deopt.
    case ArtJvmtiEvent::kMethodExit:
    case ArtJvmtiEvent::kFieldModification:
    case ArtJvmtiEvent::kFieldAccess:
    case ArtJvmtiEvent::kSingleStep:
    case ArtJvmtiEvent::kFramePop:
    case ArtJvmtiEvent::kForceEarlyReturnUpdateReturnValue:
      return thread == nullptr ? DeoptRequirement::kFull : DeoptRequirement::kThread;
    // Everything below is delivered without interpreter deoptimization.
    case ArtJvmtiEvent::kVmInit:
    case ArtJvmtiEvent::kVmDeath:
    case ArtJvmtiEvent::kThreadStart:
    case ArtJvmtiEvent::kThreadEnd:
    case ArtJvmtiEvent::kClassFileLoadHookNonRetransformable:
    case ArtJvmtiEvent::kClassLoad:
    case ArtJvmtiEvent::kClassPrepare:
    case ArtJvmtiEvent::kVmStart:
    case ArtJvmtiEvent::kNativeMethodBind:
    case ArtJvmtiEvent::kCompiledMethodLoad:
    case ArtJvmtiEvent::kCompiledMethodUnload:
    case ArtJvmtiEvent::kDynamicCodeGenerated:
    case ArtJvmtiEvent::kDataDumpRequest:
    case ArtJvmtiEvent::kMonitorWait:
    case ArtJvmtiEvent::kMonitorWaited:
    case ArtJvmtiEvent::kMonitorContendedEnter:
    case ArtJvmtiEvent::kMonitorContendedEntered:
    case ArtJvmtiEvent::kResourceExhausted:
    case ArtJvmtiEvent::kGarbageCollectionStart:
    case ArtJvmtiEvent::kGarbageCollectionFinish:
    case ArtJvmtiEvent::kObjectFree:
    case ArtJvmtiEvent::kVmObjectAlloc:
    case ArtJvmtiEvent::kClassFileLoadHookRetransformable:
    case ArtJvmtiEvent::kDdmPublishChunk:
    case ArtJvmtiEvent::kObsoleteObjectCreated:
    case ArtJvmtiEvent::kStructuralDexFileLoadHook:
      return DeoptRequirement::kNone;
  }
}
1186 
HandleEventDeopt(ArtJvmtiEvent event,jthread thread,bool enable)1187 jvmtiError EventHandler::HandleEventDeopt(ArtJvmtiEvent event, jthread thread, bool enable) {
1188   DeoptRequirement deopt_req = GetDeoptRequirement(event, thread);
1189   // Make sure we can deopt.
1190   if (deopt_req != DeoptRequirement::kNone) {
1191     art::ScopedObjectAccess soa(art::Thread::Current());
1192     DeoptManager* deopt_manager = DeoptManager::Get();
1193     jvmtiError err = OK;
1194     if (enable) {
1195       deopt_manager->AddDeoptimizationRequester();
1196       switch (deopt_req) {
1197         case DeoptRequirement::kFull:
1198           deopt_manager->AddDeoptimizeAllMethods();
1199           break;
1200         case DeoptRequirement::kThread:
1201           err = deopt_manager->AddDeoptimizeThreadMethods(soa, thread);
1202           break;
1203         default:
1204           break;
1205       }
1206       if (err != OK) {
1207         deopt_manager->RemoveDeoptimizationRequester();
1208         return err;
1209       }
1210     } else {
1211       switch (deopt_req) {
1212         case DeoptRequirement::kFull:
1213           deopt_manager->RemoveDeoptimizeAllMethods();
1214           break;
1215         case DeoptRequirement::kThread:
1216           err = deopt_manager->RemoveDeoptimizeThreadMethods(soa, thread);
1217           break;
1218         default:
1219           break;
1220       }
1221       deopt_manager->RemoveDeoptimizationRequester();
1222       if (err != OK) {
1223         return err;
1224       }
1225     }
1226   }
1227   return OK;
1228 }
1229 
SetupTraceListener(JvmtiMethodTraceListener * listener,ArtJvmtiEvent event,bool enable)1230 void EventHandler::SetupTraceListener(JvmtiMethodTraceListener* listener,
1231                                       ArtJvmtiEvent event,
1232                                       bool enable) {
1233   // Add the actual listeners.
1234   uint32_t new_events = GetInstrumentationEventsFor(event);
1235   if (new_events == 0) {
1236     return;
1237   }
1238   art::ScopedThreadStateChange stsc(art::Thread::Current(), art::ThreadState::kNative);
1239   art::instrumentation::Instrumentation* instr = art::Runtime::Current()->GetInstrumentation();
1240   art::ScopedSuspendAll ssa("jvmti method tracing installation");
1241   if (enable) {
1242     instr->AddListener(listener, new_events);
1243   } else {
1244     instr->RemoveListener(listener, new_events);
1245   }
1246   return;
1247 }
1248 
1249 // Makes sure that all compiled methods are AsyncDeoptimizable so we can deoptimize (and force to
1250 // the switch interpreter) when we try to get or set a local variable.
void EventHandler::HandleLocalAccessCapabilityAdded() {
  // Visits every loaded class and forces non-native, non-proxy methods whose
  // compiled code is not async-deoptimizable onto the interpreter entry point.
  class UpdateEntryPointsClassVisitor : public art::ClassVisitor {
   public:
    explicit UpdateEntryPointsClassVisitor(art::Runtime* runtime)
        : runtime_(runtime) {}

    bool operator()(art::ObjPtr<art::mirror::Class> klass)
        override REQUIRES(art::Locks::mutator_lock_) {
      if (!klass->IsLoaded()) {
        // Skip classes that aren't loaded since they might not have fully allocated and initialized
        // their methods. Furthermore since the jvmti-plugin must have been loaded by this point
        // these methods will definitely be using debuggable code.
        return true;
      }
      for (auto& m : klass->GetMethods(art::kRuntimePointerSize)) {
        const void* code = m.GetEntryPointFromQuickCompiledCode();
        if (m.IsNative() || m.IsProxyMethod()) {
          continue;
        } else if (!runtime_->GetClassLinker()->IsQuickToInterpreterBridge(code) &&
                   !runtime_->IsAsyncDeoptimizeable(reinterpret_cast<uintptr_t>(code))) {
          runtime_->GetInstrumentation()->UpdateMethodsCodeToInterpreterEntryPoint(&m);
        }
      }
      // Always continue to the next class.
      return true;
    }

   private:
    art::Runtime* runtime_;
  };
  art::ScopedObjectAccess soa(art::Thread::Current());
  UpdateEntryPointsClassVisitor visitor(art::Runtime::Current());
  art::Runtime::Current()->GetClassLinker()->VisitClasses(&visitor);
}
1284 
OtherMonitorEventsEnabledAnywhere(ArtJvmtiEvent event)1285 bool EventHandler::OtherMonitorEventsEnabledAnywhere(ArtJvmtiEvent event) {
1286   std::array<ArtJvmtiEvent, 4> events {
1287     {
1288       ArtJvmtiEvent::kMonitorContendedEnter,
1289       ArtJvmtiEvent::kMonitorContendedEntered,
1290       ArtJvmtiEvent::kMonitorWait,
1291       ArtJvmtiEvent::kMonitorWaited
1292     }
1293   };
1294   for (ArtJvmtiEvent e : events) {
1295     if (e != event && IsEventEnabledAnywhere(e)) {
1296       return true;
1297     }
1298   }
1299   return false;
1300 }
1301 
void EventHandler::SetupFramePopTraceListener(bool enable) {
  // Enable is unconditional; disable is refused while any env still has
  // undelivered FramePop notifications (see comment at the call site about
  // dangling pointers / missed events).
  if (enable) {
    frame_pop_enabled = true;
    SetupTraceListener(method_trace_listener_.get(), ArtJvmtiEvent::kFramePop, enable);
  } else {
    // remove the listener if we have no outstanding frames.
    {
      art::ReaderMutexLock mu(art::Thread::Current(), envs_lock_);
      for (ArtJvmTiEnv *env : envs) {
        art::ReaderMutexLock event_mu(art::Thread::Current(), env->event_info_mutex_);
        if (!env->notify_frames.empty()) {
          // Leaving FramePop listener since there are unsent FramePop events.
          return;
        }
      }
      frame_pop_enabled = false;
    }
    // NOTE(review): the listener removal happens after envs_lock_ is released
    // — presumably to respect lock ordering with the suspend-all in
    // SetupTraceListener; confirm before moving it inside the scope.
    SetupTraceListener(method_trace_listener_.get(), ArtJvmtiEvent::kFramePop, enable);
  }
}
1322 
1323 // Handle special work for the given event type, if necessary.
HandleEventType(ArtJvmtiEvent event,bool enable)1324 void EventHandler::HandleEventType(ArtJvmtiEvent event, bool enable) {
1325   switch (event) {
1326     case ArtJvmtiEvent::kDdmPublishChunk:
1327       SetupDdmTracking(ddm_listener_.get(), enable);
1328       return;
1329     case ArtJvmtiEvent::kVmObjectAlloc:
1330       SetupObjectAllocationTracking(enable);
1331       return;
1332     case ArtJvmtiEvent::kGarbageCollectionStart:
1333     case ArtJvmtiEvent::kGarbageCollectionFinish:
1334       SetupGcPauseTracking(gc_pause_listener_.get(), event, enable);
1335       return;
1336     // FramePop can never be disabled once it's been turned on if it was turned off with outstanding
1337     // pop-events since we would either need to deal with dangling pointers or have missed events.
1338     case ArtJvmtiEvent::kFramePop:
1339       if (enable && frame_pop_enabled) {
1340         // The frame-pop event was held on by pending events so we don't need to do anything.
1341       } else {
1342         SetupFramePopTraceListener(enable);
1343       }
1344       return;
1345     case ArtJvmtiEvent::kMethodEntry:
1346     case ArtJvmtiEvent::kMethodExit:
1347     case ArtJvmtiEvent::kFieldAccess:
1348     case ArtJvmtiEvent::kFieldModification:
1349     case ArtJvmtiEvent::kException:
1350     case ArtJvmtiEvent::kExceptionCatch:
1351     case ArtJvmtiEvent::kBreakpoint:
1352     case ArtJvmtiEvent::kSingleStep:
1353     case ArtJvmtiEvent::kForceEarlyReturnUpdateReturnValue:
1354       SetupTraceListener(method_trace_listener_.get(), event, enable);
1355       return;
1356     case ArtJvmtiEvent::kMonitorContendedEnter:
1357     case ArtJvmtiEvent::kMonitorContendedEntered:
1358     case ArtJvmtiEvent::kMonitorWait:
1359     case ArtJvmtiEvent::kMonitorWaited:
1360       if (!OtherMonitorEventsEnabledAnywhere(event)) {
1361         SetupMonitorListener(monitor_listener_.get(), park_listener_.get(), enable);
1362       }
1363       return;
1364     default:
1365       break;
1366   }
1367   return;
1368 }
1369 
1370 // Checks to see if the env has the capabilities associated with the given event.
HasAssociatedCapability(ArtJvmTiEnv * env,ArtJvmtiEvent event)1371 static bool HasAssociatedCapability(ArtJvmTiEnv* env,
1372                                     ArtJvmtiEvent event) {
1373   jvmtiCapabilities caps = env->capabilities;
1374   switch (event) {
1375     case ArtJvmtiEvent::kBreakpoint:
1376       return caps.can_generate_breakpoint_events == 1;
1377 
1378     case ArtJvmtiEvent::kCompiledMethodLoad:
1379     case ArtJvmtiEvent::kCompiledMethodUnload:
1380       return caps.can_generate_compiled_method_load_events == 1;
1381 
1382     case ArtJvmtiEvent::kException:
1383     case ArtJvmtiEvent::kExceptionCatch:
1384       return caps.can_generate_exception_events == 1;
1385 
1386     case ArtJvmtiEvent::kFieldAccess:
1387       return caps.can_generate_field_access_events == 1;
1388 
1389     case ArtJvmtiEvent::kFieldModification:
1390       return caps.can_generate_field_modification_events == 1;
1391 
1392     case ArtJvmtiEvent::kFramePop:
1393       return caps.can_generate_frame_pop_events == 1;
1394 
1395     case ArtJvmtiEvent::kGarbageCollectionStart:
1396     case ArtJvmtiEvent::kGarbageCollectionFinish:
1397       return caps.can_generate_garbage_collection_events == 1;
1398 
1399     case ArtJvmtiEvent::kMethodEntry:
1400       return caps.can_generate_method_entry_events == 1;
1401 
1402     case ArtJvmtiEvent::kMethodExit:
1403       return caps.can_generate_method_exit_events == 1;
1404 
1405     case ArtJvmtiEvent::kMonitorContendedEnter:
1406     case ArtJvmtiEvent::kMonitorContendedEntered:
1407     case ArtJvmtiEvent::kMonitorWait:
1408     case ArtJvmtiEvent::kMonitorWaited:
1409       return caps.can_generate_monitor_events == 1;
1410 
1411     case ArtJvmtiEvent::kNativeMethodBind:
1412       return caps.can_generate_native_method_bind_events == 1;
1413 
1414     case ArtJvmtiEvent::kObjectFree:
1415       return caps.can_generate_object_free_events == 1;
1416 
1417     case ArtJvmtiEvent::kSingleStep:
1418       return caps.can_generate_single_step_events == 1;
1419 
1420     case ArtJvmtiEvent::kVmObjectAlloc:
1421       return caps.can_generate_vm_object_alloc_events == 1;
1422 
1423     default:
1424       return true;
1425   }
1426 }
1427 
IsInternalEvent(ArtJvmtiEvent event)1428 static bool IsInternalEvent(ArtJvmtiEvent event) {
1429   return static_cast<uint32_t>(event) >=
1430          static_cast<uint32_t>(ArtJvmtiEvent::kMinInternalEventTypeVal);
1431 }
1432 
// Enables or disables an internal (runtime-managed, not agent-visible) event.
// Internal events are reference-counted both globally and per-thread: the
// instrumentation handler is toggled only on 0<->1 transitions of the global
// count, and deoptimization state only on 0<->1 transitions of the per-thread
// count. Returns THREAD_NOT_ALIVE / lookup errors if 'thread' is invalid.
jvmtiError EventHandler::SetInternalEvent(jthread thread,
                                          ArtJvmtiEvent event,
                                          jvmtiEventMode mode) {
  CHECK(IsInternalEvent(event)) << static_cast<uint32_t>(event);

  art::Thread* self = art::Thread::Current();
  art::Thread* target = nullptr;
  // Block user-code suspension for the whole operation; see the NB below.
  ScopedNoUserCodeSuspension snucs(self);
  // The overall state across all threads and jvmtiEnvs. This is used to control the state of the
  // instrumentation handlers since we only want each added once.
  bool old_state;
  bool new_state;
  // The state for just the current 'thread' (including null) across all jvmtiEnvs. This is used to
  // control the deoptimization state since we do refcounting for that and need to perform different
  // actions depending on if the event is limited to a single thread or global.
  bool old_thread_state;
  bool new_thread_state;
  {
    // From now on we know we cannot get suspended by user-code.
    // NB This does a SuspendCheck (during thread state change) so we need to
    // make sure we don't have the 'suspend_lock' locked here.
    art::ScopedObjectAccess soa(self);
    art::WriterMutexLock el_mu(self, envs_lock_);
    art::MutexLock tll_mu(self, *art::Locks::thread_list_lock_);
    jvmtiError err = ERR(INTERNAL);
    if (!ThreadUtil::GetAliveNativeThread(thread, soa, &target, &err)) {
      return err;
    } else if (target->IsStillStarting() || target->GetState() == art::ThreadState::kStarting) {
      target->Dump(LOG_STREAM(WARNING) << "Is not alive: ");
      return ERR(THREAD_NOT_ALIVE);
    }

    // Make sure we have a valid jthread to pass to deopt-manager. If the
    // caller passed null, materialize a local reference to the resolved
    // target's peer so downstream code always sees a non-null handle.
    ScopedLocalRef<jthread> thread_lr(
        soa.Env(), thread != nullptr ? nullptr : soa.AddLocalReference<jthread>(target->GetPeer()));
    if (thread == nullptr) {
      thread = thread_lr.get();
    }
    CHECK(thread != nullptr);

    {
      // The refcounts must not go negative, and the per-thread count can
      // never exceed the global count.
      DCHECK_GE(GetInternalEventRefcount(event) + (mode == JVMTI_ENABLE ? 1 : -1), 0)
        << "Refcount: " << GetInternalEventRefcount(event);
      DCHECK_GE(GetInternalEventThreadRefcount(event, target) + (mode == JVMTI_ENABLE ? 1 : -1), 0)
        << "Refcount: " << GetInternalEventThreadRefcount(event, target);
      DCHECK_GE(GetInternalEventRefcount(event), GetInternalEventThreadRefcount(event, target));
      old_state = GetInternalEventRefcount(event) > 0;
      old_thread_state = GetInternalEventThreadRefcount(event, target) > 0;
      if (mode == JVMTI_ENABLE) {
        new_state = IncrInternalEventRefcount(event) > 0;
        new_thread_state = IncrInternalEventThreadRefcount(event, target) > 0;
      } else {
        new_state = DecrInternalEventRefcount(event) > 0;
        new_thread_state = DecrInternalEventThreadRefcount(event, target) > 0;
      }
      if (old_state != new_state) {
        global_mask.Set(event, new_state);
      }
    }
  }
  // Handle any special work required for the event type. We still have the
  // user_code_suspend_count_lock_ so there won't be any interleaving here.
  if (new_state != old_state) {
    HandleEventType(event, mode == JVMTI_ENABLE);
  }
  if (old_thread_state != new_thread_state) {
    HandleEventDeopt(event, thread, new_thread_state);
  }
  return OK;
}
1503 
IsDirectlySettableEvent(ArtJvmtiEvent event)1504 static bool IsDirectlySettableEvent(ArtJvmtiEvent event) {
1505   return !IsInternalEvent(event);
1506 }
1507 
EventIsNormal(ArtJvmtiEvent event)1508 static bool EventIsNormal(ArtJvmtiEvent event) {
1509   return EventMask::EventIsInRange(event) && IsDirectlySettableEvent(event);
1510 }
1511 
// Enables or disables a normal (agent-settable) event for 'env', optionally
// restricted to a single 'thread' (null = global). Validates the mode, event
// type, capabilities, and thread-level applicability before touching state;
// listener installation and deoptimization work run only on actual enabled
// state transitions.
jvmtiError EventHandler::SetEvent(ArtJvmTiEnv* env,
                                  jthread thread,
                                  ArtJvmtiEvent event,
                                  jvmtiEventMode mode) {
  if (mode != JVMTI_ENABLE && mode != JVMTI_DISABLE) {
    return ERR(ILLEGAL_ARGUMENT);
  }

  if (!EventIsNormal(event)) {
    return ERR(INVALID_EVENT_TYPE);
  }

  if (!HasAssociatedCapability(env, event)) {
    return ERR(MUST_POSSESS_CAPABILITY);
  }

  // Per-thread enabling is only legal for thread-controllable events.
  if (thread != nullptr && !IsThreadControllable(event)) {
    return ERR(ILLEGAL_ARGUMENT);
  }

  art::Thread* self = art::Thread::Current();
  art::Thread* target = nullptr;
  // Block user-code suspension for the whole operation; see the NB below.
  ScopedNoUserCodeSuspension snucs(self);
  // The overall state across all threads and jvmtiEnvs. This is used to control the state of the
  // instrumentation handlers since we only want each added once.
  bool old_state;
  bool new_state;
  // The state for just the current 'thread' (including null) across all jvmtiEnvs. This is used to
  // control the deoptimization state since we do refcounting for that and need to perform different
  // actions depending on if the event is limited to a single thread or global.
  bool old_thread_state;
  bool new_thread_state;
  {
    // From now on we know we cannot get suspended by user-code.
    // NB This does a SuspendCheck (during thread state change) so we need to
    // make sure we don't have the 'suspend_lock' locked here.
    art::ScopedObjectAccess soa(self);
    art::WriterMutexLock el_mu(self, envs_lock_);
    art::MutexLock tll_mu(self, *art::Locks::thread_list_lock_);
    jvmtiError err = ERR(INTERNAL);
    // A null 'thread' means the event applies globally; otherwise resolve
    // and validate the target thread.
    if (thread != nullptr) {
      if (!ThreadUtil::GetAliveNativeThread(thread, soa, &target, &err)) {
        return err;
      } else if (target->IsStillStarting() ||
                target->GetState() == art::ThreadState::kStarting) {
        target->Dump(LOG_STREAM(WARNING) << "Is not alive: ");
        return ERR(THREAD_NOT_ALIVE);
      }
    }


    art::WriterMutexLock ei_mu(self, env->event_info_mutex_);
    // Capture the pre-change state so listener/deopt work below runs only on
    // genuine transitions.
    old_thread_state = GetThreadEventState(event, target);
    old_state = global_mask.Test(event);
    if (mode == JVMTI_ENABLE) {
      env->event_masks.EnableEvent(env, target, event);
      global_mask.Set(event);
      new_state = true;
      new_thread_state = true;
      DCHECK(GetThreadEventState(event, target));
    } else {
      DCHECK_EQ(mode, JVMTI_DISABLE);

      // Other envs (or other thread masks) may still want this event, so
      // recompute the global and per-thread state rather than clearing blindly.
      env->event_masks.DisableEvent(env, target, event);
      RecalculateGlobalEventMaskLocked(event);
      new_state = global_mask.Test(event);
      new_thread_state = GetThreadEventState(event, target);
      DCHECK(new_state || !new_thread_state);
    }
  }
  // Handle any special work required for the event type. We still have the
  // user_code_suspend_count_lock_ so there won't be any interleaving here.
  if (new_state != old_state) {
    HandleEventType(event, mode == JVMTI_ENABLE);
  }
  if (old_thread_state != new_thread_state) {
    return HandleEventDeopt(event, thread, new_thread_state);
  }
  return OK;
}
1592 
GetThreadEventState(ArtJvmtiEvent event,art::Thread * thread)1593 bool EventHandler::GetThreadEventState(ArtJvmtiEvent event, art::Thread* thread) {
1594   for (ArtJvmTiEnv* stored_env : envs) {
1595     if (stored_env == nullptr) {
1596       continue;
1597     }
1598     auto& masks = stored_env->event_masks;
1599     if (thread == nullptr && masks.global_event_mask.Test(event)) {
1600       return true;
1601     } else if (thread != nullptr) {
1602       EventMask* mask =  masks.GetEventMaskOrNull(thread);
1603       if (mask != nullptr && mask->Test(event)) {
1604         return true;
1605       }
1606     }
1607   }
1608   return false;
1609 }
1610 
HandleBreakpointEventsChanged(bool added)1611 void EventHandler::HandleBreakpointEventsChanged(bool added) {
1612   if (added) {
1613     DeoptManager::Get()->AddDeoptimizationRequester();
1614   } else {
1615     DeoptManager::Get()->RemoveDeoptimizationRequester();
1616   }
1617 }
1618 
AddDelayedNonStandardExitEvent(const art::ShadowFrame * frame,bool is_object,jvalue val)1619 void EventHandler::AddDelayedNonStandardExitEvent(const art::ShadowFrame *frame,
1620                                                   bool is_object,
1621                                                   jvalue val) {
1622   method_trace_listener_->AddDelayedNonStandardExitEvent(frame, is_object, val);
1623 }
1624 
GetInternalEventIndex(ArtJvmtiEvent event)1625 static size_t GetInternalEventIndex(ArtJvmtiEvent event) {
1626   CHECK(IsInternalEvent(event));
1627   return static_cast<size_t>(event) - static_cast<size_t>(ArtJvmtiEvent::kMinInternalEventTypeVal);
1628 }
1629 
DecrInternalEventThreadRefcount(ArtJvmtiEvent event,art::Thread * target)1630 int32_t EventHandler::DecrInternalEventThreadRefcount(ArtJvmtiEvent event, art::Thread* target) {
1631   return --GetInternalEventThreadRefcount(event, target);
1632 }
1633 
IncrInternalEventThreadRefcount(ArtJvmtiEvent event,art::Thread * target)1634 int32_t EventHandler::IncrInternalEventThreadRefcount(ArtJvmtiEvent event, art::Thread* target) {
1635   return ++GetInternalEventThreadRefcount(event, target);
1636 }
1637 
GetInternalEventThreadRefcount(ArtJvmtiEvent event,art::Thread * target)1638 int32_t& EventHandler::GetInternalEventThreadRefcount(ArtJvmtiEvent event, art::Thread* target) {
1639   auto& refs = internal_event_thread_refcount_[GetInternalEventIndex(event)];
1640   UniqueThread target_ut{target, target->GetTid()};
1641   if (refs.find(target_ut) == refs.end()) {
1642     refs.insert({target_ut, 0});
1643   }
1644   return refs.at(target_ut);
1645 }
1646 
DecrInternalEventRefcount(ArtJvmtiEvent event)1647 int32_t EventHandler::DecrInternalEventRefcount(ArtJvmtiEvent event) {
1648   return --internal_event_refcount_[GetInternalEventIndex(event)];
1649 }
1650 
IncrInternalEventRefcount(ArtJvmtiEvent event)1651 int32_t EventHandler::IncrInternalEventRefcount(ArtJvmtiEvent event) {
1652   return ++internal_event_refcount_[GetInternalEventIndex(event)];
1653 }
1654 
GetInternalEventRefcount(ArtJvmtiEvent event) const1655 int32_t EventHandler::GetInternalEventRefcount(ArtJvmtiEvent event) const {
1656   return internal_event_refcount_[GetInternalEventIndex(event)];
1657 }
1658 
// Tears down event delivery at runtime shutdown: detaches the method trace
// listener from the instrumentation and unregisters the allocation listener.
void EventHandler::Shutdown() {
  // Need to remove the method_trace_listener_ if it's there.
  art::Thread* self = art::Thread::Current();
  // Instrumentation changes must not race with the GC or running mutators:
  // enter a GC critical section and suspend all threads first.
  art::gc::ScopedGCCriticalSection gcs(self,
                                       art::gc::kGcCauseInstrumentation,
                                       art::gc::kCollectorTypeInstrumentation);
  art::ScopedSuspendAll ssa("jvmti method tracing uninstallation");
  // Just remove every possible event (~0 is an all-ones event bitmask).
  art::Runtime::Current()->GetInstrumentation()->RemoveListener(method_trace_listener_.get(), ~0);
  AllocationManager::Get()->RemoveAllocListener();
}
1670 
// Constructs the event handler, eagerly creating all listener objects and
// registering the allocation listener with the global AllocationManager.
EventHandler::EventHandler()
  : envs_lock_("JVMTI Environment List Lock", art::LockLevel::kPostMutatorTopLockLevel),
    frame_pop_enabled(false),
    internal_event_refcount_({0}) {
  // The allocation listener must exist before it is handed to the manager.
  alloc_listener_.reset(new JvmtiEventAllocationListener(this));
  AllocationManager::Get()->SetAllocListener(alloc_listener_.get());
  ddm_listener_.reset(new JvmtiDdmChunkListener(this));
  gc_pause_listener_.reset(new JvmtiGcPauseListener(this));
  method_trace_listener_.reset(new JvmtiMethodTraceListener(this));
  monitor_listener_.reset(new JvmtiMonitorListener(this));
  park_listener_.reset(new JvmtiParkListener(this));
}
1683 
~EventHandler()1684 EventHandler::~EventHandler() {
1685 }
1686 
1687 }  // namespace openjdkjvmti
1688