/* Copyright (C) 2016 The Android Open Source Project
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This file implements interfaces from the file jvmti.h. This implementation
 * is licensed under the same terms as the file jvmti.h. The
 * copyright and license information for the file jvmti.h follows.
 *
 * Copyright (c) 2003, 2011, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation. Oracle designates this
 * particular file as subject to the "Classpath" exception as provided
 * by Oracle in the LICENSE file that accompanied this code.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

#include "events-inl.h"

#include <array>

#include "art_field-inl.h"
#include "art_jvmti.h"
#include "art_method-inl.h"
#include "deopt_manager.h"
#include "dex/dex_file_types.h"
#include "gc/allocation_listener.h"
#include "gc/gc_pause_listener.h"
#include "gc/heap.h"
#include "gc/scoped_gc_critical_section.h"
#include "handle_scope-inl.h"
#include "instrumentation.h"
#include "jni_env_ext-inl.h"
#include "jni_internal.h"
#include "mirror/class.h"
#include "mirror/object-inl.h"
#include "monitor.h"
#include "nativehelper/scoped_local_ref.h"
#include "runtime.h"
#include "scoped_thread_state_change-inl.h"
#include "stack.h"
#include "thread-inl.h"
#include "thread_list.h"
#include "ti_phase.h"

namespace openjdkjvmti {

void ArtJvmtiEventCallbacks::CopyExtensionsFrom(const ArtJvmtiEventCallbacks* cb) {
  if (art::kIsDebugBuild) {
    ArtJvmtiEventCallbacks clean;
    DCHECK_EQ(memcmp(&clean, this, sizeof(clean)), 0)
        << "CopyExtensionsFrom called with already-initialized event callbacks!";
  }
  if (cb != nullptr) {
    memcpy(this, cb, sizeof(*this));
  } else {
    memset(this, 0, sizeof(*this));
  }
}

jvmtiError ArtJvmtiEventCallbacks::Set(jint index, jvmtiExtensionEvent cb) {
  switch (index) {
    case static_cast<jint>(ArtJvmtiEvent::kDdmPublishChunk):
      DdmPublishChunk = reinterpret_cast<ArtJvmtiEventDdmPublishChunk>(cb);
      return OK;
    default:
      return ERR(ILLEGAL_ARGUMENT);
  }
}


bool IsExtensionEvent(jint e) {
  return e >= static_cast<jint>(ArtJvmtiEvent::kMinEventTypeVal) &&
      e <= static_cast<jint>(ArtJvmtiEvent::kMaxEventTypeVal) &&
      IsExtensionEvent(static_cast<ArtJvmtiEvent>(e));
}

bool IsExtensionEvent(ArtJvmtiEvent e) {
  switch (e) {
    case ArtJvmtiEvent::kDdmPublishChunk:
      return true;
    default:
      return false;
  }
}

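// Events are tracked at two levels: 'global_event_mask' covers events enabled for all threads
// (a null thread filter), while 'unioned_thread_event_mask' caches the OR of every per-thread
// mask so that a quick "is anyone interested" check does not need to walk thread_event_masks.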
bool EventMasks::IsEnabledAnywhere(ArtJvmtiEvent event) {
  return global_event_mask.Test(event) || unioned_thread_event_mask.Test(event);
}

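// Returns the mask for the given thread, lazily creating a per-thread entry on first use. A
// thread is keyed by the (Thread*, tid) pair so that a stale entry whose Thread* has been reused
// by a newly started thread is not mistaken for the current one.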
EventMask& EventMasks::GetEventMask(art::Thread* thread) {
  if (thread == nullptr) {
    return global_event_mask;
  }

  for (auto& pair : thread_event_masks) {
    const UniqueThread& unique_thread = pair.first;
    if (unique_thread.first == thread &&
        unique_thread.second == static_cast<uint32_t>(thread->GetTid())) {
      return pair.second;
    }
  }

  // TODO: Remove old UniqueThread with the same pointer, if one exists.

  thread_event_masks.emplace_back(UniqueThread(thread, thread->GetTid()), EventMask());
  return thread_event_masks.back().second;
}

EventMask* EventMasks::GetEventMaskOrNull(art::Thread* thread) {
  if (thread == nullptr) {
    return &global_event_mask;
  }

  for (auto& pair : thread_event_masks) {
    const UniqueThread& unique_thread = pair.first;
    if (unique_thread.first == thread &&
        unique_thread.second == static_cast<uint32_t>(thread->GetTid())) {
      return &pair.second;
    }
  }

  return nullptr;
}


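// Enabling and disabling keep 'unioned_thread_event_mask' in sync: enabling a per-thread event
// simply sets the union bit, while disabling one re-derives the bit by scanning all per-thread
// masks, since another thread may still have the event enabled.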
void EventMasks::EnableEvent(ArtJvmTiEnv* env, art::Thread* thread, ArtJvmtiEvent event) {
  DCHECK_EQ(&env->event_masks, this);
  env->event_info_mutex_.AssertExclusiveHeld(art::Thread::Current());
  DCHECK(EventMask::EventIsInRange(event));
  GetEventMask(thread).Set(event);
  if (thread != nullptr) {
    unioned_thread_event_mask.Set(event, true);
  }
}

void EventMasks::DisableEvent(ArtJvmTiEnv* env, art::Thread* thread, ArtJvmtiEvent event) {
  DCHECK_EQ(&env->event_masks, this);
  env->event_info_mutex_.AssertExclusiveHeld(art::Thread::Current());
  DCHECK(EventMask::EventIsInRange(event));
  GetEventMask(thread).Set(event, false);
  if (thread != nullptr) {
    // Regenerate union for the event.
    bool union_value = false;
    for (auto& pair : thread_event_masks) {
      union_value |= pair.second.Test(event);
      if (union_value) {
        break;
      }
    }
    unioned_thread_event_mask.Set(event, union_value);
  }
}

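// When the can_retransform_classes capability is added or removed, any enabled
// ClassFileLoadHook events must be migrated between their retransformable and
// non-retransformable variants so dispatch keeps matching the env's actual capability.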
void EventMasks::HandleChangedCapabilities(const jvmtiCapabilities& caps, bool caps_added) {
  if (UNLIKELY(caps.can_retransform_classes == 1)) {
    // If we are adding or removing the retransform-classes capability for this env we need to
    // switch all events from NonRetransformable to Retransformable or vice versa.
    ArtJvmtiEvent to_remove = caps_added ? ArtJvmtiEvent::kClassFileLoadHookNonRetransformable
                                         : ArtJvmtiEvent::kClassFileLoadHookRetransformable;
    ArtJvmtiEvent to_add = caps_added ? ArtJvmtiEvent::kClassFileLoadHookRetransformable
                                      : ArtJvmtiEvent::kClassFileLoadHookNonRetransformable;
    if (global_event_mask.Test(to_remove)) {
      CHECK(!global_event_mask.Test(to_add));
      global_event_mask.Set(to_remove, false);
      global_event_mask.Set(to_add, true);
    }

    if (unioned_thread_event_mask.Test(to_remove)) {
      CHECK(!unioned_thread_event_mask.Test(to_add));
      unioned_thread_event_mask.Set(to_remove, false);
      unioned_thread_event_mask.Set(to_add, true);
    }
    for (auto& thread_mask : thread_event_masks) {
      if (thread_mask.second.Test(to_remove)) {
        CHECK(!thread_mask.second.Test(to_add));
        thread_mask.second.Set(to_remove, false);
        thread_mask.second.Set(to_add, true);
      }
    }
  }
}

void EventHandler::RegisterArtJvmTiEnv(ArtJvmTiEnv* env) {
  art::WriterMutexLock mu(art::Thread::Current(), envs_lock_);
  envs.push_back(env);
}

void EventHandler::RemoveArtJvmTiEnv(ArtJvmTiEnv* env) {
  art::WriterMutexLock mu(art::Thread::Current(), envs_lock_);
  // Holding envs_lock_ for writing guarantees nobody is iterating over the envs list, so the
  // element can simply be erased. Afterwards the global event masks must be recalculated, since
  // the removed env may have been the only one with a given event enabled.
  auto it = std::find(envs.begin(), envs.end(), env);
  if (it != envs.end()) {
    envs.erase(it);
    for (size_t i = static_cast<size_t>(ArtJvmtiEvent::kMinEventTypeVal);
         i <= static_cast<size_t>(ArtJvmtiEvent::kMaxEventTypeVal);
         ++i) {
      RecalculateGlobalEventMaskLocked(static_cast<ArtJvmtiEvent>(i));
    }
  }
}

static bool IsThreadControllable(ArtJvmtiEvent event) {
  switch (event) {
    case ArtJvmtiEvent::kVmInit:
    case ArtJvmtiEvent::kVmStart:
    case ArtJvmtiEvent::kVmDeath:
    case ArtJvmtiEvent::kThreadStart:
    case ArtJvmtiEvent::kCompiledMethodLoad:
    case ArtJvmtiEvent::kCompiledMethodUnload:
    case ArtJvmtiEvent::kDynamicCodeGenerated:
    case ArtJvmtiEvent::kDataDumpRequest:
      return false;

    default:
      return true;
  }
}

template<typename Type>
static Type AddLocalRef(art::JNIEnvExt* e, art::mirror::Object* obj)
    REQUIRES_SHARED(art::Locks::mutator_lock_) {
  return (obj == nullptr) ? nullptr : e->AddLocalReference<Type>(obj);
}

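// Helper that runs a JVMTI-style callback with the JNIEnv and current-thread jthread arguments
// that most JVMTI events share: it materializes a local reference for the current thread's peer
// object and forwards any event-specific arguments to the dispatcher.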
template<ArtJvmtiEvent kEvent, typename ...Args>
static void RunEventCallback(EventHandler* handler,
                             art::Thread* self,
                             art::JNIEnvExt* jnienv,
                             Args... args)
    REQUIRES_SHARED(art::Locks::mutator_lock_) {
  ScopedLocalRef<jthread> thread_jni(jnienv, AddLocalRef<jthread>(jnienv, self->GetPeer()));
  handler->DispatchEvent<kEvent>(self,
                                 static_cast<JNIEnv*>(jnienv),
                                 thread_jni.get(),
                                 args...);
}

static void SetupDdmTracking(art::DdmCallback* listener, bool enable) {
  art::ScopedObjectAccess soa(art::Thread::Current());
  if (enable) {
    art::Runtime::Current()->GetRuntimeCallbacks()->AddDdmCallback(listener);
  } else {
    art::Runtime::Current()->GetRuntimeCallbacks()->RemoveDdmCallback(listener);
  }
}

class JvmtiDdmChunkListener : public art::DdmCallback {
 public:
  explicit JvmtiDdmChunkListener(EventHandler* handler) : handler_(handler) {}

  void DdmPublishChunk(uint32_t type, const art::ArrayRef<const uint8_t>& data)
      OVERRIDE REQUIRES_SHARED(art::Locks::mutator_lock_) {
    if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kDdmPublishChunk)) {
      art::Thread* self = art::Thread::Current();
      handler_->DispatchEvent<ArtJvmtiEvent::kDdmPublishChunk>(
          self,
          static_cast<JNIEnv*>(self->GetJniEnv()),
          static_cast<jint>(type),
          static_cast<jint>(data.size()),
          reinterpret_cast<const jbyte*>(data.data()));
    }
  }

 private:
  EventHandler* handler_;

  DISALLOW_COPY_AND_ASSIGN(JvmtiDdmChunkListener);
};

class JvmtiAllocationListener : public art::gc::AllocationListener {
 public:
  explicit JvmtiAllocationListener(EventHandler* handler) : handler_(handler) {}

  void ObjectAllocated(art::Thread* self, art::ObjPtr<art::mirror::Object>* obj, size_t byte_count)
      OVERRIDE REQUIRES_SHARED(art::Locks::mutator_lock_) {
    DCHECK_EQ(self, art::Thread::Current());

    if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kVmObjectAlloc)) {
      art::StackHandleScope<1> hs(self);
      auto h = hs.NewHandleWrapper(obj);
      // jvmtiEventVMObjectAlloc parameters:
      //      jvmtiEnv *jvmti_env,
      //      JNIEnv* jni_env,
      //      jthread thread,
      //      jobject object,
      //      jclass object_klass,
      //      jlong size
      art::JNIEnvExt* jni_env = self->GetJniEnv();
      ScopedLocalRef<jobject> object(
          jni_env, jni_env->AddLocalReference<jobject>(*obj));
      ScopedLocalRef<jclass> klass(
          jni_env, jni_env->AddLocalReference<jclass>(obj->Ptr()->GetClass()));

      RunEventCallback<ArtJvmtiEvent::kVmObjectAlloc>(handler_,
                                                      self,
                                                      jni_env,
                                                      object.get(),
                                                      klass.get(),
                                                      static_cast<jlong>(byte_count));
    }
  }

 private:
  EventHandler* handler_;
};

static void SetupObjectAllocationTracking(art::gc::AllocationListener* listener, bool enable) {
  // We must not hold the mutator lock here, but if we're in FastJNI, for example, we might. For
  // now, do a workaround: (possibly) acquire and release.
  art::ScopedObjectAccess soa(art::Thread::Current());
  art::ScopedThreadSuspension sts(soa.Self(), art::ThreadState::kSuspended);
  if (enable) {
    art::Runtime::Current()->GetHeap()->SetAllocationListener(listener);
  } else {
    art::Runtime::Current()->GetHeap()->RemoveAllocationListener();
  }
}

class JvmtiMonitorListener : public art::MonitorCallback {
 public:
  explicit JvmtiMonitorListener(EventHandler* handler) : handler_(handler) {}

  void MonitorContendedLocking(art::Monitor* m)
      OVERRIDE REQUIRES_SHARED(art::Locks::mutator_lock_) {
    if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMonitorContendedEnter)) {
      art::Thread* self = art::Thread::Current();
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      ScopedLocalRef<jobject> mon(jnienv, AddLocalRef<jobject>(jnienv, m->GetObject()));
      RunEventCallback<ArtJvmtiEvent::kMonitorContendedEnter>(
          handler_,
          self,
          jnienv,
          mon.get());
    }
  }

  void MonitorContendedLocked(art::Monitor* m)
      OVERRIDE REQUIRES_SHARED(art::Locks::mutator_lock_) {
    if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMonitorContendedEntered)) {
      art::Thread* self = art::Thread::Current();
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      ScopedLocalRef<jobject> mon(jnienv, AddLocalRef<jobject>(jnienv, m->GetObject()));
      RunEventCallback<ArtJvmtiEvent::kMonitorContendedEntered>(
          handler_,
          self,
          jnienv,
          mon.get());
    }
  }

  void ObjectWaitStart(art::Handle<art::mirror::Object> obj, int64_t timeout)
      OVERRIDE REQUIRES_SHARED(art::Locks::mutator_lock_) {
    if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMonitorWait)) {
      art::Thread* self = art::Thread::Current();
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      ScopedLocalRef<jobject> mon(jnienv, AddLocalRef<jobject>(jnienv, obj.Get()));
      RunEventCallback<ArtJvmtiEvent::kMonitorWait>(
          handler_,
          self,
          jnienv,
          mon.get(),
          static_cast<jlong>(timeout));
    }
  }


  // Our interpretation of the spec is that the JVMTI_EVENT_MONITOR_WAITED will be sent immediately
  // after a thread has woken up from a sleep caused by a call to Object#wait. If the thread will
  // never go to sleep (due to not having the lock, having bad arguments, or having an exception
  // propagated from JVMTI_EVENT_MONITOR_WAIT) we will not send this event.
  //
  // This does not fully match the RI semantics. Specifically, there is one situation where we will
  // not send the JVMTI_EVENT_MONITOR_WAITED event but the RI would: when an exception was thrown
  // from the JVMTI_EVENT_MONITOR_WAIT event but the call was otherwise fine. In that case the RI
  // would send this event and return without going to sleep.
  //
  // See b/65558434 for more discussion.
  void MonitorWaitFinished(art::Monitor* m, bool timeout)
      OVERRIDE REQUIRES_SHARED(art::Locks::mutator_lock_) {
    if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMonitorWaited)) {
      art::Thread* self = art::Thread::Current();
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      ScopedLocalRef<jobject> mon(jnienv, AddLocalRef<jobject>(jnienv, m->GetObject()));
      RunEventCallback<ArtJvmtiEvent::kMonitorWaited>(
          handler_,
          self,
          jnienv,
          mon.get(),
          static_cast<jboolean>(timeout));
    }
  }

 private:
  EventHandler* handler_;
};

static void SetupMonitorListener(art::MonitorCallback* listener, bool enable) {
  // We must not hold the mutator lock here, but if we're in FastJNI, for example, we might. For
  // now, do a workaround: (possibly) acquire and release.
  art::ScopedObjectAccess soa(art::Thread::Current());
  if (enable) {
    art::Runtime::Current()->GetRuntimeCallbacks()->AddMonitorCallback(listener);
  } else {
    art::Runtime::Current()->GetRuntimeCallbacks()->RemoveMonitorCallback(listener);
  }
}

// Report GC pauses (see spec) as GARBAGE_COLLECTION_START and GARBAGE_COLLECTION_END.
class JvmtiGcPauseListener : public art::gc::GcPauseListener {
 public:
  explicit JvmtiGcPauseListener(EventHandler* handler)
      : handler_(handler),
        start_enabled_(false),
        finish_enabled_(false) {}

  void StartPause() OVERRIDE {
    handler_->DispatchEvent<ArtJvmtiEvent::kGarbageCollectionStart>(art::Thread::Current());
  }

  void EndPause() OVERRIDE {
    handler_->DispatchEvent<ArtJvmtiEvent::kGarbageCollectionFinish>(art::Thread::Current());
  }

  bool IsEnabled() {
    return start_enabled_ || finish_enabled_;
  }

  void SetStartEnabled(bool e) {
    start_enabled_ = e;
  }

  void SetFinishEnabled(bool e) {
    finish_enabled_ = e;
  }

 private:
  EventHandler* handler_;
  bool start_enabled_;
  bool finish_enabled_;
};

static void SetupGcPauseTracking(JvmtiGcPauseListener* listener, ArtJvmtiEvent event, bool enable) {
  bool old_state = listener->IsEnabled();

  if (event == ArtJvmtiEvent::kGarbageCollectionStart) {
    listener->SetStartEnabled(enable);
  } else {
    listener->SetFinishEnabled(enable);
  }

  bool new_state = listener->IsEnabled();

  if (old_state != new_state) {
    if (new_state) {
      art::Runtime::Current()->GetHeap()->SetGcPauseListener(listener);
    } else {
      art::Runtime::Current()->GetHeap()->RemoveGcPauseListener();
    }
  }
}

class JvmtiMethodTraceListener FINAL : public art::instrumentation::InstrumentationListener {
 public:
  explicit JvmtiMethodTraceListener(EventHandler* handler) : event_handler_(handler) {}

  // Call-back for when a method is entered.
  void MethodEntered(art::Thread* self,
                     art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
                     art::ArtMethod* method,
                     uint32_t dex_pc ATTRIBUTE_UNUSED)
      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
    if (!method->IsRuntimeMethod() &&
        event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMethodEntry)) {
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      RunEventCallback<ArtJvmtiEvent::kMethodEntry>(event_handler_,
                                                    self,
                                                    jnienv,
                                                    art::jni::EncodeArtMethod(method));
    }
  }

  // Call-back for when a method is exited with a reference return value.
  void MethodExited(art::Thread* self,
                    art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
                    art::ArtMethod* method,
                    uint32_t dex_pc ATTRIBUTE_UNUSED,
                    art::Handle<art::mirror::Object> return_value)
      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
    if (!method->IsRuntimeMethod() &&
        event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMethodExit)) {
      DCHECK_EQ(method->GetReturnTypePrimitive(), art::Primitive::kPrimNot)
          << method->PrettyMethod();
      DCHECK(!self->IsExceptionPending());
      jvalue val;
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      ScopedLocalRef<jobject> return_jobj(jnienv, AddLocalRef<jobject>(jnienv, return_value.Get()));
      val.l = return_jobj.get();
      RunEventCallback<ArtJvmtiEvent::kMethodExit>(
          event_handler_,
          self,
          jnienv,
          art::jni::EncodeArtMethod(method),
          /*was_popped_by_exception*/ static_cast<jboolean>(JNI_FALSE),
          val);
    }
  }

  // Call-back for when a method is exited with a primitive return value.
  void MethodExited(art::Thread* self,
                    art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
                    art::ArtMethod* method,
                    uint32_t dex_pc ATTRIBUTE_UNUSED,
                    const art::JValue& return_value)
      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
    if (!method->IsRuntimeMethod() &&
        event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMethodExit)) {
      DCHECK_NE(method->GetReturnTypePrimitive(), art::Primitive::kPrimNot)
          << method->PrettyMethod();
      DCHECK(!self->IsExceptionPending());
      jvalue val;
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      // The 64-bit integer is the largest member of the jvalue union, so copying it is sufficient
      // to transfer any primitive return value.
      val.j = return_value.GetJ();
      RunEventCallback<ArtJvmtiEvent::kMethodExit>(
          event_handler_,
          self,
          jnienv,
          art::jni::EncodeArtMethod(method),
          /*was_popped_by_exception*/ static_cast<jboolean>(JNI_FALSE),
          val);
    }
  }

  // Call-back for when a method is popped due to an exception throw. A method will either cause a
  // MethodExited call-back or a MethodUnwind call-back when its activation is removed.
  void MethodUnwind(art::Thread* self,
                    art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
                    art::ArtMethod* method,
                    uint32_t dex_pc ATTRIBUTE_UNUSED)
      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
    if (!method->IsRuntimeMethod() &&
        event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMethodExit)) {
      jvalue val;
      // Just set this to 0xffffffffffffffff so it's not uninitialized.
      val.j = static_cast<jlong>(-1);
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      art::StackHandleScope<1> hs(self);
      art::Handle<art::mirror::Throwable> old_exception(hs.NewHandle(self->GetException()));
      CHECK(!old_exception.IsNull());
      self->ClearException();
      RunEventCallback<ArtJvmtiEvent::kMethodExit>(
          event_handler_,
          self,
          jnienv,
          art::jni::EncodeArtMethod(method),
          /*was_popped_by_exception*/ static_cast<jboolean>(JNI_TRUE),
          val);
      // Match RI behavior of just throwing away original exception if a new one is thrown.
      if (LIKELY(!self->IsExceptionPending())) {
        self->SetException(old_exception.Get());
      }
    }
  }

  // Call-back for when the dex pc moves in a method.
  void DexPcMoved(art::Thread* self,
                  art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
                  art::ArtMethod* method,
                  uint32_t new_dex_pc)
      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
    DCHECK(!method->IsRuntimeMethod());
    // Default methods might be copied to multiple classes. We need to get the canonical version of
    // this method so that we can check for breakpoints correctly.
    // TODO We should maybe do this on other events to ensure that we are consistent WRT default
    // methods. This could interact with obsolete methods if we ever let interface redefinition
    // happen though.
    method = method->GetCanonicalMethod();
    art::JNIEnvExt* jnienv = self->GetJniEnv();
    jmethodID jmethod = art::jni::EncodeArtMethod(method);
    jlocation location = static_cast<jlocation>(new_dex_pc);
    // Step event is reported first according to the spec.
    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kSingleStep)) {
      RunEventCallback<ArtJvmtiEvent::kSingleStep>(event_handler_, self, jnienv, jmethod, location);
    }
    // Next we do the Breakpoint events. The dispatch code will filter out envs that do not have a
    // breakpoint set at this particular location.
    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kBreakpoint)) {
      RunEventCallback<ArtJvmtiEvent::kBreakpoint>(event_handler_, self, jnienv, jmethod, location);
    }
  }

  // Call-back for when we read from a field.
  void FieldRead(art::Thread* self,
                 art::Handle<art::mirror::Object> this_object,
                 art::ArtMethod* method,
                 uint32_t dex_pc,
                 art::ArtField* field)
      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kFieldAccess)) {
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      // DCHECK(!self->IsExceptionPending());
      ScopedLocalRef<jobject> this_ref(jnienv, AddLocalRef<jobject>(jnienv, this_object.Get()));
      ScopedLocalRef<jobject> fklass(jnienv,
                                     AddLocalRef<jobject>(jnienv,
                                                          field->GetDeclaringClass().Ptr()));
      RunEventCallback<ArtJvmtiEvent::kFieldAccess>(event_handler_,
                                                    self,
                                                    jnienv,
                                                    art::jni::EncodeArtMethod(method),
                                                    static_cast<jlocation>(dex_pc),
                                                    static_cast<jclass>(fklass.get()),
                                                    this_ref.get(),
                                                    art::jni::EncodeArtField(field));
    }
  }

  // Call-back for when we write a reference value into a field.
  void FieldWritten(art::Thread* self,
                    art::Handle<art::mirror::Object> this_object,
                    art::ArtMethod* method,
                    uint32_t dex_pc,
                    art::ArtField* field,
                    art::Handle<art::mirror::Object> new_val)
      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kFieldModification)) {
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      // DCHECK(!self->IsExceptionPending());
      ScopedLocalRef<jobject> this_ref(jnienv, AddLocalRef<jobject>(jnienv, this_object.Get()));
      ScopedLocalRef<jobject> fklass(jnienv,
                                     AddLocalRef<jobject>(jnienv,
                                                          field->GetDeclaringClass().Ptr()));
      ScopedLocalRef<jobject> fval(jnienv, AddLocalRef<jobject>(jnienv, new_val.Get()));
      jvalue val;
      val.l = fval.get();
      RunEventCallback<ArtJvmtiEvent::kFieldModification>(
          event_handler_,
          self,
          jnienv,
          art::jni::EncodeArtMethod(method),
          static_cast<jlocation>(dex_pc),
          static_cast<jclass>(fklass.get()),
          field->IsStatic() ? nullptr : this_ref.get(),
          art::jni::EncodeArtField(field),
          'L',  // type_char
          val);
    }
  }

  // Call-back for when we write a primitive value into a field.
  void FieldWritten(art::Thread* self,
                    art::Handle<art::mirror::Object> this_object,
                    art::ArtMethod* method,
                    uint32_t dex_pc,
                    art::ArtField* field,
                    const art::JValue& field_value)
      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kFieldModification)) {
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      DCHECK(!self->IsExceptionPending());
      ScopedLocalRef<jobject> this_ref(jnienv, AddLocalRef<jobject>(jnienv, this_object.Get()));
      ScopedLocalRef<jobject> fklass(jnienv,
                                     AddLocalRef<jobject>(jnienv,
                                                          field->GetDeclaringClass().Ptr()));
      char type_char = art::Primitive::Descriptor(field->GetTypeAsPrimitiveType())[0];
      jvalue val;
      // The 64-bit integer is the largest member of the jvalue union, so copying it is sufficient
      // to transfer any primitive value.
      val.j = field_value.GetJ();
      RunEventCallback<ArtJvmtiEvent::kFieldModification>(
          event_handler_,
          self,
          jnienv,
          art::jni::EncodeArtMethod(method),
          static_cast<jlocation>(dex_pc),
          static_cast<jclass>(fklass.get()),
          // NB: for static field modifications the instrumentation reports the declaring class
          // as this_object, so pass nullptr in that case.
          field->IsStatic() ? nullptr : this_ref.get(),
          art::jni::EncodeArtField(field),
          type_char,
          val);
    }
  }

  void WatchedFramePop(art::Thread* self, const art::ShadowFrame& frame)
      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
    art::JNIEnvExt* jnienv = self->GetJniEnv();
    jboolean is_exception_pending = self->IsExceptionPending();
    RunEventCallback<ArtJvmtiEvent::kFramePop>(
        event_handler_,
        self,
        jnienv,
        art::jni::EncodeArtMethod(frame.GetMethod()),
        is_exception_pending,
        &frame);
  }

  static void FindCatchMethodsFromThrow(art::Thread* self,
                                        art::Handle<art::mirror::Throwable> exception,
                                        /*out*/ art::ArtMethod** out_method,
                                        /*out*/ uint32_t* dex_pc)
      REQUIRES_SHARED(art::Locks::mutator_lock_) {
    // Finds the location where this exception will most likely be caught. We ignore intervening
    // native frames (which could catch the exception) and return the closest Java frame with a
    // compatible catch statement.
    class CatchLocationFinder FINAL : public art::StackVisitor {
     public:
      CatchLocationFinder(art::Thread* target,
                          art::Handle<art::mirror::Class> exception_class,
                          art::Context* context,
                          /*out*/ art::ArtMethod** out_catch_method,
                          /*out*/ uint32_t* out_catch_pc)
          REQUIRES_SHARED(art::Locks::mutator_lock_)
          : StackVisitor(target, context, art::StackVisitor::StackWalkKind::kIncludeInlinedFrames),
            exception_class_(exception_class),
            catch_method_ptr_(out_catch_method),
            catch_dex_pc_ptr_(out_catch_pc) {}

      bool VisitFrame() OVERRIDE REQUIRES_SHARED(art::Locks::mutator_lock_) {
        art::ArtMethod* method = GetMethod();
        DCHECK(method != nullptr);
        if (method->IsRuntimeMethod()) {
          return true;
        }

        if (!method->IsNative()) {
          uint32_t cur_dex_pc = GetDexPc();
          if (cur_dex_pc == art::dex::kDexNoIndex) {
            // This frame looks opaque. Just keep on going.
            return true;
          }
          bool has_no_move_exception = false;
          uint32_t found_dex_pc = method->FindCatchBlock(
              exception_class_, cur_dex_pc, &has_no_move_exception);
          if (found_dex_pc != art::dex::kDexNoIndex) {
            // We found the catch. Store the result and return.
            *catch_method_ptr_ = method;
            *catch_dex_pc_ptr_ = found_dex_pc;
            return false;
          }
        }
        return true;
      }

     private:
      art::Handle<art::mirror::Class> exception_class_;
      art::ArtMethod** catch_method_ptr_;
      uint32_t* catch_dex_pc_ptr_;

      DISALLOW_COPY_AND_ASSIGN(CatchLocationFinder);
    };

    art::StackHandleScope<1> hs(self);
    *out_method = nullptr;
    *dex_pc = 0;
    std::unique_ptr<art::Context> context(art::Context::Create());

    CatchLocationFinder clf(self,
                            hs.NewHandle(exception->GetClass()),
                            context.get(),
                            /*out*/ out_method,
                            /*out*/ dex_pc);
    clf.WalkStack(/* include_transitions */ false);
  }

  // Call-back when an exception is thrown.
  void ExceptionThrown(art::Thread* self, art::Handle<art::mirror::Throwable> exception_object)
      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
    DCHECK(self->IsExceptionThrownByCurrentMethod(exception_object.Get()));
    // The instrumentation events get rid of this for us.
    DCHECK(!self->IsExceptionPending());
    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kException)) {
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      art::ArtMethod* catch_method;
      uint32_t catch_pc;
      FindCatchMethodsFromThrow(self, exception_object, &catch_method, &catch_pc);
      uint32_t dex_pc = 0;
      art::ArtMethod* method = self->GetCurrentMethod(&dex_pc,
                                                      /* check_suspended */ true,
                                                      /* abort_on_error */ art::kIsDebugBuild);
      ScopedLocalRef<jobject> exception(jnienv,
                                        AddLocalRef<jobject>(jnienv, exception_object.Get()));
      RunEventCallback<ArtJvmtiEvent::kException>(
          event_handler_,
          self,
          jnienv,
          art::jni::EncodeArtMethod(method),
          static_cast<jlocation>(dex_pc),
          exception.get(),
          art::jni::EncodeArtMethod(catch_method),
          static_cast<jlocation>(catch_pc));
    }
    return;
  }

  // Call-back when an exception is handled.
  void ExceptionHandled(art::Thread* self, art::Handle<art::mirror::Throwable> exception_object)
      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
    // Since the exception has already been handled there shouldn't be one pending.
    DCHECK(!self->IsExceptionPending());
    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kExceptionCatch)) {
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      uint32_t dex_pc;
      art::ArtMethod* method = self->GetCurrentMethod(&dex_pc,
                                                      /* check_suspended */ true,
                                                      /* abort_on_error */ art::kIsDebugBuild);
      ScopedLocalRef<jobject> exception(jnienv,
                                        AddLocalRef<jobject>(jnienv, exception_object.Get()));
      RunEventCallback<ArtJvmtiEvent::kExceptionCatch>(
          event_handler_,
          self,
          jnienv,
          art::jni::EncodeArtMethod(method),
          static_cast<jlocation>(dex_pc),
          exception.get());
    }
    return;
  }

  // Call-back for when we execute a branch.
  void Branch(art::Thread* self ATTRIBUTE_UNUSED,
              art::ArtMethod* method ATTRIBUTE_UNUSED,
              uint32_t dex_pc ATTRIBUTE_UNUSED,
              int32_t dex_pc_offset ATTRIBUTE_UNUSED)
      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
    return;
  }

  // Call-back for when we get an invokevirtual or an invokeinterface.
  void InvokeVirtualOrInterface(art::Thread* self ATTRIBUTE_UNUSED,
                                art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
                                art::ArtMethod* caller ATTRIBUTE_UNUSED,
                                uint32_t dex_pc ATTRIBUTE_UNUSED,
                                art::ArtMethod* callee ATTRIBUTE_UNUSED)
      REQUIRES_SHARED(art::Locks::mutator_lock_) OVERRIDE {
    return;
  }

 private:
  EventHandler* const event_handler_;
};

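// Maps a JVMTI event onto the mask of ART instrumentation events that must be listened to in
// order to report it. Note that kBreakpoint and kSingleStep intentionally share kDexPcMoved,
// and kMethodExit also requires kMethodUnwind so exceptional returns are observed.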
static uint32_t GetInstrumentationEventsFor(ArtJvmtiEvent event) {
  switch (event) {
    case ArtJvmtiEvent::kMethodEntry:
      return art::instrumentation::Instrumentation::kMethodEntered;
    case ArtJvmtiEvent::kMethodExit:
      return art::instrumentation::Instrumentation::kMethodExited |
             art::instrumentation::Instrumentation::kMethodUnwind;
    case ArtJvmtiEvent::kFieldModification:
      return art::instrumentation::Instrumentation::kFieldWritten;
    case ArtJvmtiEvent::kFieldAccess:
      return art::instrumentation::Instrumentation::kFieldRead;
    case ArtJvmtiEvent::kBreakpoint:
    case ArtJvmtiEvent::kSingleStep:
      return art::instrumentation::Instrumentation::kDexPcMoved;
    case ArtJvmtiEvent::kFramePop:
      return art::instrumentation::Instrumentation::kWatchedFramePop;
    case ArtJvmtiEvent::kException:
      return art::instrumentation::Instrumentation::kExceptionThrown;
    case ArtJvmtiEvent::kExceptionCatch:
      return art::instrumentation::Instrumentation::kExceptionHandled;
    default:
      LOG(FATAL) << "Unknown event";
      return 0;
  }
}

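// Decides whether enabling the event requires deoptimizing all methods (forcing the switch
// interpreter everywhere) or whether selective deoptimization suffices. Breakpoints and
// exception-throw events can be handled by deoptimizing only the affected methods, so they do
// not force the full deopt.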
static bool EventNeedsFullDeopt(ArtJvmtiEvent event) {
  switch (event) {
    case ArtJvmtiEvent::kBreakpoint:
    case ArtJvmtiEvent::kException:
      return false;
    // TODO We should support more of these or at least do something to make them discriminate by
    // thread.
    case ArtJvmtiEvent::kMethodEntry:
    case ArtJvmtiEvent::kExceptionCatch:
    case ArtJvmtiEvent::kMethodExit:
    case ArtJvmtiEvent::kFieldModification:
    case ArtJvmtiEvent::kFieldAccess:
    case ArtJvmtiEvent::kSingleStep:
    case ArtJvmtiEvent::kFramePop:
      return true;
    default:
      LOG(FATAL) << "Unexpected event type!";
      UNREACHABLE();
  }
}

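// Installs or removes the method-trace listener for the given event. The deopt bookkeeping is
// done first (under ScopedObjectAccess), and the instrumentation listener set is only changed
// afterwards under ScopedSuspendAll, since the listener tables must not change while other
// threads are running instrumented code.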
void EventHandler::SetupTraceListener(JvmtiMethodTraceListener* listener,
                                      ArtJvmtiEvent event,
                                      bool enable) {
  bool needs_full_deopt = EventNeedsFullDeopt(event);
  // Make sure we can deopt.
  {
    art::ScopedObjectAccess soa(art::Thread::Current());
    DeoptManager* deopt_manager = DeoptManager::Get();
    if (enable) {
      deopt_manager->AddDeoptimizationRequester();
      if (needs_full_deopt) {
        deopt_manager->AddDeoptimizeAllMethods();
      }
    } else {
      if (needs_full_deopt) {
        deopt_manager->RemoveDeoptimizeAllMethods();
      }
      deopt_manager->RemoveDeoptimizationRequester();
    }
  }

  // Add the actual listeners.
  uint32_t new_events = GetInstrumentationEventsFor(event);
  if (new_events == art::instrumentation::Instrumentation::kDexPcMoved) {
    // We may need to skip adding the listeners if the event is breakpoint/single-step, since
    // those events share the same underlying art-instrumentation event. They still get their own
    // deopt request above, which is why this check is deferred until here.
    DCHECK(event == ArtJvmtiEvent::kBreakpoint || event == ArtJvmtiEvent::kSingleStep);
    ArtJvmtiEvent other = event == ArtJvmtiEvent::kBreakpoint ? ArtJvmtiEvent::kSingleStep
                                                              : ArtJvmtiEvent::kBreakpoint;
    if (IsEventEnabledAnywhere(other)) {
      // The event needs to be kept around/is already enabled by the other jvmti event that uses
      // the same instrumentation event.
      return;
    }
  }
  art::ScopedThreadStateChange stsc(art::Thread::Current(), art::ThreadState::kNative);
  art::instrumentation::Instrumentation* instr = art::Runtime::Current()->GetInstrumentation();
  art::ScopedSuspendAll ssa("jvmti method tracing installation");
  if (enable) {
    instr->AddListener(listener, new_events);
  } else {
    instr->RemoveListener(listener, new_events);
  }
}

// Makes sure that all compiled methods are AsyncDeoptimizable so we can deoptimize (and force to
// the switch interpreter) when we try to get or set a local variable.
void EventHandler::HandleLocalAccessCapabilityAdded() {
  class UpdateEntryPointsClassVisitor : public art::ClassVisitor {
   public:
    explicit UpdateEntryPointsClassVisitor(art::Runtime* runtime)
        : runtime_(runtime) {}

    bool operator()(art::ObjPtr<art::mirror::Class> klass)
        OVERRIDE REQUIRES(art::Locks::mutator_lock_) {
      if (!klass->IsLoaded()) {
        // Skip classes that aren't loaded since they might not have fully allocated and
        // initialized their methods. Furthermore, since the jvmti-plugin must have been loaded by
        // this point these methods will definitely be using debuggable code.
        return true;
      }
      for (auto& m : klass->GetMethods(art::kRuntimePointerSize)) {
        const void* code = m.GetEntryPointFromQuickCompiledCode();
        if (m.IsNative() || m.IsProxyMethod()) {
          continue;
        } else if (!runtime_->GetClassLinker()->IsQuickToInterpreterBridge(code) &&
                   !runtime_->IsAsyncDeoptimizeable(reinterpret_cast<uintptr_t>(code))) {
          runtime_->GetInstrumentation()->UpdateMethodsCodeToInterpreterEntryPoint(&m);
        }
      }
      return true;
    }

   private:
    art::Runtime* runtime_;
  };
  art::ScopedObjectAccess soa(art::Thread::Current());
  UpdateEntryPointsClassVisitor visitor(art::Runtime::Current());
  art::Runtime::Current()->GetClassLinker()->VisitClasses(&visitor);
}

bool EventHandler::OtherMonitorEventsEnabledAnywhere(ArtJvmtiEvent event) {
  std::array<ArtJvmtiEvent, 4> events {
      {
        ArtJvmtiEvent::kMonitorContendedEnter,
        ArtJvmtiEvent::kMonitorContendedEntered,
        ArtJvmtiEvent::kMonitorWait,
        ArtJvmtiEvent::kMonitorWaited
      }
  };
  for (ArtJvmtiEvent e : events) {
    if (e != event && IsEventEnabledAnywhere(e)) {
      return true;
    }
  }
  return false;
}

// Handle special work for the given event type, if necessary.
void EventHandler::HandleEventType(ArtJvmtiEvent event, bool enable) {
  switch (event) {
    case ArtJvmtiEvent::kDdmPublishChunk:
      SetupDdmTracking(ddm_listener_.get(), enable);
      return;
    case ArtJvmtiEvent::kVmObjectAlloc:
      SetupObjectAllocationTracking(alloc_listener_.get(), enable);
      return;

    case ArtJvmtiEvent::kGarbageCollectionStart:
    case ArtJvmtiEvent::kGarbageCollectionFinish:
      SetupGcPauseTracking(gc_pause_listener_.get(), event, enable);
      return;
    // FramePop can never be disabled once it's been turned on since we would either need to deal
    // with dangling pointers or have missed events.
    // TODO We really need to make this not the case anymore.
    case ArtJvmtiEvent::kFramePop:
      if (!enable || frame_pop_enabled) {
        // Disabling is ignored (see above) and enabling twice is a no-op.
        break;
      } else {
        SetupTraceListener(method_trace_listener_.get(), event, enable);
        // Remember that the listener is installed so we never tear it down again.
        frame_pop_enabled = true;
        break;
      }
    case ArtJvmtiEvent::kMethodEntry:
    case ArtJvmtiEvent::kMethodExit:
    case ArtJvmtiEvent::kFieldAccess:
    case ArtJvmtiEvent::kFieldModification:
    case ArtJvmtiEvent::kException:
    case ArtJvmtiEvent::kExceptionCatch:
    case ArtJvmtiEvent::kBreakpoint:
    case ArtJvmtiEvent::kSingleStep:
      SetupTraceListener(method_trace_listener_.get(), event, enable);
      return;
    case ArtJvmtiEvent::kMonitorContendedEnter:
    case ArtJvmtiEvent::kMonitorContendedEntered:
    case ArtJvmtiEvent::kMonitorWait:
    case ArtJvmtiEvent::kMonitorWaited:
      if (!OtherMonitorEventsEnabledAnywhere(event)) {
        SetupMonitorListener(monitor_listener_.get(), enable);
      }
      return;
    default:
      break;
  }
}

// Checks to see if the env has the capabilities associated with the given event.
static bool HasAssociatedCapability(ArtJvmTiEnv* env,
                                    ArtJvmtiEvent event) {
  jvmtiCapabilities caps = env->capabilities;
  switch (event) {
    case ArtJvmtiEvent::kBreakpoint:
      return caps.can_generate_breakpoint_events == 1;

    case ArtJvmtiEvent::kCompiledMethodLoad:
    case ArtJvmtiEvent::kCompiledMethodUnload:
      return caps.can_generate_compiled_method_load_events == 1;

    case ArtJvmtiEvent::kException:
    case ArtJvmtiEvent::kExceptionCatch:
      return caps.can_generate_exception_events == 1;

    case ArtJvmtiEvent::kFieldAccess:
      return caps.can_generate_field_access_events == 1;

    case ArtJvmtiEvent::kFieldModification:
      return caps.can_generate_field_modification_events == 1;

    case ArtJvmtiEvent::kFramePop:
      return caps.can_generate_frame_pop_events == 1;

    case ArtJvmtiEvent::kGarbageCollectionStart:
    case ArtJvmtiEvent::kGarbageCollectionFinish:
      return caps.can_generate_garbage_collection_events == 1;

    case ArtJvmtiEvent::kMethodEntry:
      return caps.can_generate_method_entry_events == 1;

    case ArtJvmtiEvent::kMethodExit:
      return caps.can_generate_method_exit_events == 1;

    case ArtJvmtiEvent::kMonitorContendedEnter:
    case ArtJvmtiEvent::kMonitorContendedEntered:
    case ArtJvmtiEvent::kMonitorWait:
    case ArtJvmtiEvent::kMonitorWaited:
      return caps.can_generate_monitor_events == 1;

    case ArtJvmtiEvent::kNativeMethodBind:
      return caps.can_generate_native_method_bind_events == 1;

    case ArtJvmtiEvent::kObjectFree:
      return caps.can_generate_object_free_events == 1;

    case ArtJvmtiEvent::kSingleStep:
      return caps.can_generate_single_step_events == 1;

    case ArtJvmtiEvent::kVmObjectAlloc:
      return caps.can_generate_vm_object_alloc_events == 1;

    default:
      return true;
  }
}

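// Implements the core of SetEventNotificationMode: validate the thread filter, the mode, the
// event id, and the env's capabilities, then flip the masks while holding both the global envs
// lock and the env's event-info lock. Listener/deopt work only happens when the event's
// globally-enabled state actually changes.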
jvmtiError EventHandler::SetEvent(ArtJvmTiEnv* env,
                                  art::Thread* thread,
                                  ArtJvmtiEvent event,
                                  jvmtiEventMode mode) {
  if (thread != nullptr) {
    art::ThreadState state = thread->GetState();
    if (state == art::ThreadState::kStarting ||
        state == art::ThreadState::kTerminated ||
        thread->IsStillStarting()) {
      return ERR(THREAD_NOT_ALIVE);
    }
    if (!IsThreadControllable(event)) {
      return ERR(ILLEGAL_ARGUMENT);
    }
  }

  if (mode != JVMTI_ENABLE && mode != JVMTI_DISABLE) {
    return ERR(ILLEGAL_ARGUMENT);
  }

  if (!EventMask::EventIsInRange(event)) {
    return ERR(INVALID_EVENT_TYPE);
  }

  if (!HasAssociatedCapability(env, event)) {
    return ERR(MUST_POSSESS_CAPABILITY);
  }

  bool old_state;
  bool new_state;

  {
    // Change the event masks atomically.
    art::Thread* self = art::Thread::Current();
    art::WriterMutexLock mu(self, envs_lock_);
    art::WriterMutexLock mu_env_info(self, env->event_info_mutex_);
    old_state = global_mask.Test(event);
    if (mode == JVMTI_ENABLE) {
      env->event_masks.EnableEvent(env, thread, event);
      global_mask.Set(event);
      new_state = true;
    } else {
      DCHECK_EQ(mode, JVMTI_DISABLE);

      env->event_masks.DisableEvent(env, thread, event);
      RecalculateGlobalEventMaskLocked(event);
      new_state = global_mask.Test(event);
    }
  }

  // Handle any special work required for the event type.
  if (new_state != old_state) {
    HandleEventType(event, mode == JVMTI_ENABLE);
  }

  return ERR(NONE);
}

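// For reference, a typical agent reaches SetEvent via the standard JVMTI API, e.g. (sketch,
// assuming a jvmtiEnv* 'jvmti' obtained from GetEnv and a registered MethodEntry callback):
//
//   jvmtiError err = jvmti->SetEventNotificationMode(JVMTI_ENABLE,
//                                                    JVMTI_EVENT_METHOD_ENTRY,
//                                                    nullptr /* all threads */);
//
// Passing a jthread instead of nullptr restricts the event to that thread, which lands in the
// per-thread masks above; nullptr toggles the global mask.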
void EventHandler::HandleBreakpointEventsChanged(bool added) {
  if (added) {
    DeoptManager::Get()->AddDeoptimizationRequester();
  } else {
    DeoptManager::Get()->RemoveDeoptimizationRequester();
  }
}

void EventHandler::Shutdown() {
  // Need to remove the method_trace_listener_ if it's there.
  art::Thread* self = art::Thread::Current();
  art::gc::ScopedGCCriticalSection gcs(self,
                                       art::gc::kGcCauseInstrumentation,
                                       art::gc::kCollectorTypeInstrumentation);
  art::ScopedSuspendAll ssa("jvmti method tracing uninstallation");
  // Just remove every possible event.
  art::Runtime::Current()->GetInstrumentation()->RemoveListener(method_trace_listener_.get(), ~0);
}

EventHandler::EventHandler()
    : envs_lock_("JVMTI Environment List Lock", art::LockLevel::kTopLockLevel),
      frame_pop_enabled(false) {
  alloc_listener_.reset(new JvmtiAllocationListener(this));
  ddm_listener_.reset(new JvmtiDdmChunkListener(this));
  gc_pause_listener_.reset(new JvmtiGcPauseListener(this));
  method_trace_listener_.reset(new JvmtiMethodTraceListener(this));
  monitor_listener_.reset(new JvmtiMonitorListener(this));
}

EventHandler::~EventHandler() {
}

}  // namespace openjdkjvmti