1 /*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #include "instrumentation.h"
18
19 #include <sstream>
20
21 #include "arch/context.h"
22 #include "art_field-inl.h"
23 #include "art_method-inl.h"
24 #include "base/atomic.h"
25 #include "base/callee_save_type.h"
26 #include "class_linker.h"
27 #include "debugger.h"
28 #include "dex/dex_file-inl.h"
29 #include "dex/dex_file_types.h"
30 #include "dex/dex_instruction-inl.h"
31 #include "entrypoints/quick/quick_alloc_entrypoints.h"
32 #include "entrypoints/quick/quick_entrypoints.h"
33 #include "entrypoints/runtime_asm_entrypoints.h"
34 #include "gc_root-inl.h"
35 #include "interpreter/interpreter.h"
36 #include "interpreter/interpreter_common.h"
37 #include "jit/jit.h"
38 #include "jit/jit_code_cache.h"
39 #include "jvalue-inl.h"
40 #include "mirror/class-inl.h"
41 #include "mirror/dex_cache.h"
42 #include "mirror/object-inl.h"
43 #include "mirror/object_array-inl.h"
44 #include "nth_caller_visitor.h"
45 #include "oat_quick_method_header.h"
46 #include "thread.h"
47 #include "thread_list.h"
48
49 namespace art {
50 namespace instrumentation {
51
52 constexpr bool kVerboseInstrumentation = false;
53
MethodExited(Thread * thread,Handle<mirror::Object> this_object,ArtMethod * method,uint32_t dex_pc,Handle<mirror::Object> return_value)54 void InstrumentationListener::MethodExited(Thread* thread,
55 Handle<mirror::Object> this_object,
56 ArtMethod* method,
57 uint32_t dex_pc,
58 Handle<mirror::Object> return_value) {
59 DCHECK_EQ(method->GetInterfaceMethodIfProxy(kRuntimePointerSize)->GetReturnTypePrimitive(),
60 Primitive::kPrimNot);
61 JValue v;
62 v.SetL(return_value.Get());
63 MethodExited(thread, this_object, method, dex_pc, v);
64 }
65
FieldWritten(Thread * thread,Handle<mirror::Object> this_object,ArtMethod * method,uint32_t dex_pc,ArtField * field,Handle<mirror::Object> field_value)66 void InstrumentationListener::FieldWritten(Thread* thread,
67 Handle<mirror::Object> this_object,
68 ArtMethod* method,
69 uint32_t dex_pc,
70 ArtField* field,
71 Handle<mirror::Object> field_value) {
72 DCHECK(!field->IsPrimitiveType());
73 JValue v;
74 v.SetL(field_value.Get());
75 FieldWritten(thread, this_object, method, dex_pc, field, v);
76 }
77
// Instrumentation works on non-inlined frames by updating returned PCs
// of compiled frames.
// All instrumentation stack walks therefore skip inlined frames.
static constexpr StackVisitor::StackWalkKind kInstrumentationStackWalk =
    StackVisitor::StackWalkKind::kSkipInlinedFrames;
82
// ClassVisitor that (re)installs instrumentation entry points for every
// method of each visited class. Used when stubs are installed or removed
// globally (see Instrumentation::ConfigureStubs).
class InstallStubsClassVisitor : public ClassVisitor {
 public:
  explicit InstallStubsClassVisitor(Instrumentation* instrumentation)
      : instrumentation_(instrumentation) {}

  bool operator()(ObjPtr<mirror::Class> klass) OVERRIDE REQUIRES(Locks::mutator_lock_) {
    instrumentation_->InstallStubsForClass(klass.Ptr());
    return true;  // we visit all classes.
  }

 private:
  // The instrumentation whose stubs are being installed; not owned.
  Instrumentation* const instrumentation_;
};
96
97
// Constructs instrumentation in its fully disabled state: no stubs installed,
// no listeners registered, and allocation entrypoints not instrumented.
Instrumentation::Instrumentation()
    : instrumentation_stubs_installed_(false),
      entry_exit_stubs_installed_(false),
      interpreter_stubs_installed_(false),
      interpret_only_(false),
      forced_interpret_only_(false),
      have_method_entry_listeners_(false),
      have_method_exit_listeners_(false),
      have_method_unwind_listeners_(false),
      have_dex_pc_listeners_(false),
      have_field_read_listeners_(false),
      have_field_write_listeners_(false),
      have_exception_thrown_listeners_(false),
      have_watched_frame_pop_listeners_(false),
      have_branch_listeners_(false),
      have_invoke_virtual_or_interface_listeners_(false),
      have_exception_handled_listeners_(false),
      deoptimized_methods_lock_("deoptimized methods lock", kDeoptimizedMethodsLock),
      deoptimization_enabled_(false),
      interpreter_handler_table_(kMainHandlerTable),
      quick_alloc_entry_points_instrumentation_counter_(0),
      alloc_entrypoints_instrumented_(false) {
}
121
InstallStubsForClass(mirror::Class * klass)122 void Instrumentation::InstallStubsForClass(mirror::Class* klass) {
123 if (!klass->IsResolved()) {
124 // We need the class to be resolved to install/uninstall stubs. Otherwise its methods
125 // could not be initialized or linked with regards to class inheritance.
126 } else if (klass->IsErroneousResolved()) {
127 // We can't execute code in a erroneous class: do nothing.
128 } else {
129 for (ArtMethod& method : klass->GetMethods(kRuntimePointerSize)) {
130 InstallStubsForMethod(&method);
131 }
132 }
133 }
134
// Points `method`'s quick-compiled-code entrypoint at `quick_code`.
// Shared helper so all entrypoint updates in this file go through one place.
static void UpdateEntrypoints(ArtMethod* method, const void* quick_code)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  method->SetEntryPointFromQuickCompiledCode(quick_code);
}
139
NeedDebugVersionFor(ArtMethod * method) const140 bool Instrumentation::NeedDebugVersionFor(ArtMethod* method) const
141 REQUIRES_SHARED(Locks::mutator_lock_) {
142 art::Runtime* runtime = Runtime::Current();
143 // If anything says we need the debug version or we are debuggable we will need the debug version
144 // of the method.
145 return (runtime->GetRuntimeCallbacks()->MethodNeedsDebugVersion(method) ||
146 runtime->IsJavaDebuggable()) &&
147 !method->IsNative() &&
148 !method->IsProxyMethod();
149 }
150
// Selects and installs the correct quick entrypoint for `method` based on the
// current instrumentation state: oat code when uninstrumented, the
// instrumentation entry stub for entry/exit events, or the interpreter bridge
// when interpretation is forced or the method is deoptimized.
void Instrumentation::InstallStubsForMethod(ArtMethod* method) {
  if (!method->IsInvokable() || method->IsProxyMethod()) {
    // Do not change stubs for these methods.
    return;
  }
  // Don't stub Proxy.<init>. Note that the Proxy class itself is not a proxy class.
  if (method->IsConstructor() &&
      method->GetDeclaringClass()->DescriptorEquals("Ljava/lang/reflect/Proxy;")) {
    return;
  }
  const void* new_quick_code;
  // "uninstall" means no global stubs are required: restore normal entrypoints.
  bool uninstall = !entry_exit_stubs_installed_ && !interpreter_stubs_installed_;
  Runtime* const runtime = Runtime::Current();
  ClassLinker* const class_linker = runtime->GetClassLinker();
  bool is_class_initialized = method->GetDeclaringClass()->IsInitialized();
  if (uninstall) {
    if ((forced_interpret_only_ || IsDeoptimized(method)) && !method->IsNative()) {
      new_quick_code = GetQuickToInterpreterBridge();
    } else if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
      if (NeedDebugVersionFor(method)) {
        new_quick_code = GetQuickToInterpreterBridge();
      } else {
        new_quick_code = class_linker->GetQuickOatCodeFor(method);
      }
    } else {
      // Uninitialized-class static methods keep the resolution stub so class
      // initialization still happens on first call.
      new_quick_code = GetQuickResolutionStub();
    }
  } else {  // !uninstall
    if ((interpreter_stubs_installed_ || forced_interpret_only_ || IsDeoptimized(method)) &&
        !method->IsNative()) {
      new_quick_code = GetQuickToInterpreterBridge();
    } else {
      // Do not overwrite resolution trampoline. When the trampoline initializes the method's
      // class, all its static methods code will be set to the instrumentation entry point.
      // For more details, see ClassLinker::FixupStaticTrampolines.
      if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
        if (NeedDebugVersionFor(method)) {
          // Oat code should not be used. Don't install instrumentation stub and
          // use interpreter for instrumentation.
          new_quick_code = GetQuickToInterpreterBridge();
        } else if (entry_exit_stubs_installed_) {
          new_quick_code = GetQuickInstrumentationEntryPoint();
        } else {
          new_quick_code = class_linker->GetQuickOatCodeFor(method);
        }
      } else {
        new_quick_code = GetQuickResolutionStub();
      }
    }
  }
  UpdateEntrypoints(method, new_quick_code);
}
203
// Places the instrumentation exit pc as the return PC for every quick frame. This also allows
// deoptimization of quick frames to interpreter frames.
// Since we may already have done this previously, we need to push new instrumentation frame before
// existing instrumentation frames.
static void InstrumentationInstallStack(Thread* thread, void* arg)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Visitor that walks `thread`'s stack, replacing each quick frame's return PC
  // with the instrumentation exit stub and recording an InstrumentationStackFrame
  // so the original return PC can be restored later.
  struct InstallStackVisitor FINAL : public StackVisitor {
    InstallStackVisitor(Thread* thread_in, Context* context, uintptr_t instrumentation_exit_pc)
        : StackVisitor(thread_in, context, kInstrumentationStackWalk),
          instrumentation_stack_(thread_in->GetInstrumentationStack()),
          instrumentation_exit_pc_(instrumentation_exit_pc),
          reached_existing_instrumentation_frames_(false), instrumentation_stack_depth_(0),
          last_return_pc_(0) {
    }

    bool VisitFrame() OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_) {
      ArtMethod* m = GetMethod();
      if (m == nullptr) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << " Skipping upcall. Frame " << GetFrameId();
        }
        last_return_pc_ = 0;
        return true;  // Ignore upcalls.
      }
      if (GetCurrentQuickFrame() == nullptr) {
        // Interpreter (shadow) frames have no return PC to patch; remember them
        // separately so method-enter events can still be generated.
        bool interpreter_frame = true;
        InstrumentationStackFrame instrumentation_frame(GetThisObject(), m, 0, GetFrameId(),
                                                        interpreter_frame);
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Pushing shadow frame " << instrumentation_frame.Dump();
        }
        shadow_stack_.push_back(instrumentation_frame);
        return true;  // Continue.
      }
      uintptr_t return_pc = GetReturnPc();
      if (kVerboseInstrumentation) {
        LOG(INFO) << " Installing exit stub in " << DescribeLocation();
      }
      if (return_pc == instrumentation_exit_pc_) {
        // This frame was already instrumented on a previous install.
        CHECK_LT(instrumentation_stack_depth_, instrumentation_stack_->size());

        if (m->IsRuntimeMethod()) {
          const InstrumentationStackFrame& frame =
              instrumentation_stack_->at(instrumentation_stack_depth_);
          if (frame.interpreter_entry_) {
            // This instrumentation frame is for an interpreter bridge and is
            // pushed when executing the instrumented interpreter bridge. So method
            // enter event must have been reported. However we need to push a DEX pc
            // into the dex_pcs_ list to match size of instrumentation stack.
            uint32_t dex_pc = dex::kDexNoIndex;
            dex_pcs_.push_back(dex_pc);
            last_return_pc_ = frame.return_pc_;
            ++instrumentation_stack_depth_;
            return true;
          }
        }

        // We've reached a frame which has already been installed with instrumentation exit stub.
        // We should have already installed instrumentation or be interpreter on previous frames.
        reached_existing_instrumentation_frames_ = true;

        const InstrumentationStackFrame& frame =
            instrumentation_stack_->at(instrumentation_stack_depth_);
        CHECK_EQ(m, frame.method_) << "Expected " << ArtMethod::PrettyMethod(m)
                                   << ", Found " << ArtMethod::PrettyMethod(frame.method_);
        return_pc = frame.return_pc_;
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Ignoring already instrumented " << frame.Dump();
        }
      } else {
        CHECK_NE(return_pc, 0U);
        if (UNLIKELY(reached_existing_instrumentation_frames_ && !m->IsRuntimeMethod())) {
          // We already saw an existing instrumentation frame so this should be a runtime-method
          // inserted by the interpreter or runtime.
          std::string thread_name;
          GetThread()->GetThreadName(thread_name);
          uint32_t dex_pc = dex::kDexNoIndex;
          if (last_return_pc_ != 0 &&
              GetCurrentOatQuickMethodHeader() != nullptr) {
            dex_pc = GetCurrentOatQuickMethodHeader()->ToDexPc(m, last_return_pc_);
          }
          LOG(FATAL) << "While walking " << thread_name << " found unexpected non-runtime method"
                     << " without instrumentation exit return or interpreter frame."
                     << " method is " << GetMethod()->PrettyMethod()
                     << " return_pc is " << std::hex << return_pc
                     << " dex pc: " << dex_pc;
          UNREACHABLE();
        }
        InstrumentationStackFrame instrumentation_frame(
            m->IsRuntimeMethod() ? nullptr : GetThisObject(),
            m,
            return_pc,
            GetFrameId(),  // A runtime method still gets a frame id.
            false);
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Pushing frame " << instrumentation_frame.Dump();
        }

        // Insert frame at the right position so we do not corrupt the instrumentation stack.
        // Instrumentation stack frames are in descending frame id order.
        auto it = instrumentation_stack_->begin();
        for (auto end = instrumentation_stack_->end(); it != end; ++it) {
          const InstrumentationStackFrame& current = *it;
          if (instrumentation_frame.frame_id_ >= current.frame_id_) {
            break;
          }
        }
        instrumentation_stack_->insert(it, instrumentation_frame);
        SetReturnPc(instrumentation_exit_pc_);
      }
      // Record a dex pc (or kDexNoIndex) for this frame; sizes of dex_pcs_ and
      // the instrumentation stack must stay in sync (checked by the caller).
      uint32_t dex_pc = dex::kDexNoIndex;
      if (last_return_pc_ != 0 &&
          GetCurrentOatQuickMethodHeader() != nullptr) {
        dex_pc = GetCurrentOatQuickMethodHeader()->ToDexPc(m, last_return_pc_);
      }
      dex_pcs_.push_back(dex_pc);
      last_return_pc_ = return_pc;
      ++instrumentation_stack_depth_;
      return true;  // Continue.
    }
    std::deque<InstrumentationStackFrame>* const instrumentation_stack_;
    std::vector<InstrumentationStackFrame> shadow_stack_;  // Interpreter frames seen on the walk.
    std::vector<uint32_t> dex_pcs_;                        // One entry per instrumented frame.
    const uintptr_t instrumentation_exit_pc_;
    bool reached_existing_instrumentation_frames_;
    size_t instrumentation_stack_depth_;
    uintptr_t last_return_pc_;
  };
  if (kVerboseInstrumentation) {
    std::string thread_name;
    thread->GetThreadName(thread_name);
    LOG(INFO) << "Installing exit stubs in " << thread_name;
  }

  Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
  std::unique_ptr<Context> context(Context::Create());
  uintptr_t instrumentation_exit_pc = reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc());
  InstallStackVisitor visitor(thread, context.get(), instrumentation_exit_pc);
  visitor.WalkStack(true);
  CHECK_EQ(visitor.dex_pcs_.size(), thread->GetInstrumentationStack()->size());

  if (instrumentation->ShouldNotifyMethodEnterExitEvents()) {
    // Create method enter events for all methods currently on the thread's stack. We only do this
    // if no debugger is attached to prevent from posting events twice.
    // Walk both stacks from the outermost frame inward, interleaving shadow
    // (interpreter) frames with instrumented quick frames by frame id.
    auto ssi = visitor.shadow_stack_.rbegin();
    for (auto isi = thread->GetInstrumentationStack()->rbegin(),
        end = thread->GetInstrumentationStack()->rend(); isi != end; ++isi) {
      while (ssi != visitor.shadow_stack_.rend() && (*ssi).frame_id_ < (*isi).frame_id_) {
        instrumentation->MethodEnterEvent(thread, (*ssi).this_object_, (*ssi).method_, 0);
        ++ssi;
      }
      uint32_t dex_pc = visitor.dex_pcs_.back();
      visitor.dex_pcs_.pop_back();
      if (!isi->interpreter_entry_) {
        instrumentation->MethodEnterEvent(thread, (*isi).this_object_, (*isi).method_, dex_pc);
      }
    }
  }
  thread->VerifyStack();
}
364
// Installs instrumentation exit stubs on every quick frame of `thread`'s stack.
// The stubs-installed flag is set first so later stack walks expect the exit pc.
void Instrumentation::InstrumentThreadStack(Thread* thread) {
  instrumentation_stubs_installed_ = true;
  InstrumentationInstallStack(thread, this);
}
369
370 // Removes the instrumentation exit pc as the return PC for every quick frame.
InstrumentationRestoreStack(Thread * thread,void * arg)371 static void InstrumentationRestoreStack(Thread* thread, void* arg)
372 REQUIRES(Locks::mutator_lock_) {
373 Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
374
375 struct RestoreStackVisitor FINAL : public StackVisitor {
376 RestoreStackVisitor(Thread* thread_in, uintptr_t instrumentation_exit_pc,
377 Instrumentation* instrumentation)
378 : StackVisitor(thread_in, nullptr, kInstrumentationStackWalk),
379 thread_(thread_in),
380 instrumentation_exit_pc_(instrumentation_exit_pc),
381 instrumentation_(instrumentation),
382 instrumentation_stack_(thread_in->GetInstrumentationStack()),
383 frames_removed_(0) {}
384
385 bool VisitFrame() OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_) {
386 if (instrumentation_stack_->size() == 0) {
387 return false; // Stop.
388 }
389 ArtMethod* m = GetMethod();
390 if (GetCurrentQuickFrame() == nullptr) {
391 if (kVerboseInstrumentation) {
392 LOG(INFO) << " Ignoring a shadow frame. Frame " << GetFrameId()
393 << " Method=" << ArtMethod::PrettyMethod(m);
394 }
395 return true; // Ignore shadow frames.
396 }
397 if (m == nullptr) {
398 if (kVerboseInstrumentation) {
399 LOG(INFO) << " Skipping upcall. Frame " << GetFrameId();
400 }
401 return true; // Ignore upcalls.
402 }
403 bool removed_stub = false;
404 // TODO: make this search more efficient?
405 const size_t frameId = GetFrameId();
406 for (const InstrumentationStackFrame& instrumentation_frame : *instrumentation_stack_) {
407 if (instrumentation_frame.frame_id_ == frameId) {
408 if (kVerboseInstrumentation) {
409 LOG(INFO) << " Removing exit stub in " << DescribeLocation();
410 }
411 if (instrumentation_frame.interpreter_entry_) {
412 CHECK(m == Runtime::Current()->GetCalleeSaveMethod(CalleeSaveType::kSaveRefsAndArgs));
413 } else {
414 CHECK(m == instrumentation_frame.method_) << ArtMethod::PrettyMethod(m);
415 }
416 SetReturnPc(instrumentation_frame.return_pc_);
417 if (instrumentation_->ShouldNotifyMethodEnterExitEvents() &&
418 !m->IsRuntimeMethod()) {
419 // Create the method exit events. As the methods didn't really exit the result is 0.
420 // We only do this if no debugger is attached to prevent from posting events twice.
421 instrumentation_->MethodExitEvent(thread_, instrumentation_frame.this_object_, m,
422 GetDexPc(), JValue());
423 }
424 frames_removed_++;
425 removed_stub = true;
426 break;
427 }
428 }
429 if (!removed_stub) {
430 if (kVerboseInstrumentation) {
431 LOG(INFO) << " No exit stub in " << DescribeLocation();
432 }
433 }
434 return true; // Continue.
435 }
436 Thread* const thread_;
437 const uintptr_t instrumentation_exit_pc_;
438 Instrumentation* const instrumentation_;
439 std::deque<instrumentation::InstrumentationStackFrame>* const instrumentation_stack_;
440 size_t frames_removed_;
441 };
442 if (kVerboseInstrumentation) {
443 std::string thread_name;
444 thread->GetThreadName(thread_name);
445 LOG(INFO) << "Removing exit stubs in " << thread_name;
446 }
447 std::deque<instrumentation::InstrumentationStackFrame>* stack = thread->GetInstrumentationStack();
448 if (stack->size() > 0) {
449 Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
450 uintptr_t instrumentation_exit_pc =
451 reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc());
452 RestoreStackVisitor visitor(thread, instrumentation_exit_pc, instrumentation);
453 visitor.WalkStack(true);
454 CHECK_EQ(visitor.frames_removed_, stack->size());
455 while (stack->size() > 0) {
456 stack->pop_front();
457 }
458 }
459 }
460
HasEvent(Instrumentation::InstrumentationEvent expected,uint32_t events)461 static bool HasEvent(Instrumentation::InstrumentationEvent expected, uint32_t events) {
462 return (events & expected) != 0;
463 }
464
PotentiallyAddListenerTo(Instrumentation::InstrumentationEvent event,uint32_t events,std::list<InstrumentationListener * > & list,InstrumentationListener * listener,bool * has_listener)465 static void PotentiallyAddListenerTo(Instrumentation::InstrumentationEvent event,
466 uint32_t events,
467 std::list<InstrumentationListener*>& list,
468 InstrumentationListener* listener,
469 bool* has_listener)
470 REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_) {
471 Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
472 if (!HasEvent(event, events)) {
473 return;
474 }
475 // If there is a free slot in the list, we insert the listener in that slot.
476 // Otherwise we add it to the end of the list.
477 auto it = std::find(list.begin(), list.end(), nullptr);
478 if (it != list.end()) {
479 *it = listener;
480 } else {
481 list.push_back(listener);
482 }
483 *has_listener = true;
484 }
485
// Registers `listener` for every event selected in the `events` bit mask, then
// refreshes the interpreter handler table. Requires the mutator lock to be
// exclusively held (i.e. all mutator threads suspended).
void Instrumentation::AddListener(InstrumentationListener* listener, uint32_t events) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
  PotentiallyAddListenerTo(kMethodEntered,
                           events,
                           method_entry_listeners_,
                           listener,
                           &have_method_entry_listeners_);
  PotentiallyAddListenerTo(kMethodExited,
                           events,
                           method_exit_listeners_,
                           listener,
                           &have_method_exit_listeners_);
  PotentiallyAddListenerTo(kMethodUnwind,
                           events,
                           method_unwind_listeners_,
                           listener,
                           &have_method_unwind_listeners_);
  PotentiallyAddListenerTo(kBranch,
                           events,
                           branch_listeners_,
                           listener,
                           &have_branch_listeners_);
  PotentiallyAddListenerTo(kInvokeVirtualOrInterface,
                           events,
                           invoke_virtual_or_interface_listeners_,
                           listener,
                           &have_invoke_virtual_or_interface_listeners_);
  PotentiallyAddListenerTo(kDexPcMoved,
                           events,
                           dex_pc_listeners_,
                           listener,
                           &have_dex_pc_listeners_);
  PotentiallyAddListenerTo(kFieldRead,
                           events,
                           field_read_listeners_,
                           listener,
                           &have_field_read_listeners_);
  PotentiallyAddListenerTo(kFieldWritten,
                           events,
                           field_write_listeners_,
                           listener,
                           &have_field_write_listeners_);
  PotentiallyAddListenerTo(kExceptionThrown,
                           events,
                           exception_thrown_listeners_,
                           listener,
                           &have_exception_thrown_listeners_);
  PotentiallyAddListenerTo(kWatchedFramePop,
                           events,
                           watched_frame_pop_listeners_,
                           listener,
                           &have_watched_frame_pop_listeners_);
  PotentiallyAddListenerTo(kExceptionHandled,
                           events,
                           exception_handled_listeners_,
                           listener,
                           &have_exception_handled_listeners_);
  UpdateInterpreterHandlerTable();
}
545
PotentiallyRemoveListenerFrom(Instrumentation::InstrumentationEvent event,uint32_t events,std::list<InstrumentationListener * > & list,InstrumentationListener * listener,bool * has_listener)546 static void PotentiallyRemoveListenerFrom(Instrumentation::InstrumentationEvent event,
547 uint32_t events,
548 std::list<InstrumentationListener*>& list,
549 InstrumentationListener* listener,
550 bool* has_listener)
551 REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_) {
552 Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
553 if (!HasEvent(event, events)) {
554 return;
555 }
556 auto it = std::find(list.begin(), list.end(), listener);
557 if (it != list.end()) {
558 // Just update the entry, do not remove from the list. Removing entries in the list
559 // is unsafe when mutators are iterating over it.
560 *it = nullptr;
561 }
562
563 // Check if the list contains any non-null listener, and update 'has_listener'.
564 for (InstrumentationListener* l : list) {
565 if (l != nullptr) {
566 *has_listener = true;
567 return;
568 }
569 }
570 *has_listener = false;
571 }
572
// Unregisters `listener` from every event selected in the `events` bit mask,
// then refreshes the interpreter handler table. Requires the mutator lock to
// be exclusively held (i.e. all mutator threads suspended).
void Instrumentation::RemoveListener(InstrumentationListener* listener, uint32_t events) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
  PotentiallyRemoveListenerFrom(kMethodEntered,
                                events,
                                method_entry_listeners_,
                                listener,
                                &have_method_entry_listeners_);
  PotentiallyRemoveListenerFrom(kMethodExited,
                                events,
                                method_exit_listeners_,
                                listener,
                                &have_method_exit_listeners_);
  PotentiallyRemoveListenerFrom(kMethodUnwind,
                                events,
                                method_unwind_listeners_,
                                listener,
                                &have_method_unwind_listeners_);
  PotentiallyRemoveListenerFrom(kBranch,
                                events,
                                branch_listeners_,
                                listener,
                                &have_branch_listeners_);
  PotentiallyRemoveListenerFrom(kInvokeVirtualOrInterface,
                                events,
                                invoke_virtual_or_interface_listeners_,
                                listener,
                                &have_invoke_virtual_or_interface_listeners_);
  PotentiallyRemoveListenerFrom(kDexPcMoved,
                                events,
                                dex_pc_listeners_,
                                listener,
                                &have_dex_pc_listeners_);
  PotentiallyRemoveListenerFrom(kFieldRead,
                                events,
                                field_read_listeners_,
                                listener,
                                &have_field_read_listeners_);
  PotentiallyRemoveListenerFrom(kFieldWritten,
                                events,
                                field_write_listeners_,
                                listener,
                                &have_field_write_listeners_);
  PotentiallyRemoveListenerFrom(kExceptionThrown,
                                events,
                                exception_thrown_listeners_,
                                listener,
                                &have_exception_thrown_listeners_);
  PotentiallyRemoveListenerFrom(kWatchedFramePop,
                                events,
                                watched_frame_pop_listeners_,
                                listener,
                                &have_watched_frame_pop_listeners_);
  PotentiallyRemoveListenerFrom(kExceptionHandled,
                                events,
                                exception_handled_listeners_,
                                listener,
                                &have_exception_handled_listeners_);
  UpdateInterpreterHandlerTable();
}
632
GetCurrentInstrumentationLevel() const633 Instrumentation::InstrumentationLevel Instrumentation::GetCurrentInstrumentationLevel() const {
634 if (interpreter_stubs_installed_) {
635 return InstrumentationLevel::kInstrumentWithInterpreter;
636 } else if (entry_exit_stubs_installed_) {
637 return InstrumentationLevel::kInstrumentWithInstrumentationStubs;
638 } else {
639 return InstrumentationLevel::kInstrumentNothing;
640 }
641 }
642
// Returns whether switching to `new_level` requires (re)installing stubs.
bool Instrumentation::RequiresInstrumentationInstallation(InstrumentationLevel new_level) const {
  // We need to reinstall instrumentation if we go to a different level.
  return GetCurrentInstrumentationLevel() != new_level;
}
647
// Records the instrumentation level requested by client `key` (or drops the
// client's request for kInstrumentNothing), then installs or removes stubs so
// the runtime matches the highest level requested by any client.
void Instrumentation::ConfigureStubs(const char* key, InstrumentationLevel desired_level) {
  // Store the instrumentation level for this key or remove it.
  if (desired_level == InstrumentationLevel::kInstrumentNothing) {
    // The client no longer needs instrumentation.
    requested_instrumentation_levels_.erase(key);
  } else {
    // The client needs instrumentation.
    requested_instrumentation_levels_.Overwrite(key, desired_level);
  }

  // Look for the highest required instrumentation level.
  InstrumentationLevel requested_level = InstrumentationLevel::kInstrumentNothing;
  for (const auto& v : requested_instrumentation_levels_) {
    requested_level = std::max(requested_level, v.second);
  }

  interpret_only_ = (requested_level == InstrumentationLevel::kInstrumentWithInterpreter) ||
                    forced_interpret_only_;

  if (!RequiresInstrumentationInstallation(requested_level)) {
    // We're already set.
    return;
  }
  Thread* const self = Thread::Current();
  Runtime* runtime = Runtime::Current();
  Locks::mutator_lock_->AssertExclusiveHeld(self);
  Locks::thread_list_lock_->AssertNotHeld(self);
  if (requested_level > InstrumentationLevel::kInstrumentNothing) {
    // Installing: pick the stub combination for the requested level, update
    // every class's entrypoints, then patch all thread stacks.
    if (requested_level == InstrumentationLevel::kInstrumentWithInterpreter) {
      interpreter_stubs_installed_ = true;
      entry_exit_stubs_installed_ = true;
    } else {
      CHECK_EQ(requested_level, InstrumentationLevel::kInstrumentWithInstrumentationStubs);
      entry_exit_stubs_installed_ = true;
      interpreter_stubs_installed_ = false;
    }
    InstallStubsClassVisitor visitor(this);
    runtime->GetClassLinker()->VisitClasses(&visitor);
    instrumentation_stubs_installed_ = true;
    MutexLock mu(self, *Locks::thread_list_lock_);
    runtime->GetThreadList()->ForEach(InstrumentationInstallStack, this);
  } else {
    // Uninstalling: restore normal entrypoints, then restore thread stacks.
    interpreter_stubs_installed_ = false;
    entry_exit_stubs_installed_ = false;
    InstallStubsClassVisitor visitor(this);
    runtime->GetClassLinker()->VisitClasses(&visitor);
    // Restore stack only if there is no method currently deoptimized.
    bool empty;
    {
      ReaderMutexLock mu(self, deoptimized_methods_lock_);
      empty = IsDeoptimizedMethodsEmpty();  // Avoid lock violation.
    }
    if (empty) {
      MutexLock mu(self, *Locks::thread_list_lock_);
      Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
      // Only do this after restoring, as walking the stack when restoring will see
      // the instrumentation exit pc.
      instrumentation_stubs_installed_ = false;
    }
  }
}
709
// Thread-list callback: re-selects `thread`'s quick allocation entrypoints
// (taking the read-barrier GC-marking phase into account when enabled).
static void ResetQuickAllocEntryPointsForThread(Thread* thread, void* arg ATTRIBUTE_UNUSED) {
  thread->ResetQuickAllocEntryPointsForThread(kUseReadBarrier && thread->GetIsGcMarking());
}
713
// Switches the quick allocation entrypoints between the instrumented and
// uninstrumented variants. Caller must hold instrument_entrypoints_lock_ and
// must NOT hold the mutator lock (a suspend-all is performed when the runtime
// is started).
void Instrumentation::SetEntrypointsInstrumented(bool instrumented) {
  Thread* self = Thread::Current();
  Runtime* runtime = Runtime::Current();
  Locks::mutator_lock_->AssertNotHeld(self);
  Locks::instrument_entrypoints_lock_->AssertHeld(self);
  if (runtime->IsStarted()) {
    // All threads must be suspended while entrypoints are swapped.
    ScopedSuspendAll ssa(__FUNCTION__);
    MutexLock mu(self, *Locks::runtime_shutdown_lock_);
    SetQuickAllocEntryPointsInstrumented(instrumented);
    ResetQuickAllocEntryPoints();
    alloc_entrypoints_instrumented_ = instrumented;
  } else {
    MutexLock mu(self, *Locks::runtime_shutdown_lock_);
    SetQuickAllocEntryPointsInstrumented(instrumented);

    // Note: ResetQuickAllocEntryPoints only works when the runtime is started. Manually run the
    //       update for just this thread.
    // Note: self may be null. One of those paths is setting instrumentation in the Heap
    //       constructor for gcstress mode.
    if (self != nullptr) {
      ResetQuickAllocEntryPointsForThread(self, nullptr);
    }

    alloc_entrypoints_instrumented_ = instrumented;
  }
}
740
// Public entry point: takes instrument_entrypoints_lock_ and bumps the
// allocation-instrumentation refcount.
void Instrumentation::InstrumentQuickAllocEntryPoints() {
  MutexLock mu(Thread::Current(), *Locks::instrument_entrypoints_lock_);
  InstrumentQuickAllocEntryPointsLocked();
}
745
// Public entry point: takes instrument_entrypoints_lock_ and drops the
// allocation-instrumentation refcount.
void Instrumentation::UninstrumentQuickAllocEntryPoints() {
  MutexLock mu(Thread::Current(), *Locks::instrument_entrypoints_lock_);
  UninstrumentQuickAllocEntryPointsLocked();
}
750
// Increments the allocation-instrumentation refcount; the entrypoints are
// actually switched only on the 0 -> 1 transition.
void Instrumentation::InstrumentQuickAllocEntryPointsLocked() {
  Locks::instrument_entrypoints_lock_->AssertHeld(Thread::Current());
  if (quick_alloc_entry_points_instrumentation_counter_ == 0) {
    SetEntrypointsInstrumented(true);
  }
  ++quick_alloc_entry_points_instrumentation_counter_;
}
758
// Decrements the allocation-instrumentation refcount; the entrypoints are
// restored only on the 1 -> 0 transition. Underflow is a hard error.
void Instrumentation::UninstrumentQuickAllocEntryPointsLocked() {
  Locks::instrument_entrypoints_lock_->AssertHeld(Thread::Current());
  CHECK_GT(quick_alloc_entry_points_instrumentation_counter_, 0U);
  --quick_alloc_entry_points_instrumentation_counter_;
  if (quick_alloc_entry_points_instrumentation_counter_ == 0) {
    SetEntrypointsInstrumented(false);
  }
}
767
// Re-selects quick allocation entrypoints on every live thread. Only valid
// once the runtime is started (no-op before that).
void Instrumentation::ResetQuickAllocEntryPoints() {
  Runtime* runtime = Runtime::Current();
  if (runtime->IsStarted()) {
    MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
    runtime->GetThreadList()->ForEach(ResetQuickAllocEntryPointsForThread, nullptr);
  }
}
775
// Installs `quick_code` as `method`'s entrypoint, overriding it with the
// interpreter bridge or the instrumentation entry stub when the current
// instrumentation state requires that instead.
void Instrumentation::UpdateMethodsCodeImpl(ArtMethod* method, const void* quick_code) {
  const void* new_quick_code;
  if (LIKELY(!instrumentation_stubs_installed_)) {
    // Fast path: no instrumentation active, take the code as given.
    new_quick_code = quick_code;
  } else {
    if ((interpreter_stubs_installed_ || IsDeoptimized(method)) && !method->IsNative()) {
      new_quick_code = GetQuickToInterpreterBridge();
    } else {
      ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
      if (class_linker->IsQuickResolutionStub(quick_code) ||
          class_linker->IsQuickToInterpreterBridge(quick_code)) {
        // Keep runtime stubs as-is; they already route through the runtime.
        new_quick_code = quick_code;
      } else if (entry_exit_stubs_installed_) {
        new_quick_code = GetQuickInstrumentationEntryPoint();
      } else {
        new_quick_code = quick_code;
      }
    }
  }
  UpdateEntrypoints(method, new_quick_code);
}
797
UpdateNativeMethodsCodeToJitCode(ArtMethod * method,const void * quick_code)798 void Instrumentation::UpdateNativeMethodsCodeToJitCode(ArtMethod* method, const void* quick_code) {
799 // We don't do any read barrier on `method`'s declaring class in this code, as the JIT might
800 // enter here on a soon-to-be deleted ArtMethod. Updating the entrypoint is OK though, as
801 // the ArtMethod is still in memory.
802 const void* new_quick_code = quick_code;
803 if (UNLIKELY(instrumentation_stubs_installed_) && entry_exit_stubs_installed_) {
804 new_quick_code = GetQuickInstrumentationEntryPoint();
805 }
806 UpdateEntrypoints(method, new_quick_code);
807 }
808
// Public entry for entrypoint updates; requires the declaring class to be
// resolved (see UpdateMethodsCodeForJavaDebuggable for the exception).
void Instrumentation::UpdateMethodsCode(ArtMethod* method, const void* quick_code) {
  DCHECK(method->GetDeclaringClass()->IsResolved());
  UpdateMethodsCodeImpl(method, quick_code);
}
813
// Forces `method` to enter through the quick-to-interpreter bridge.
void Instrumentation::UpdateMethodsCodeToInterpreterEntryPoint(ArtMethod* method) {
  UpdateMethodsCodeImpl(method, GetQuickToInterpreterBridge());
}
817
void Instrumentation::UpdateMethodsCodeForJavaDebuggable(ArtMethod* method,
                                                         const void* quick_code) {
  // When the runtime is set to Java debuggable, we may update the entry points of
  // all methods of a class to the interpreter bridge. A method's declaring class
  // might not be in resolved state yet in that case, so we bypass the DCHECK in
  // UpdateMethodsCode.
  UpdateMethodsCodeImpl(method, quick_code);
}
826
AddDeoptimizedMethod(ArtMethod * method)827 bool Instrumentation::AddDeoptimizedMethod(ArtMethod* method) {
828 if (IsDeoptimizedMethod(method)) {
829 // Already in the map. Return.
830 return false;
831 }
832 // Not found. Add it.
833 deoptimized_methods_.insert(method);
834 return true;
835 }
836
IsDeoptimizedMethod(ArtMethod * method)837 bool Instrumentation::IsDeoptimizedMethod(ArtMethod* method) {
838 return deoptimized_methods_.find(method) != deoptimized_methods_.end();
839 }
840
BeginDeoptimizedMethod()841 ArtMethod* Instrumentation::BeginDeoptimizedMethod() {
842 if (deoptimized_methods_.empty()) {
843 // Empty.
844 return nullptr;
845 }
846 return *deoptimized_methods_.begin();
847 }
848
RemoveDeoptimizedMethod(ArtMethod * method)849 bool Instrumentation::RemoveDeoptimizedMethod(ArtMethod* method) {
850 auto it = deoptimized_methods_.find(method);
851 if (it == deoptimized_methods_.end()) {
852 return false;
853 }
854 deoptimized_methods_.erase(it);
855 return true;
856 }
857
// Returns whether no method is currently individually deoptimized.
bool Instrumentation::IsDeoptimizedMethodsEmpty() const {
  return deoptimized_methods_.empty();
}
861
// Deoptimizes a single method: records it in the deoptimized set, points its
// entrypoint at the instrumentation stub, and instruments the stacks of all
// threads so already-active frames are covered.
void Instrumentation::Deoptimize(ArtMethod* method) {
  // Only concrete, invokable, non-native, non-proxy methods can be
  // individually deoptimized.
  CHECK(!method->IsNative());
  CHECK(!method->IsProxyMethod());
  CHECK(method->IsInvokable());

  Thread* self = Thread::Current();
  {
    WriterMutexLock mu(self, deoptimized_methods_lock_);
    bool has_not_been_deoptimized = AddDeoptimizedMethod(method);
    CHECK(has_not_been_deoptimized) << "Method " << ArtMethod::PrettyMethod(method)
                                    << " is already deoptimized";
  }
  // If everything already runs under the interpreter there is nothing extra
  // to install for this single method.
  if (!interpreter_stubs_installed_) {
    UpdateEntrypoints(method, GetQuickInstrumentationEntryPoint());

    // Install instrumentation exit stub and instrumentation frames. We may already have installed
    // these previously so it will only cover the newly created frames.
    instrumentation_stubs_installed_ = true;
    MutexLock mu(self, *Locks::thread_list_lock_);
    Runtime::Current()->GetThreadList()->ForEach(InstrumentationInstallStack, this);
  }
}
884
// Reverses Deoptimize() for a single method: removes it from the deoptimized
// set, restores an appropriate entrypoint, and — when no deoptimized method
// remains — restores the stacks of all threads.
void Instrumentation::Undeoptimize(ArtMethod* method) {
  CHECK(!method->IsNative());
  CHECK(!method->IsProxyMethod());
  CHECK(method->IsInvokable());

  Thread* self = Thread::Current();
  bool empty;
  {
    WriterMutexLock mu(self, deoptimized_methods_lock_);
    bool found_and_erased = RemoveDeoptimizedMethod(method);
    CHECK(found_and_erased) << "Method " << ArtMethod::PrettyMethod(method)
                            << " is not deoptimized";
    // Capture emptiness while still holding the lock; used below to decide
    // whether the thread stacks can be restored.
    empty = IsDeoptimizedMethodsEmpty();
  }

  // Restore code and possibly stack only if we did not deoptimize everything.
  if (!interpreter_stubs_installed_) {
    // Restore its code or resolution trampoline.
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    if (method->IsStatic() && !method->IsConstructor() &&
        !method->GetDeclaringClass()->IsInitialized()) {
      // Static methods of uninitialized classes must go back through the
      // resolution stub so class initialization is triggered on first call.
      UpdateEntrypoints(method, GetQuickResolutionStub());
    } else {
      const void* quick_code = NeedDebugVersionFor(method)
          ? GetQuickToInterpreterBridge()
          : class_linker->GetQuickOatCodeFor(method);
      UpdateEntrypoints(method, quick_code);
    }

    // If there is no deoptimized method left, we can restore the stack of each thread.
    if (empty) {
      MutexLock mu(self, *Locks::thread_list_lock_);
      Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
      instrumentation_stubs_installed_ = false;
    }
  }
}
922
// Thread-safe query: is `method` currently individually deoptimized?
bool Instrumentation::IsDeoptimized(ArtMethod* method) {
  DCHECK(method != nullptr);
  ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
  return IsDeoptimizedMethod(method);
}
928
// Turns on deoptimization support. Must be called before any Deoptimize()
// request; checks that no stale state is left from a previous session.
void Instrumentation::EnableDeoptimization() {
  ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
  CHECK(IsDeoptimizedMethodsEmpty());
  CHECK_EQ(deoptimization_enabled_, false);
  deoptimization_enabled_ = true;
}
935
// Turns off deoptimization support registered under `key`: undoes full
// deoptimization if active, then undeoptimizes each individually deoptimized
// method.
void Instrumentation::DisableDeoptimization(const char* key) {
  CHECK_EQ(deoptimization_enabled_, true);
  // If we deoptimized everything, undo it.
  InstrumentationLevel level = GetCurrentInstrumentationLevel();
  if (level == InstrumentationLevel::kInstrumentWithInterpreter) {
    UndeoptimizeEverything(key);
  }
  // Undeoptimized selected methods.
  while (true) {
    ArtMethod* method;
    {
      // Hold the lock only to pick a method; Undeoptimize() re-acquires it
      // exclusively, so it must be called with the lock released.
      ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
      if (IsDeoptimizedMethodsEmpty()) {
        break;
      }
      method = BeginDeoptimizedMethod();
      CHECK(method != nullptr);
    }
    Undeoptimize(method);
  }
  deoptimization_enabled_ = false;
}
958
959 // Indicates if instrumentation should notify method enter/exit events to the listeners.
ShouldNotifyMethodEnterExitEvents() const960 bool Instrumentation::ShouldNotifyMethodEnterExitEvents() const {
961 if (!HasMethodEntryListeners() && !HasMethodExitListeners()) {
962 return false;
963 }
964 return !deoptimization_enabled_ && !interpreter_stubs_installed_;
965 }
966
// Switches the whole runtime to interpreter-only execution under `key`.
// Requires EnableDeoptimization() to have been called first.
void Instrumentation::DeoptimizeEverything(const char* key) {
  CHECK(deoptimization_enabled_);
  ConfigureStubs(key, InstrumentationLevel::kInstrumentWithInterpreter);
}
971
// Reverses DeoptimizeEverything() for `key`, dropping back to no
// instrumentation stubs.
void Instrumentation::UndeoptimizeEverything(const char* key) {
  CHECK(interpreter_stubs_installed_);
  CHECK(deoptimization_enabled_);
  ConfigureStubs(key, InstrumentationLevel::kInstrumentNothing);
}
977
EnableMethodTracing(const char * key,bool needs_interpreter)978 void Instrumentation::EnableMethodTracing(const char* key, bool needs_interpreter) {
979 InstrumentationLevel level;
980 if (needs_interpreter) {
981 level = InstrumentationLevel::kInstrumentWithInterpreter;
982 } else {
983 level = InstrumentationLevel::kInstrumentWithInstrumentationStubs;
984 }
985 ConfigureStubs(key, level);
986 }
987
// Disables method tracing registered under `key`.
void Instrumentation::DisableMethodTracing(const char* key) {
  ConfigureStubs(key, InstrumentationLevel::kInstrumentNothing);
}
991
GetQuickCodeFor(ArtMethod * method,PointerSize pointer_size) const992 const void* Instrumentation::GetQuickCodeFor(ArtMethod* method, PointerSize pointer_size) const {
993 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
994 if (LIKELY(!instrumentation_stubs_installed_)) {
995 const void* code = method->GetEntryPointFromQuickCompiledCodePtrSize(pointer_size);
996 DCHECK(code != nullptr);
997 if (LIKELY(!class_linker->IsQuickResolutionStub(code) &&
998 !class_linker->IsQuickToInterpreterBridge(code)) &&
999 !class_linker->IsQuickResolutionStub(code) &&
1000 !class_linker->IsQuickToInterpreterBridge(code)) {
1001 return code;
1002 }
1003 }
1004 return class_linker->GetQuickOatCodeFor(method);
1005 }
1006
// Dispatches a method-enter event to all registered entry listeners.
// The receiver is wrapped in a handle so it survives any GC triggered by a
// listener.
void Instrumentation::MethodEnterEventImpl(Thread* thread,
                                           ObjPtr<mirror::Object> this_object,
                                           ArtMethod* method,
                                           uint32_t dex_pc) const {
  DCHECK(!method->IsRuntimeMethod());
  if (HasMethodEntryListeners()) {
    Thread* self = Thread::Current();
    StackHandleScope<1> hs(self);
    Handle<mirror::Object> thiz(hs.NewHandle(this_object));
    for (InstrumentationListener* listener : method_entry_listeners_) {
      // Listener slots may be null; skip them.
      if (listener != nullptr) {
        listener->MethodEntered(thread, thiz, method, dex_pc);
      }
    }
  }
}
1023
// Dispatches a method-exit event to all registered exit listeners. Reference
// return values are additionally wrapped in a handle (hence the scope of
// size 2) so they are not lost across a GC caused by a listener.
void Instrumentation::MethodExitEventImpl(Thread* thread,
                                          ObjPtr<mirror::Object> this_object,
                                          ArtMethod* method,
                                          uint32_t dex_pc,
                                          const JValue& return_value) const {
  if (HasMethodExitListeners()) {
    Thread* self = Thread::Current();
    StackHandleScope<2> hs(self);
    Handle<mirror::Object> thiz(hs.NewHandle(this_object));
    if (method->GetInterfaceMethodIfProxy(kRuntimePointerSize)
            ->GetReturnTypePrimitive() != Primitive::kPrimNot) {
      // Primitive (or void) return: pass the raw JValue through.
      for (InstrumentationListener* listener : method_exit_listeners_) {
        if (listener != nullptr) {
          listener->MethodExited(thread, thiz, method, dex_pc, return_value);
        }
      }
    } else {
      // Reference return: hand listeners a handle instead of a raw pointer.
      Handle<mirror::Object> ret(hs.NewHandle(return_value.GetL()));
      for (InstrumentationListener* listener : method_exit_listeners_) {
        if (listener != nullptr) {
          listener->MethodExited(thread, thiz, method, dex_pc, ret);
        }
      }
    }
  }
}
1050
// Dispatches a method-unwind event (a frame popped by exception unwinding)
// to all registered unwind listeners.
void Instrumentation::MethodUnwindEvent(Thread* thread,
                                        mirror::Object* this_object,
                                        ArtMethod* method,
                                        uint32_t dex_pc) const {
  if (HasMethodUnwindListeners()) {
    Thread* self = Thread::Current();
    StackHandleScope<1> hs(self);
    Handle<mirror::Object> thiz(hs.NewHandle(this_object));
    for (InstrumentationListener* listener : method_unwind_listeners_) {
      if (listener != nullptr) {
        listener->MethodUnwind(thread, thiz, method, dex_pc);
      }
    }
  }
}
1066
// Dispatches a dex-pc-moved event to all registered dex-pc listeners.
void Instrumentation::DexPcMovedEventImpl(Thread* thread,
                                          ObjPtr<mirror::Object> this_object,
                                          ArtMethod* method,
                                          uint32_t dex_pc) const {
  Thread* self = Thread::Current();
  StackHandleScope<1> hs(self);
  Handle<mirror::Object> thiz(hs.NewHandle(this_object));
  for (InstrumentationListener* listener : dex_pc_listeners_) {
    if (listener != nullptr) {
      listener->DexPcMoved(thread, thiz, method, dex_pc);
    }
  }
}
1080
BranchImpl(Thread * thread,ArtMethod * method,uint32_t dex_pc,int32_t offset) const1081 void Instrumentation::BranchImpl(Thread* thread,
1082 ArtMethod* method,
1083 uint32_t dex_pc,
1084 int32_t offset) const {
1085 for (InstrumentationListener* listener : branch_listeners_) {
1086 if (listener != nullptr) {
1087 listener->Branch(thread, method, dex_pc, offset);
1088 }
1089 }
1090 }
1091
// Dispatches an invoke-virtual/interface event (caller -> callee) to all
// registered listeners.
void Instrumentation::InvokeVirtualOrInterfaceImpl(Thread* thread,
                                                   ObjPtr<mirror::Object> this_object,
                                                   ArtMethod* caller,
                                                   uint32_t dex_pc,
                                                   ArtMethod* callee) const {
  Thread* self = Thread::Current();
  StackHandleScope<1> hs(self);
  Handle<mirror::Object> thiz(hs.NewHandle(this_object));
  for (InstrumentationListener* listener : invoke_virtual_or_interface_listeners_) {
    if (listener != nullptr) {
      listener->InvokeVirtualOrInterface(thread, thiz, caller, dex_pc, callee);
    }
  }
}
1106
WatchedFramePopImpl(Thread * thread,const ShadowFrame & frame) const1107 void Instrumentation::WatchedFramePopImpl(Thread* thread, const ShadowFrame& frame) const {
1108 for (InstrumentationListener* listener : watched_frame_pop_listeners_) {
1109 if (listener != nullptr) {
1110 listener->WatchedFramePop(thread, frame);
1111 }
1112 }
1113 }
1114
// Dispatches a field-read event to all registered field-read listeners.
void Instrumentation::FieldReadEventImpl(Thread* thread,
                                         ObjPtr<mirror::Object> this_object,
                                         ArtMethod* method,
                                         uint32_t dex_pc,
                                         ArtField* field) const {
  Thread* self = Thread::Current();
  StackHandleScope<1> hs(self);
  Handle<mirror::Object> thiz(hs.NewHandle(this_object));
  for (InstrumentationListener* listener : field_read_listeners_) {
    if (listener != nullptr) {
      listener->FieldRead(thread, thiz, method, dex_pc, field);
    }
  }
}
1129
// Dispatches a field-write event to all registered field-write listeners.
// Reference values being written are handled via an extra handle (scope of
// size 2) so they survive a GC caused by a listener.
void Instrumentation::FieldWriteEventImpl(Thread* thread,
                                          ObjPtr<mirror::Object> this_object,
                                          ArtMethod* method,
                                          uint32_t dex_pc,
                                          ArtField* field,
                                          const JValue& field_value) const {
  Thread* self = Thread::Current();
  StackHandleScope<2> hs(self);
  Handle<mirror::Object> thiz(hs.NewHandle(this_object));
  if (field->IsPrimitiveType()) {
    // Primitive write: pass the raw JValue straight through.
    for (InstrumentationListener* listener : field_write_listeners_) {
      if (listener != nullptr) {
        listener->FieldWritten(thread, thiz, method, dex_pc, field, field_value);
      }
    }
  } else {
    // Reference write: wrap the value in a handle for the listeners.
    Handle<mirror::Object> val(hs.NewHandle(field_value.GetL()));
    for (InstrumentationListener* listener : field_write_listeners_) {
      if (listener != nullptr) {
        listener->FieldWritten(thread, thiz, method, dex_pc, field, val);
      }
    }
  }
}
1154
// Dispatches an exception-thrown event to all registered listeners. The
// pending exception is temporarily cleared while the listeners run and then
// reinstated afterwards.
void Instrumentation::ExceptionThrownEvent(Thread* thread,
                                           mirror::Throwable* exception_object) const {
  Thread* self = Thread::Current();
  StackHandleScope<1> hs(self);
  Handle<mirror::Throwable> h_exception(hs.NewHandle(exception_object));
  if (HasExceptionThrownListeners()) {
    DCHECK_EQ(thread->GetException(), h_exception.Get());
    // Clear the pending exception so listeners run in a clean state.
    thread->ClearException();
    for (InstrumentationListener* listener : exception_thrown_listeners_) {
      if (listener != nullptr) {
        listener->ExceptionThrown(thread, h_exception);
      }
    }
    // See b/65049545 for discussion about this behavior.
    // Listeners must not leave a new pending exception; restore the original.
    thread->AssertNoPendingException();
    thread->SetException(h_exception.Get());
  }
}
1173
// Dispatches an exception-handled event (a catch handler was found) to all
// registered listeners. Unlike ExceptionThrownEvent, the exception has
// already been cleared by the caller.
void Instrumentation::ExceptionHandledEvent(Thread* thread,
                                            mirror::Throwable* exception_object) const {
  Thread* self = Thread::Current();
  StackHandleScope<1> hs(self);
  Handle<mirror::Throwable> h_exception(hs.NewHandle(exception_object));
  if (HasExceptionHandledListeners()) {
    // We should have cleared the exception so that callers can detect a new one.
    DCHECK(thread->GetException() == nullptr);
    for (InstrumentationListener* listener : exception_handled_listeners_) {
      if (listener != nullptr) {
        listener->ExceptionHandled(thread, h_exception);
      }
    }
  }
}
1189
1190 // Computes a frame ID by ignoring inlined frames.
ComputeFrameId(Thread * self,size_t frame_depth,size_t inlined_frames_before_frame)1191 size_t Instrumentation::ComputeFrameId(Thread* self,
1192 size_t frame_depth,
1193 size_t inlined_frames_before_frame) {
1194 CHECK_GE(frame_depth, inlined_frames_before_frame);
1195 size_t no_inline_depth = frame_depth - inlined_frames_before_frame;
1196 return StackVisitor::ComputeNumFrames(self, kInstrumentationStackWalk) - no_inline_depth;
1197 }
1198
// Sanity-checks that an instrumentation frame's recorded frame id matches the
// current stack depth (adjusted by `delta`); dumps the stack and aborts on a
// mismatch.
static void CheckStackDepth(Thread* self, const InstrumentationStackFrame& instrumentation_frame,
                            int delta)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  size_t frame_id = StackVisitor::ComputeNumFrames(self, kInstrumentationStackWalk) + delta;
  if (frame_id != instrumentation_frame.frame_id_) {
    LOG(ERROR) << "Expected frame_id=" << frame_id << " but found "
               << instrumentation_frame.frame_id_;
    StackVisitor::DescribeStack(self);
    CHECK_EQ(frame_id, instrumentation_frame.frame_id_);
  }
}
1210
// Pushes an instrumentation frame recording the return address `lr` for
// `method` onto the thread's instrumentation stack, firing the method-enter
// event first (unless this is an interpreter entry).
void Instrumentation::PushInstrumentationStackFrame(Thread* self, mirror::Object* this_object,
                                                    ArtMethod* method,
                                                    uintptr_t lr, bool interpreter_entry) {
  DCHECK(!self->IsExceptionPending());
  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
  if (kVerboseInstrumentation) {
    LOG(INFO) << "Entering " << ArtMethod::PrettyMethod(method) << " from PC "
              << reinterpret_cast<void*>(lr);
  }

  // We send the enter event before pushing the instrumentation frame to make cleanup easier. If the
  // event causes an exception we can simply send the unwind event and return.
  StackHandleScope<1> hs(self);
  Handle<mirror::Object> h_this(hs.NewHandle(this_object));
  if (!interpreter_entry) {
    MethodEnterEvent(self, h_this.Get(), method, 0);
    if (self->IsExceptionPending()) {
      MethodUnwindEvent(self, h_this.Get(), method, 0);
      return;
    }
  }

  // We have a callee-save frame meaning this value is guaranteed to never be 0.
  DCHECK(!self->IsExceptionPending());
  size_t frame_id = StackVisitor::ComputeNumFrames(self, kInstrumentationStackWalk);

  instrumentation::InstrumentationStackFrame instrumentation_frame(h_this.Get(), method, lr,
                                                                   frame_id, interpreter_entry);
  stack->push_front(instrumentation_frame);
}
1241
GetDeoptimizationMethodType(ArtMethod * method)1242 DeoptimizationMethodType Instrumentation::GetDeoptimizationMethodType(ArtMethod* method) {
1243 if (method->IsRuntimeMethod()) {
1244 // Certain methods have strict requirement on whether the dex instruction
1245 // should be re-executed upon deoptimization.
1246 if (method == Runtime::Current()->GetCalleeSaveMethod(
1247 CalleeSaveType::kSaveEverythingForClinit)) {
1248 return DeoptimizationMethodType::kKeepDexPc;
1249 }
1250 if (method == Runtime::Current()->GetCalleeSaveMethod(
1251 CalleeSaveType::kSaveEverythingForSuspendCheck)) {
1252 return DeoptimizationMethodType::kKeepDexPc;
1253 }
1254 }
1255 return DeoptimizationMethodType::kDefault;
1256 }
1257
// Try to get the shorty of a runtime method if it's an invocation stub.
// Walks the stack until the first Java frame and records the shorty char of
// the return type in `shorty` ('V' if nothing applicable is found).
struct RuntimeMethodShortyVisitor : public StackVisitor {
  explicit RuntimeMethodShortyVisitor(Thread* thread)
      : StackVisitor(thread, nullptr, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
        shorty('V') {}

  bool VisitFrame() REQUIRES_SHARED(Locks::mutator_lock_) {
    ArtMethod* m = GetMethod();
    if (m != nullptr && !m->IsRuntimeMethod()) {
      // The first Java method.
      if (m->IsNative()) {
        // Use JNI method's shorty for the jni stub.
        shorty = m->GetShorty()[0];
        return false;
      }
      if (m->IsProxyMethod()) {
        // Proxy method just invokes its proxied method via
        // art_quick_proxy_invoke_handler.
        shorty = m->GetInterfaceMethodIfProxy(kRuntimePointerSize)->GetShorty()[0];
        return false;
      }
      const Instruction& instr = m->DexInstructions().InstructionAt(GetDexPc());
      if (instr.IsInvoke()) {
        const DexFile* dex_file = m->GetDexFile();
        if (interpreter::IsStringInit(dex_file, instr.VRegB())) {
          // Invoking string init constructor is turned into invoking
          // StringFactory.newStringFromChars() which returns a string.
          shorty = 'L';
          return false;
        }
        // A regular invoke, use callee's shorty.
        uint32_t method_idx = instr.VRegB();
        shorty = dex_file->GetMethodShorty(method_idx)[0];
      }
      // Stop stack walking since we've seen a Java frame.
      return false;
    }
    // Keep walking past runtime frames.
    return true;
  }

  // First char of the relevant method's shorty (i.e. its return type).
  char shorty;
};
1300
// Pops the top instrumentation frame when an instrumented method returns:
// restores the saved return pc, fires the method-exit event, and decides
// whether the caller must be deoptimized into the interpreter. Returns the
// (pc, sp-or-deopt-entry) pair the assembly stub jumps through.
TwoWordReturn Instrumentation::PopInstrumentationStackFrame(Thread* self,
                                                            uintptr_t* return_pc,
                                                            uint64_t* gpr_result,
                                                            uint64_t* fpr_result) {
  DCHECK(gpr_result != nullptr);
  DCHECK(fpr_result != nullptr);
  // Do the pop.
  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
  CHECK_GT(stack->size(), 0U);
  InstrumentationStackFrame instrumentation_frame = stack->front();
  stack->pop_front();

  // Set return PC and check the sanity of the stack.
  *return_pc = instrumentation_frame.return_pc_;
  CheckStackDepth(self, instrumentation_frame, 0);
  self->VerifyStack();

  ArtMethod* method = instrumentation_frame.method_;
  uint32_t length;
  const PointerSize pointer_size = Runtime::Current()->GetClassLinker()->GetImagePointerSize();
  char return_shorty;

  // Runtime method does not call into MethodExitEvent() so there should not be
  // suspension point below.
  ScopedAssertNoThreadSuspension ants(__FUNCTION__, method->IsRuntimeMethod());
  if (method->IsRuntimeMethod()) {
    if (method != Runtime::Current()->GetCalleeSaveMethod(
        CalleeSaveType::kSaveEverythingForClinit)) {
      // If the caller is at an invocation point and the runtime method is not
      // for clinit, we need to pass return results to the caller.
      // We need the correct shorty to decide whether we need to pass the return
      // result for deoptimization below.
      RuntimeMethodShortyVisitor visitor(self);
      visitor.WalkStack();
      return_shorty = visitor.shorty;
    } else {
      // Some runtime methods such as allocations, unresolved field getters, etc.
      // have return value. We don't need to set return_value since MethodExitEvent()
      // below isn't called for runtime methods. Deoptimization doesn't need the
      // value either since the dex instruction will be re-executed by the
      // interpreter, except these two cases:
      // (1) For an invoke, which is handled above to get the correct shorty.
      // (2) For MONITOR_ENTER/EXIT, which cannot be re-executed since it's not
      //     idempotent. However there is no return value for it anyway.
      return_shorty = 'V';
    }
  } else {
    return_shorty = method->GetInterfaceMethodIfProxy(pointer_size)->GetShorty(&length)[0];
  }

  bool is_ref = return_shorty == '[' || return_shorty == 'L';
  StackHandleScope<1> hs(self);
  MutableHandle<mirror::Object> res(hs.NewHandle<mirror::Object>(nullptr));
  JValue return_value;
  // Pick the return value out of the saved registers based on the shorty.
  if (return_shorty == 'V') {
    return_value.SetJ(0);
  } else if (return_shorty == 'F' || return_shorty == 'D') {
    return_value.SetJ(*fpr_result);
  } else {
    return_value.SetJ(*gpr_result);
  }
  if (is_ref) {
    // Take a handle to the return value so we won't lose it if we suspend.
    res.Assign(return_value.GetL());
  }
  // TODO: improve the dex pc information here, requires knowledge of current PC as opposed to
  //       return_pc.
  uint32_t dex_pc = dex::kDexNoIndex;
  mirror::Object* this_object = instrumentation_frame.this_object_;
  if (!method->IsRuntimeMethod() && !instrumentation_frame.interpreter_entry_) {
    MethodExitEvent(self, this_object, instrumentation_frame.method_, dex_pc, return_value);
  }

  // Deoptimize if the caller needs to continue execution in the interpreter. Do nothing if we get
  // back to an upcall.
  NthCallerVisitor visitor(self, 1, true);
  visitor.WalkStack(true);
  bool deoptimize = (visitor.caller != nullptr) &&
                    (interpreter_stubs_installed_ || IsDeoptimized(visitor.caller) ||
                     Dbg::IsForcedInterpreterNeededForUpcall(self, visitor.caller));
  if (is_ref) {
    // Restore the return value if it's a reference since it might have moved.
    *reinterpret_cast<mirror::Object**>(gpr_result) = res.Get();
  }
  if (deoptimize && Runtime::Current()->IsAsyncDeoptimizeable(*return_pc)) {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Deoptimizing "
                << visitor.caller->PrettyMethod()
                << " by returning from "
                << method->PrettyMethod()
                << " with result "
                << std::hex << return_value.GetJ() << std::dec
                << " in "
                << *self;
    }
    DeoptimizationMethodType deopt_method_type = GetDeoptimizationMethodType(method);
    self->PushDeoptimizationContext(return_value,
                                    return_shorty == 'L' || return_shorty == '[',
                                    nullptr /* no pending exception */,
                                    false /* from_code */,
                                    deopt_method_type);
    // Jump to the deoptimization entrypoint instead of the real return pc.
    return GetTwoWordSuccessValue(*return_pc,
                                  reinterpret_cast<uintptr_t>(GetQuickDeoptimizationEntryPoint()));
  } else {
    if (deoptimize && !Runtime::Current()->IsAsyncDeoptimizeable(*return_pc)) {
      VLOG(deopt) << "Got a deoptimization request on un-deoptimizable " << method->PrettyMethod()
                  << " at PC " << reinterpret_cast<void*>(*return_pc);
    }
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Returning from " << method->PrettyMethod()
                << " to PC " << reinterpret_cast<void*>(*return_pc);
    }
    return GetTwoWordSuccessValue(0, *return_pc);
  }
}
1416
// Pops the top instrumentation frame during exception unwinding or
// deoptimization and returns the saved return pc. Fires the method-unwind
// event for non-runtime methods when unwinding (not when deoptimizing).
uintptr_t Instrumentation::PopMethodForUnwind(Thread* self, bool is_deoptimization) const {
  // Do the pop.
  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
  CHECK_GT(stack->size(), 0U);
  // Remember the size so we can verify the unwind event did not disturb the
  // instrumentation stack before the actual pop below.
  size_t idx = stack->size();
  InstrumentationStackFrame instrumentation_frame = stack->front();

  ArtMethod* method = instrumentation_frame.method_;
  if (is_deoptimization) {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Popping for deoptimization " << ArtMethod::PrettyMethod(method);
    }
  } else {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Popping for unwind " << ArtMethod::PrettyMethod(method);
    }

    // Notify listeners of method unwind.
    // TODO: improve the dex pc information here, requires knowledge of current PC as opposed to
    //       return_pc.
    uint32_t dex_pc = dex::kDexNoIndex;
    if (!method->IsRuntimeMethod()) {
      MethodUnwindEvent(self, instrumentation_frame.this_object_, method, dex_pc);
    }
  }
  // TODO: bring back CheckStackDepth(self, instrumentation_frame, 2);
  CHECK_EQ(stack->size(), idx);
  DCHECK(instrumentation_frame.method_ == stack->front().method_);
  stack->pop_front();
  return instrumentation_frame.return_pc_;
}
1448
Dump() const1449 std::string InstrumentationStackFrame::Dump() const {
1450 std::ostringstream os;
1451 os << "Frame " << frame_id_ << " " << ArtMethod::PrettyMethod(method_) << ":"
1452 << reinterpret_cast<void*>(return_pc_) << " this=" << reinterpret_cast<void*>(this_object_);
1453 return os.str();
1454 }
1455
1456 } // namespace instrumentation
1457 } // namespace art
1458