/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "instrumentation.h"

#include <sstream>

#include "arch/context.h"
#include "art_method-inl.h"
#include "atomic.h"
#include "class_linker.h"
#include "debugger.h"
#include "dex_file-inl.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "entrypoints/quick/quick_alloc_entrypoints.h"
#include "entrypoints/runtime_asm_entrypoints.h"
#include "gc_root-inl.h"
#include "interpreter/interpreter.h"
#include "jit/jit.h"
#include "jit/jit_code_cache.h"
#include "mirror/class-inl.h"
#include "mirror/dex_cache.h"
#include "mirror/object_array-inl.h"
#include "mirror/object-inl.h"
#include "nth_caller_visitor.h"
#include "thread.h"
#include "thread_list.h"

namespace art {
namespace instrumentation {

constexpr bool kVerboseInstrumentation = false;

// Instrumentation works on non-inlined frames by updating returned PCs
// of compiled frames.
static constexpr StackVisitor::StackWalkKind kInstrumentationStackWalk =
    StackVisitor::StackWalkKind::kSkipInlinedFrames;

static bool InstallStubsClassVisitor(mirror::Class* klass, void* arg)
    EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_) {
  Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
  instrumentation->InstallStubsForClass(klass);
  return true;  // we visit all classes.
}

Instrumentation::Instrumentation()
    : instrumentation_stubs_installed_(false), entry_exit_stubs_installed_(false),
      interpreter_stubs_installed_(false),
      interpret_only_(false), forced_interpret_only_(false),
      have_method_entry_listeners_(false), have_method_exit_listeners_(false),
      have_method_unwind_listeners_(false), have_dex_pc_listeners_(false),
      have_field_read_listeners_(false), have_field_write_listeners_(false),
      have_exception_caught_listeners_(false), have_backward_branch_listeners_(false),
      deoptimized_methods_lock_("deoptimized methods lock"),
      deoptimization_enabled_(false),
      interpreter_handler_table_(kMainHandlerTable),
      quick_alloc_entry_points_instrumentation_counter_(0) {
}

void Instrumentation::InstallStubsForClass(mirror::Class* klass) {
  if (klass->IsErroneous()) {
    // We can't execute code in an erroneous class: do nothing.
  } else if (!klass->IsResolved()) {
    // We need the class to be resolved to install/uninstall stubs. Otherwise its methods
    // could not be initialized or linked with regard to class inheritance.
  } else {
    for (size_t i = 0, e = klass->NumDirectMethods(); i < e; i++) {
      InstallStubsForMethod(klass->GetDirectMethod(i, sizeof(void*)));
    }
    for (size_t i = 0, e = klass->NumVirtualMethods(); i < e; i++) {
      InstallStubsForMethod(klass->GetVirtualMethod(i, sizeof(void*)));
    }
  }
}

static void UpdateEntrypoints(ArtMethod* method, const void* quick_code)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  Runtime* const runtime = Runtime::Current();
  jit::Jit* jit = runtime->GetJit();
  if (jit != nullptr) {
    const void* old_code_ptr = method->GetEntryPointFromQuickCompiledCode();
    jit::JitCodeCache* code_cache = jit->GetCodeCache();
    if (code_cache->ContainsCodePtr(old_code_ptr)) {
      // Save the old compiled code since we need it to implement ClassLinker::GetQuickOatCodeFor.
      code_cache->SaveCompiledCode(method, old_code_ptr);
    }
  }
  method->SetEntryPointFromQuickCompiledCode(quick_code);
  if (!method->IsResolutionMethod()) {
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    if (class_linker->IsQuickToInterpreterBridge(quick_code) ||
        (class_linker->IsQuickResolutionStub(quick_code) &&
         Runtime::Current()->GetInstrumentation()->IsForcedInterpretOnly() &&
         !method->IsNative() && !method->IsProxyMethod())) {
      DCHECK(!method->IsNative()) << PrettyMethod(method);
      DCHECK(!method->IsProxyMethod()) << PrettyMethod(method);
      method->SetEntryPointFromInterpreter(art::artInterpreterToInterpreterBridge);
    } else {
      method->SetEntryPointFromInterpreter(art::artInterpreterToCompiledCodeBridge);
    }
  }
}

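// Note: UpdateEntrypoints (above) keeps the quick and interpreter entrypoints consistent. If the
// new quick code is the quick-to-interpreter bridge (or the resolution stub while interpret-only
// is forced, for a non-native, non-proxy method), interpreted callers are routed through the
// interpreter-to-interpreter bridge; otherwise they go through the interpreter-to-compiled-code
// bridge.
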
void Instrumentation::InstallStubsForMethod(ArtMethod* method) {
  if (method->IsAbstract() || method->IsProxyMethod()) {
    // Do not change stubs for these methods.
    return;
  }
  // Don't stub Proxy.<init>. Note that the Proxy class itself is not a proxy class.
  if (method->IsConstructor() &&
      method->GetDeclaringClass()->DescriptorEquals("Ljava/lang/reflect/Proxy;")) {
    return;
  }
  const void* new_quick_code;
  bool uninstall = !entry_exit_stubs_installed_ && !interpreter_stubs_installed_;
  Runtime* const runtime = Runtime::Current();
  ClassLinker* const class_linker = runtime->GetClassLinker();
  bool is_class_initialized = method->GetDeclaringClass()->IsInitialized();
  if (uninstall) {
    if ((forced_interpret_only_ || IsDeoptimized(method)) && !method->IsNative()) {
      new_quick_code = GetQuickToInterpreterBridge();
    } else if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
      new_quick_code = class_linker->GetQuickOatCodeFor(method);
    } else {
      new_quick_code = GetQuickResolutionStub();
    }
  } else {  // !uninstall
    if ((interpreter_stubs_installed_ || forced_interpret_only_ || IsDeoptimized(method)) &&
        !method->IsNative()) {
      new_quick_code = GetQuickToInterpreterBridge();
    } else {
      // Do not overwrite the resolution trampoline. When the trampoline initializes the method's
      // class, the code of all its static methods will be set to the instrumentation entry point.
      // For more details, see ClassLinker::FixupStaticTrampolines.
      if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
        if (entry_exit_stubs_installed_) {
          new_quick_code = GetQuickInstrumentationEntryPoint();
        } else {
          new_quick_code = class_linker->GetQuickOatCodeFor(method);
        }
      } else {
        new_quick_code = GetQuickResolutionStub();
      }
    }
  }
  UpdateEntrypoints(method, new_quick_code);
}

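// In short: when no stubs are required, a method gets back its oat code (or the resolution stub
// for uninitialized static methods, or the quick-to-interpreter bridge if it is deoptimized or
// interpret-only is forced). When entry/exit stubs are required, compiled methods of initialized
// classes get the instrumentation entry point instead, and when interpreter stubs are required
// every non-native method gets the quick-to-interpreter bridge.
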
// Places the instrumentation exit pc as the return PC for every quick frame. This also allows
// deoptimization of quick frames to interpreter frames.
// Since we may already have done this previously, we need to push new instrumentation frames
// before the existing instrumentation frames.
static void InstrumentationInstallStack(Thread* thread, void* arg)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  struct InstallStackVisitor FINAL : public StackVisitor {
    InstallStackVisitor(Thread* thread_in, Context* context, uintptr_t instrumentation_exit_pc)
        : StackVisitor(thread_in, context, kInstrumentationStackWalk),
          instrumentation_stack_(thread_in->GetInstrumentationStack()),
          instrumentation_exit_pc_(instrumentation_exit_pc),
          reached_existing_instrumentation_frames_(false), instrumentation_stack_depth_(0),
          last_return_pc_(0) {
    }

    bool VisitFrame() OVERRIDE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
      ArtMethod* m = GetMethod();
      if (m == nullptr) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Skipping upcall. Frame " << GetFrameId();
        }
        last_return_pc_ = 0;
        return true;  // Ignore upcalls.
      }
      if (GetCurrentQuickFrame() == nullptr) {
        bool interpreter_frame = true;
        InstrumentationStackFrame instrumentation_frame(GetThisObject(), m, 0, GetFrameId(),
                                                        interpreter_frame);
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Pushing shadow frame " << instrumentation_frame.Dump();
        }
        shadow_stack_.push_back(instrumentation_frame);
        return true;  // Continue.
      }
      uintptr_t return_pc = GetReturnPc();
      if (m->IsRuntimeMethod()) {
        if (return_pc == instrumentation_exit_pc_) {
          if (kVerboseInstrumentation) {
            LOG(INFO) << "  Handling quick to interpreter transition. Frame " << GetFrameId();
          }
          CHECK_LT(instrumentation_stack_depth_, instrumentation_stack_->size());
          const InstrumentationStackFrame& frame =
              instrumentation_stack_->at(instrumentation_stack_depth_);
          CHECK(frame.interpreter_entry_);
          // This is an interpreter frame, so the method enter event must already have been
          // reported. However we need to push a DEX pc into the dex_pcs_ list to match the size
          // of the instrumentation stack. Since we won't report a method entry here, we can
          // safely push any DEX pc.
          dex_pcs_.push_back(0);
          last_return_pc_ = frame.return_pc_;
          ++instrumentation_stack_depth_;
          return true;
        } else {
          if (kVerboseInstrumentation) {
            LOG(INFO) << "  Skipping runtime method. Frame " << GetFrameId();
          }
          last_return_pc_ = GetReturnPc();
          // Ignore unresolved methods since they will be instrumented after resolution.
          return true;
        }
      }
      if (kVerboseInstrumentation) {
        LOG(INFO) << "  Installing exit stub in " << DescribeLocation();
      }
      if (return_pc == instrumentation_exit_pc_) {
        // We've reached a frame for which the instrumentation exit stub was already installed.
        // We should have already installed instrumentation on previous frames.
        reached_existing_instrumentation_frames_ = true;

        CHECK_LT(instrumentation_stack_depth_, instrumentation_stack_->size());
        const InstrumentationStackFrame& frame =
            instrumentation_stack_->at(instrumentation_stack_depth_);
        CHECK_EQ(m, frame.method_) << "Expected " << PrettyMethod(m)
                                   << ", Found " << PrettyMethod(frame.method_);
        return_pc = frame.return_pc_;
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Ignoring already instrumented " << frame.Dump();
        }
      } else {
        CHECK_NE(return_pc, 0U);
        CHECK(!reached_existing_instrumentation_frames_);
        InstrumentationStackFrame instrumentation_frame(GetThisObject(), m, return_pc, GetFrameId(),
                                                        false);
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Pushing frame " << instrumentation_frame.Dump();
        }

        // Insert frame at the right position so we do not corrupt the instrumentation stack.
        // Instrumentation stack frames are in descending frame id order.
        auto it = instrumentation_stack_->begin();
        for (auto end = instrumentation_stack_->end(); it != end; ++it) {
          const InstrumentationStackFrame& current = *it;
          if (instrumentation_frame.frame_id_ >= current.frame_id_) {
            break;
          }
        }
        instrumentation_stack_->insert(it, instrumentation_frame);
        SetReturnPc(instrumentation_exit_pc_);
      }
      dex_pcs_.push_back(m->ToDexPc(last_return_pc_));
      last_return_pc_ = return_pc;
      ++instrumentation_stack_depth_;
      return true;  // Continue.
    }
    std::deque<InstrumentationStackFrame>* const instrumentation_stack_;
    std::vector<InstrumentationStackFrame> shadow_stack_;
    std::vector<uint32_t> dex_pcs_;
    const uintptr_t instrumentation_exit_pc_;
    bool reached_existing_instrumentation_frames_;
    size_t instrumentation_stack_depth_;
    uintptr_t last_return_pc_;
  };
  if (kVerboseInstrumentation) {
    std::string thread_name;
    thread->GetThreadName(thread_name);
    LOG(INFO) << "Installing exit stubs in " << thread_name;
  }

  Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
  std::unique_ptr<Context> context(Context::Create());
  uintptr_t instrumentation_exit_pc = reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc());
  InstallStackVisitor visitor(thread, context.get(), instrumentation_exit_pc);
  visitor.WalkStack(true);
  CHECK_EQ(visitor.dex_pcs_.size(), thread->GetInstrumentationStack()->size());

  if (instrumentation->ShouldNotifyMethodEnterExitEvents()) {
    // Create method enter events for all methods currently on the thread's stack. We only do this
    // if no debugger is attached, to avoid posting events twice.
    auto ssi = visitor.shadow_stack_.rbegin();
    for (auto isi = thread->GetInstrumentationStack()->rbegin(),
        end = thread->GetInstrumentationStack()->rend(); isi != end; ++isi) {
      while (ssi != visitor.shadow_stack_.rend() && (*ssi).frame_id_ < (*isi).frame_id_) {
        instrumentation->MethodEnterEvent(thread, (*ssi).this_object_, (*ssi).method_, 0);
        ++ssi;
      }
      uint32_t dex_pc = visitor.dex_pcs_.back();
      visitor.dex_pcs_.pop_back();
      if (!isi->interpreter_entry_) {
        instrumentation->MethodEnterEvent(thread, (*isi).this_object_, (*isi).method_, dex_pc);
      }
    }
  }
  thread->VerifyStack();
}

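// After the walk above, every quick frame on the thread's stack returns through the
// instrumentation exit stub, the per-thread instrumentation stack records the original return
// PCs in descending frame id order, and dex_pcs_ holds one entry per instrumentation frame
// (checked by the CHECK_EQ above).
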
// Removes the instrumentation exit pc as the return PC for every quick frame.
static void InstrumentationRestoreStack(Thread* thread, void* arg)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  struct RestoreStackVisitor FINAL : public StackVisitor {
    RestoreStackVisitor(Thread* thread_in, uintptr_t instrumentation_exit_pc,
                        Instrumentation* instrumentation)
        : StackVisitor(thread_in, nullptr, kInstrumentationStackWalk),
          thread_(thread_in),
          instrumentation_exit_pc_(instrumentation_exit_pc),
          instrumentation_(instrumentation),
          instrumentation_stack_(thread_in->GetInstrumentationStack()),
          frames_removed_(0) {}

    bool VisitFrame() OVERRIDE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
      if (instrumentation_stack_->size() == 0) {
        return false;  // Stop.
      }
      ArtMethod* m = GetMethod();
      if (GetCurrentQuickFrame() == nullptr) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Ignoring a shadow frame. Frame " << GetFrameId()
              << " Method=" << PrettyMethod(m);
        }
        return true;  // Ignore shadow frames.
      }
      if (m == nullptr) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Skipping upcall. Frame " << GetFrameId();
        }
        return true;  // Ignore upcalls.
      }
      bool removed_stub = false;
      // TODO: make this search more efficient?
      const size_t frameId = GetFrameId();
      for (const InstrumentationStackFrame& instrumentation_frame : *instrumentation_stack_) {
        if (instrumentation_frame.frame_id_ == frameId) {
          if (kVerboseInstrumentation) {
            LOG(INFO) << "  Removing exit stub in " << DescribeLocation();
          }
          if (instrumentation_frame.interpreter_entry_) {
            CHECK(m == Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs));
          } else {
            CHECK(m == instrumentation_frame.method_) << PrettyMethod(m);
          }
          SetReturnPc(instrumentation_frame.return_pc_);
          if (instrumentation_->ShouldNotifyMethodEnterExitEvents()) {
            // Create the method exit events. As the methods didn't really exit, the result is 0.
            // We only do this if no debugger is attached, to avoid posting events twice.
            instrumentation_->MethodExitEvent(thread_, instrumentation_frame.this_object_, m,
                                              GetDexPc(), JValue());
          }
          frames_removed_++;
          removed_stub = true;
          break;
        }
      }
      if (!removed_stub) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  No exit stub in " << DescribeLocation();
        }
      }
      return true;  // Continue.
    }
    Thread* const thread_;
    const uintptr_t instrumentation_exit_pc_;
    Instrumentation* const instrumentation_;
    std::deque<instrumentation::InstrumentationStackFrame>* const instrumentation_stack_;
    size_t frames_removed_;
  };
  if (kVerboseInstrumentation) {
    std::string thread_name;
    thread->GetThreadName(thread_name);
    LOG(INFO) << "Removing exit stubs in " << thread_name;
  }
  std::deque<instrumentation::InstrumentationStackFrame>* stack = thread->GetInstrumentationStack();
  if (stack->size() > 0) {
    Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
    uintptr_t instrumentation_exit_pc =
        reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc());
    RestoreStackVisitor visitor(thread, instrumentation_exit_pc, instrumentation);
    visitor.WalkStack(true);
    CHECK_EQ(visitor.frames_removed_, stack->size());
    while (stack->size() > 0) {
      stack->pop_front();
    }
  }
}

static bool HasEvent(Instrumentation::InstrumentationEvent expected, uint32_t events) {
  return (events & expected) != 0;
}

void Instrumentation::AddListener(InstrumentationListener* listener, uint32_t events) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
  if (HasEvent(kMethodEntered, events)) {
    method_entry_listeners_.push_back(listener);
    have_method_entry_listeners_ = true;
  }
  if (HasEvent(kMethodExited, events)) {
    method_exit_listeners_.push_back(listener);
    have_method_exit_listeners_ = true;
  }
  if (HasEvent(kMethodUnwind, events)) {
    method_unwind_listeners_.push_back(listener);
    have_method_unwind_listeners_ = true;
  }
  if (HasEvent(kBackwardBranch, events)) {
    backward_branch_listeners_.push_back(listener);
    have_backward_branch_listeners_ = true;
  }
  if (HasEvent(kDexPcMoved, events)) {
    std::list<InstrumentationListener*>* modified;
    if (have_dex_pc_listeners_) {
      modified = new std::list<InstrumentationListener*>(*dex_pc_listeners_.get());
    } else {
      modified = new std::list<InstrumentationListener*>();
    }
    modified->push_back(listener);
    dex_pc_listeners_.reset(modified);
    have_dex_pc_listeners_ = true;
  }
  if (HasEvent(kFieldRead, events)) {
    std::list<InstrumentationListener*>* modified;
    if (have_field_read_listeners_) {
      modified = new std::list<InstrumentationListener*>(*field_read_listeners_.get());
    } else {
      modified = new std::list<InstrumentationListener*>();
    }
    modified->push_back(listener);
    field_read_listeners_.reset(modified);
    have_field_read_listeners_ = true;
  }
  if (HasEvent(kFieldWritten, events)) {
    std::list<InstrumentationListener*>* modified;
    if (have_field_write_listeners_) {
      modified = new std::list<InstrumentationListener*>(*field_write_listeners_.get());
    } else {
      modified = new std::list<InstrumentationListener*>();
    }
    modified->push_back(listener);
    field_write_listeners_.reset(modified);
    have_field_write_listeners_ = true;
  }
  if (HasEvent(kExceptionCaught, events)) {
    std::list<InstrumentationListener*>* modified;
    if (have_exception_caught_listeners_) {
      modified = new std::list<InstrumentationListener*>(*exception_caught_listeners_.get());
    } else {
      modified = new std::list<InstrumentationListener*>();
    }
    modified->push_back(listener);
    exception_caught_listeners_.reset(modified);
    have_exception_caught_listeners_ = true;
  }
  UpdateInterpreterHandlerTable();
}
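
// The dex pc, field and exception listener lists above are copy-on-write: a fresh list is built
// and swapped in wholesale so event dispatchers iterating over a snapshot never observe a
// mutation. Registration requires the mutator lock to be exclusively held. A hedged, illustrative
// call site (my_listener is an assumed InstrumentationListener implementation, not part of this
// file):
//
//   art::instrumentation::Instrumentation* instr = Runtime::Current()->GetInstrumentation();
//   instr->AddListener(my_listener,
//                      Instrumentation::kMethodEntered | Instrumentation::kMethodExited);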

void Instrumentation::RemoveListener(InstrumentationListener* listener, uint32_t events) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());

  if (HasEvent(kMethodEntered, events) && have_method_entry_listeners_) {
    method_entry_listeners_.remove(listener);
    have_method_entry_listeners_ = !method_entry_listeners_.empty();
  }
  if (HasEvent(kMethodExited, events) && have_method_exit_listeners_) {
    method_exit_listeners_.remove(listener);
    have_method_exit_listeners_ = !method_exit_listeners_.empty();
  }
  if (HasEvent(kMethodUnwind, events) && have_method_unwind_listeners_) {
    method_unwind_listeners_.remove(listener);
    have_method_unwind_listeners_ = !method_unwind_listeners_.empty();
  }
  if (HasEvent(kBackwardBranch, events) && have_backward_branch_listeners_) {
    backward_branch_listeners_.remove(listener);
    have_backward_branch_listeners_ = !backward_branch_listeners_.empty();
  }
  if (HasEvent(kDexPcMoved, events) && have_dex_pc_listeners_) {
    std::list<InstrumentationListener*>* modified =
        new std::list<InstrumentationListener*>(*dex_pc_listeners_.get());
    modified->remove(listener);
    have_dex_pc_listeners_ = !modified->empty();
    if (have_dex_pc_listeners_) {
      dex_pc_listeners_.reset(modified);
    } else {
      dex_pc_listeners_.reset();
      delete modified;
    }
  }
  if (HasEvent(kFieldRead, events) && have_field_read_listeners_) {
    std::list<InstrumentationListener*>* modified =
        new std::list<InstrumentationListener*>(*field_read_listeners_.get());
    modified->remove(listener);
    have_field_read_listeners_ = !modified->empty();
    if (have_field_read_listeners_) {
      field_read_listeners_.reset(modified);
    } else {
      field_read_listeners_.reset();
      delete modified;
    }
  }
  if (HasEvent(kFieldWritten, events) && have_field_write_listeners_) {
    std::list<InstrumentationListener*>* modified =
        new std::list<InstrumentationListener*>(*field_write_listeners_.get());
    modified->remove(listener);
    have_field_write_listeners_ = !modified->empty();
    if (have_field_write_listeners_) {
      field_write_listeners_.reset(modified);
    } else {
      field_write_listeners_.reset();
      delete modified;
    }
  }
  if (HasEvent(kExceptionCaught, events) && have_exception_caught_listeners_) {
    std::list<InstrumentationListener*>* modified =
        new std::list<InstrumentationListener*>(*exception_caught_listeners_.get());
    modified->remove(listener);
    have_exception_caught_listeners_ = !modified->empty();
    if (have_exception_caught_listeners_) {
      exception_caught_listeners_.reset(modified);
    } else {
      exception_caught_listeners_.reset();
      delete modified;
    }
  }
  UpdateInterpreterHandlerTable();
}

Instrumentation::InstrumentationLevel Instrumentation::GetCurrentInstrumentationLevel() const {
  if (interpreter_stubs_installed_) {
    return InstrumentationLevel::kInstrumentWithInterpreter;
  } else if (entry_exit_stubs_installed_) {
    return InstrumentationLevel::kInstrumentWithInstrumentationStubs;
  } else {
    return InstrumentationLevel::kInstrumentNothing;
  }
}

void Instrumentation::ConfigureStubs(const char* key, InstrumentationLevel desired_level) {
  // Store the instrumentation level for this key or remove it.
  if (desired_level == InstrumentationLevel::kInstrumentNothing) {
    // The client no longer needs instrumentation.
    requested_instrumentation_levels_.erase(key);
  } else {
    // The client needs instrumentation.
    requested_instrumentation_levels_.Overwrite(key, desired_level);
  }

  // Look for the highest required instrumentation level.
  InstrumentationLevel requested_level = InstrumentationLevel::kInstrumentNothing;
  for (const auto& v : requested_instrumentation_levels_) {
    requested_level = std::max(requested_level, v.second);
  }

  interpret_only_ = (requested_level == InstrumentationLevel::kInstrumentWithInterpreter) ||
                    forced_interpret_only_;

  InstrumentationLevel current_level = GetCurrentInstrumentationLevel();
  if (requested_level == current_level) {
    // We're already set.
    return;
  }
  Thread* const self = Thread::Current();
  Runtime* runtime = Runtime::Current();
  Locks::mutator_lock_->AssertExclusiveHeld(self);
  Locks::thread_list_lock_->AssertNotHeld(self);
  if (requested_level > InstrumentationLevel::kInstrumentNothing) {
    if (requested_level == InstrumentationLevel::kInstrumentWithInterpreter) {
      interpreter_stubs_installed_ = true;
      entry_exit_stubs_installed_ = true;
    } else {
      CHECK_EQ(requested_level, InstrumentationLevel::kInstrumentWithInstrumentationStubs);
      entry_exit_stubs_installed_ = true;
      interpreter_stubs_installed_ = false;
    }
    runtime->GetClassLinker()->VisitClasses(InstallStubsClassVisitor, this);
    instrumentation_stubs_installed_ = true;
    MutexLock mu(self, *Locks::thread_list_lock_);
    runtime->GetThreadList()->ForEach(InstrumentationInstallStack, this);
  } else {
    interpreter_stubs_installed_ = false;
    entry_exit_stubs_installed_ = false;
    runtime->GetClassLinker()->VisitClasses(InstallStubsClassVisitor, this);
    // Restore stack only if there is no method currently deoptimized.
    bool empty;
    {
      ReaderMutexLock mu(self, deoptimized_methods_lock_);
      empty = IsDeoptimizedMethodsEmpty();  // Avoid lock violation.
    }
    if (empty) {
      instrumentation_stubs_installed_ = false;
      MutexLock mu(self, *Locks::thread_list_lock_);
      Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
    }
  }
}

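// Each client (e.g. the debugger or method tracing) registers its requirement under its own key;
// the effective level applied above is the maximum over all registered keys, so one client
// dropping to kInstrumentNothing does not strip stubs another client still needs.
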
static void ResetQuickAllocEntryPointsForThread(Thread* thread, void* arg ATTRIBUTE_UNUSED) {
  thread->ResetQuickAllocEntryPointsForThread();
}

void Instrumentation::SetEntrypointsInstrumented(bool instrumented) {
  Thread* self = Thread::Current();
  Runtime* runtime = Runtime::Current();
  ThreadList* tl = runtime->GetThreadList();
  Locks::mutator_lock_->AssertNotHeld(self);
  Locks::instrument_entrypoints_lock_->AssertHeld(self);
  if (runtime->IsStarted()) {
    tl->SuspendAll(__FUNCTION__);
  }
  {
    MutexLock mu(self, *Locks::runtime_shutdown_lock_);
    SetQuickAllocEntryPointsInstrumented(instrumented);
    ResetQuickAllocEntryPoints();
  }
  if (runtime->IsStarted()) {
    tl->ResumeAll();
  }
}

void Instrumentation::InstrumentQuickAllocEntryPoints() {
  MutexLock mu(Thread::Current(), *Locks::instrument_entrypoints_lock_);
  InstrumentQuickAllocEntryPointsLocked();
}

void Instrumentation::UninstrumentQuickAllocEntryPoints() {
  MutexLock mu(Thread::Current(), *Locks::instrument_entrypoints_lock_);
  UninstrumentQuickAllocEntryPointsLocked();
}

void Instrumentation::InstrumentQuickAllocEntryPointsLocked() {
  Locks::instrument_entrypoints_lock_->AssertHeld(Thread::Current());
  if (quick_alloc_entry_points_instrumentation_counter_ == 0) {
    SetEntrypointsInstrumented(true);
  }
  ++quick_alloc_entry_points_instrumentation_counter_;
}

void Instrumentation::UninstrumentQuickAllocEntryPointsLocked() {
  Locks::instrument_entrypoints_lock_->AssertHeld(Thread::Current());
  CHECK_GT(quick_alloc_entry_points_instrumentation_counter_, 0U);
  --quick_alloc_entry_points_instrumentation_counter_;
  if (quick_alloc_entry_points_instrumentation_counter_ == 0) {
    SetEntrypointsInstrumented(false);
  }
}
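
// The counter above reference-counts instrumentation requests for the allocation entrypoints:
// the entrypoints are actually switched (under a suspend-all) only on the 0 -> 1 and 1 -> 0
// transitions, so nested requests are cheap. An illustrative, hedged pairing as an allocation
// tracking style client might use it:
//
//   instr->InstrumentQuickAllocEntryPoints();
//   // ... record allocations ...
//   instr->UninstrumentQuickAllocEntryPoints();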

void Instrumentation::ResetQuickAllocEntryPoints() {
  Runtime* runtime = Runtime::Current();
  if (runtime->IsStarted()) {
    MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
    runtime->GetThreadList()->ForEach(ResetQuickAllocEntryPointsForThread, nullptr);
  }
}

void Instrumentation::UpdateMethodsCode(ArtMethod* method, const void* quick_code) {
  DCHECK(method->GetDeclaringClass()->IsResolved());
  const void* new_quick_code;
  if (LIKELY(!instrumentation_stubs_installed_)) {
    new_quick_code = quick_code;
  } else {
    if ((interpreter_stubs_installed_ || IsDeoptimized(method)) && !method->IsNative()) {
      new_quick_code = GetQuickToInterpreterBridge();
    } else {
      ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
      if (class_linker->IsQuickResolutionStub(quick_code) ||
          class_linker->IsQuickToInterpreterBridge(quick_code)) {
        new_quick_code = quick_code;
      } else if (entry_exit_stubs_installed_) {
        new_quick_code = GetQuickInstrumentationEntryPoint();
      } else {
        new_quick_code = quick_code;
      }
    }
  }
  UpdateEntrypoints(method, new_quick_code);
}

bool Instrumentation::AddDeoptimizedMethod(ArtMethod* method) {
  if (IsDeoptimizedMethod(method)) {
    // Already in the set. Return.
    return false;
  }
  // Not found. Add it.
  deoptimized_methods_.insert(method);
  return true;
}

bool Instrumentation::IsDeoptimizedMethod(ArtMethod* method) {
  return deoptimized_methods_.find(method) != deoptimized_methods_.end();
}

ArtMethod* Instrumentation::BeginDeoptimizedMethod() {
  if (deoptimized_methods_.empty()) {
    // Empty.
    return nullptr;
  }
  return *deoptimized_methods_.begin();
}

bool Instrumentation::RemoveDeoptimizedMethod(ArtMethod* method) {
  auto it = deoptimized_methods_.find(method);
  if (it == deoptimized_methods_.end()) {
    return false;
  }
  deoptimized_methods_.erase(it);
  return true;
}

bool Instrumentation::IsDeoptimizedMethodsEmpty() const {
  return deoptimized_methods_.empty();
}

void Instrumentation::Deoptimize(ArtMethod* method) {
  CHECK(!method->IsNative());
  CHECK(!method->IsProxyMethod());
  CHECK(!method->IsAbstract());

  Thread* self = Thread::Current();
  {
    WriterMutexLock mu(self, deoptimized_methods_lock_);
    bool has_not_been_deoptimized = AddDeoptimizedMethod(method);
    CHECK(has_not_been_deoptimized) << "Method " << PrettyMethod(method)
        << " is already deoptimized";
  }
  if (!interpreter_stubs_installed_) {
    UpdateEntrypoints(method, GetQuickInstrumentationEntryPoint());

    // Install instrumentation exit stub and instrumentation frames. We may already have installed
    // these previously so it will only cover the newly created frames.
    instrumentation_stubs_installed_ = true;
    MutexLock mu(self, *Locks::thread_list_lock_);
    Runtime::Current()->GetThreadList()->ForEach(InstrumentationInstallStack, this);
  }
}
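
// Deoptimizing a single method therefore means: remember it in deoptimized_methods_, route new
// calls through the instrumentation entry point, and patch exit stubs into every thread's stack
// so that returns into already-active frames can also be intercepted.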

void Instrumentation::Undeoptimize(ArtMethod* method) {
  CHECK(!method->IsNative());
  CHECK(!method->IsProxyMethod());
  CHECK(!method->IsAbstract());

  Thread* self = Thread::Current();
  bool empty;
  {
    WriterMutexLock mu(self, deoptimized_methods_lock_);
    bool found_and_erased = RemoveDeoptimizedMethod(method);
    CHECK(found_and_erased) << "Method " << PrettyMethod(method)
        << " is not deoptimized";
    empty = IsDeoptimizedMethodsEmpty();
  }

  // Restore code and possibly stack only if we did not deoptimize everything.
  if (!interpreter_stubs_installed_) {
    // Restore its code or resolution trampoline.
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    if (method->IsStatic() && !method->IsConstructor() &&
        !method->GetDeclaringClass()->IsInitialized()) {
      UpdateEntrypoints(method, GetQuickResolutionStub());
    } else {
      const void* quick_code = class_linker->GetQuickOatCodeFor(method);
      UpdateEntrypoints(method, quick_code);
    }

    // If there is no deoptimized method left, we can restore the stack of each thread.
    if (empty) {
      MutexLock mu(self, *Locks::thread_list_lock_);
      Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
      instrumentation_stubs_installed_ = false;
    }
  }
}

bool Instrumentation::IsDeoptimized(ArtMethod* method) {
  DCHECK(method != nullptr);
  ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
  return IsDeoptimizedMethod(method);
}

void Instrumentation::EnableDeoptimization() {
  ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
  CHECK(IsDeoptimizedMethodsEmpty());
  CHECK_EQ(deoptimization_enabled_, false);
  deoptimization_enabled_ = true;
}

void Instrumentation::DisableDeoptimization(const char* key) {
  CHECK_EQ(deoptimization_enabled_, true);
  // If we deoptimized everything, undo it.
  if (interpreter_stubs_installed_) {
    UndeoptimizeEverything(key);
  }
  // Undeoptimize selected methods.
  while (true) {
    ArtMethod* method;
    {
      ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
      if (IsDeoptimizedMethodsEmpty()) {
        break;
      }
      method = BeginDeoptimizedMethod();
      CHECK(method != nullptr);
    }
    Undeoptimize(method);
  }
  deoptimization_enabled_ = false;
}
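
// Note the loop structure above: a method is picked while holding deoptimized_methods_lock_ for
// reading, but Undeoptimize() is called outside that scope because it reacquires the lock for
// writing (and may take thread_list_lock_) itself.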

// Indicates if instrumentation should notify method enter/exit events to the listeners.
bool Instrumentation::ShouldNotifyMethodEnterExitEvents() const {
  if (!HasMethodEntryListeners() && !HasMethodExitListeners()) {
    return false;
  }
  return !deoptimization_enabled_ && !interpreter_stubs_installed_;
}

void Instrumentation::DeoptimizeEverything(const char* key) {
  CHECK(deoptimization_enabled_);
  ConfigureStubs(key, InstrumentationLevel::kInstrumentWithInterpreter);
}

void Instrumentation::UndeoptimizeEverything(const char* key) {
  CHECK(interpreter_stubs_installed_);
  CHECK(deoptimization_enabled_);
  ConfigureStubs(key, InstrumentationLevel::kInstrumentNothing);
}

void Instrumentation::EnableMethodTracing(const char* key, bool needs_interpreter) {
  InstrumentationLevel level;
  if (needs_interpreter) {
    level = InstrumentationLevel::kInstrumentWithInterpreter;
  } else {
    level = InstrumentationLevel::kInstrumentWithInstrumentationStubs;
  }
  ConfigureStubs(key, level);
}

void Instrumentation::DisableMethodTracing(const char* key) {
  ConfigureStubs(key, InstrumentationLevel::kInstrumentNothing);
}

const void* Instrumentation::GetQuickCodeFor(ArtMethod* method, size_t pointer_size) const {
  Runtime* runtime = Runtime::Current();
  if (LIKELY(!instrumentation_stubs_installed_)) {
    const void* code = method->GetEntryPointFromQuickCompiledCodePtrSize(pointer_size);
    DCHECK(code != nullptr);
    ClassLinker* class_linker = runtime->GetClassLinker();
    if (LIKELY(!class_linker->IsQuickResolutionStub(code) &&
               !class_linker->IsQuickToInterpreterBridge(code))) {
      return code;
    }
  }
  return runtime->GetClassLinker()->GetQuickOatCodeFor(method);
}
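
// In other words: with no stubs installed, the method's current quick entrypoint is returned as
// long as it is real code rather than the resolution stub or the interpreter bridge; in every
// other case the original oat code is looked up through the class linker.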

void Instrumentation::MethodEnterEventImpl(Thread* thread, mirror::Object* this_object,
                                           ArtMethod* method,
                                           uint32_t dex_pc) const {
  auto it = method_entry_listeners_.begin();
  bool is_end = (it == method_entry_listeners_.end());
  // Implemented this way to prevent problems caused by modification of the list while iterating.
  while (!is_end) {
    InstrumentationListener* cur = *it;
    ++it;
    is_end = (it == method_entry_listeners_.end());
    cur->MethodEntered(thread, this_object, method, dex_pc);
  }
}

void Instrumentation::MethodExitEventImpl(Thread* thread, mirror::Object* this_object,
                                          ArtMethod* method,
                                          uint32_t dex_pc, const JValue& return_value) const {
  auto it = method_exit_listeners_.begin();
  bool is_end = (it == method_exit_listeners_.end());
  // Implemented this way to prevent problems caused by modification of the list while iterating.
  while (!is_end) {
    InstrumentationListener* cur = *it;
    ++it;
    is_end = (it == method_exit_listeners_.end());
    cur->MethodExited(thread, this_object, method, dex_pc, return_value);
  }
}

void Instrumentation::MethodUnwindEvent(Thread* thread, mirror::Object* this_object,
                                        ArtMethod* method,
                                        uint32_t dex_pc) const {
  if (HasMethodUnwindListeners()) {
    for (InstrumentationListener* listener : method_unwind_listeners_) {
      listener->MethodUnwind(thread, this_object, method, dex_pc);
    }
  }
}

void Instrumentation::DexPcMovedEventImpl(Thread* thread, mirror::Object* this_object,
                                          ArtMethod* method,
                                          uint32_t dex_pc) const {
  std::shared_ptr<std::list<InstrumentationListener*>> original(dex_pc_listeners_);
  for (InstrumentationListener* listener : *original.get()) {
    listener->DexPcMoved(thread, this_object, method, dex_pc);
  }
}

void Instrumentation::BackwardBranchImpl(Thread* thread, ArtMethod* method,
                                         int32_t offset) const {
  for (InstrumentationListener* listener : backward_branch_listeners_) {
    listener->BackwardBranch(thread, method, offset);
  }
}

void Instrumentation::FieldReadEventImpl(Thread* thread, mirror::Object* this_object,
                                         ArtMethod* method, uint32_t dex_pc,
                                         ArtField* field) const {
  std::shared_ptr<std::list<InstrumentationListener*>> original(field_read_listeners_);
  for (InstrumentationListener* listener : *original.get()) {
    listener->FieldRead(thread, this_object, method, dex_pc, field);
  }
}

void Instrumentation::FieldWriteEventImpl(Thread* thread, mirror::Object* this_object,
                                          ArtMethod* method, uint32_t dex_pc,
                                          ArtField* field, const JValue& field_value) const {
  std::shared_ptr<std::list<InstrumentationListener*>> original(field_write_listeners_);
  for (InstrumentationListener* listener : *original.get()) {
    listener->FieldWritten(thread, this_object, method, dex_pc, field, field_value);
  }
}

void Instrumentation::ExceptionCaughtEvent(Thread* thread,
                                           mirror::Throwable* exception_object) const {
  if (HasExceptionCaughtListeners()) {
    DCHECK_EQ(thread->GetException(), exception_object);
    thread->ClearException();
    std::shared_ptr<std::list<InstrumentationListener*>> original(exception_caught_listeners_);
    for (InstrumentationListener* listener : *original.get()) {
      listener->ExceptionCaught(thread, exception_object);
    }
    thread->SetException(exception_object);
  }
}
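
// The pending exception is cleared around the listener callbacks above so listeners (the
// debugger, for instance) can run logic without a pending exception getting in the way; the
// original exception is restored on the thread afterwards.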

static void CheckStackDepth(Thread* self, const InstrumentationStackFrame& instrumentation_frame,
                            int delta)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  size_t frame_id = StackVisitor::ComputeNumFrames(self, kInstrumentationStackWalk) + delta;
  if (frame_id != instrumentation_frame.frame_id_) {
    LOG(ERROR) << "Expected frame_id=" << frame_id << " but found "
        << instrumentation_frame.frame_id_;
    StackVisitor::DescribeStack(self);
    CHECK_EQ(frame_id, instrumentation_frame.frame_id_);
  }
}

void Instrumentation::PushInstrumentationStackFrame(Thread* self, mirror::Object* this_object,
                                                    ArtMethod* method,
                                                    uintptr_t lr, bool interpreter_entry) {
  // We have a callee-save frame meaning this value is guaranteed to never be 0.
  size_t frame_id = StackVisitor::ComputeNumFrames(self, kInstrumentationStackWalk);
  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
  if (kVerboseInstrumentation) {
    LOG(INFO) << "Entering " << PrettyMethod(method) << " from PC " << reinterpret_cast<void*>(lr);
  }
  instrumentation::InstrumentationStackFrame instrumentation_frame(this_object, method, lr,
                                                                   frame_id, interpreter_entry);
  stack->push_front(instrumentation_frame);

  if (!interpreter_entry) {
    MethodEnterEvent(self, this_object, method, 0);
  }
}

TwoWordReturn Instrumentation::PopInstrumentationStackFrame(Thread* self, uintptr_t* return_pc,
                                                            uint64_t gpr_result,
                                                            uint64_t fpr_result) {
  // Do the pop.
  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
  CHECK_GT(stack->size(), 0U);
  InstrumentationStackFrame instrumentation_frame = stack->front();
  stack->pop_front();

  // Set return PC and check the sanity of the stack.
  *return_pc = instrumentation_frame.return_pc_;
  CheckStackDepth(self, instrumentation_frame, 0);
  self->VerifyStack();

  ArtMethod* method = instrumentation_frame.method_;
  uint32_t length;
  char return_shorty = method->GetShorty(&length)[0];
  JValue return_value;
  if (return_shorty == 'V') {
    return_value.SetJ(0);
  } else if (return_shorty == 'F' || return_shorty == 'D') {
    return_value.SetJ(fpr_result);
  } else {
    return_value.SetJ(gpr_result);
  }
  // TODO: improve the dex pc information here, requires knowledge of current PC as opposed to
  //       return_pc.
  uint32_t dex_pc = DexFile::kDexNoIndex;
  mirror::Object* this_object = instrumentation_frame.this_object_;
  if (!instrumentation_frame.interpreter_entry_) {
    MethodExitEvent(self, this_object, instrumentation_frame.method_, dex_pc, return_value);
  }

  // Deoptimize if the caller needs to continue execution in the interpreter. Do nothing if we get
  // back to an upcall.
  NthCallerVisitor visitor(self, 1, true);
  visitor.WalkStack(true);
  bool deoptimize = (visitor.caller != nullptr) &&
                    (interpreter_stubs_installed_ || IsDeoptimized(visitor.caller) ||
                    Dbg::IsForcedInterpreterNeededForUpcall(self, visitor.caller));
  if (deoptimize) {
    if (kVerboseInstrumentation) {
      LOG(INFO) << StringPrintf("Deoptimizing %s by returning from %s with result %#" PRIx64 " in ",
                                PrettyMethod(visitor.caller).c_str(),
                                PrettyMethod(method).c_str(),
                                return_value.GetJ()) << *self;
    }
    self->SetDeoptimizationReturnValue(return_value, return_shorty == 'L');
    return GetTwoWordSuccessValue(*return_pc,
                                  reinterpret_cast<uintptr_t>(GetQuickDeoptimizationEntryPoint()));
  } else {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Returning from " << PrettyMethod(method)
                << " to PC " << reinterpret_cast<void*>(*return_pc);
    }
    return GetTwoWordSuccessValue(0, *return_pc);
  }
}
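
// The two-word return above tells the assembly exit stub what to do next: when deoptimizing it
// receives (original return pc, deoptimization entrypoint) so it can branch into the
// deoptimization path, otherwise it receives (0, original return pc) and simply resumes the
// caller.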

void Instrumentation::PopMethodForUnwind(Thread* self, bool is_deoptimization) const {
  // Do the pop.
  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
  CHECK_GT(stack->size(), 0U);
  InstrumentationStackFrame instrumentation_frame = stack->front();
  // TODO: bring back CheckStackDepth(self, instrumentation_frame, 2);
  stack->pop_front();

  ArtMethod* method = instrumentation_frame.method_;
  if (is_deoptimization) {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Popping for deoptimization " << PrettyMethod(method);
    }
  } else {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Popping for unwind " << PrettyMethod(method);
    }

    // Notify listeners of method unwind.
    // TODO: improve the dex pc information here, requires knowledge of current PC as opposed to
    //       return_pc.
    uint32_t dex_pc = DexFile::kDexNoIndex;
    MethodUnwindEvent(self, instrumentation_frame.this_object_, method, dex_pc);
  }
}

std::string InstrumentationStackFrame::Dump() const {
  std::ostringstream os;
  os << "Frame " << frame_id_ << " " << PrettyMethod(method_) << ":"
      << reinterpret_cast<void*>(return_pc_) << " this=" << reinterpret_cast<void*>(this_object_);
  return os.str();
}

}  // namespace instrumentation
}  // namespace art