/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "instrumentation.h"

#include <sstream>

#include "arch/context.h"
#include "art_method-inl.h"
#include "atomic.h"
#include "class_linker.h"
#include "debugger.h"
#include "dex_file-inl.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "entrypoints/quick/quick_alloc_entrypoints.h"
#include "entrypoints/runtime_asm_entrypoints.h"
#include "gc_root-inl.h"
#include "interpreter/interpreter.h"
#include "jit/jit.h"
#include "jit/jit_code_cache.h"
#include "mirror/class-inl.h"
#include "mirror/dex_cache.h"
#include "mirror/object_array-inl.h"
#include "mirror/object-inl.h"
#include "nth_caller_visitor.h"
#include "oat_quick_method_header.h"
#include "thread.h"
#include "thread_list.h"

namespace art {
namespace instrumentation {

constexpr bool kVerboseInstrumentation = false;

// Instrumentation works on non-inlined frames by updating returned PCs
// of compiled frames.
static constexpr StackVisitor::StackWalkKind kInstrumentationStackWalk =
    StackVisitor::StackWalkKind::kSkipInlinedFrames;

class InstallStubsClassVisitor : public ClassVisitor {
 public:
  explicit InstallStubsClassVisitor(Instrumentation* instrumentation)
      : instrumentation_(instrumentation) {}

  bool operator()(ObjPtr<mirror::Class> klass) OVERRIDE REQUIRES(Locks::mutator_lock_) {
    instrumentation_->InstallStubsForClass(klass.Ptr());
    return true;  // we visit all classes.
  }

 private:
  Instrumentation* const instrumentation_;
};


Instrumentation::Instrumentation()
    : instrumentation_stubs_installed_(false),
      entry_exit_stubs_installed_(false),
      interpreter_stubs_installed_(false),
      interpret_only_(false),
      forced_interpret_only_(false),
      have_method_entry_listeners_(false),
      have_method_exit_listeners_(false),
      have_method_unwind_listeners_(false),
      have_dex_pc_listeners_(false),
      have_field_read_listeners_(false),
      have_field_write_listeners_(false),
      have_exception_caught_listeners_(false),
      have_branch_listeners_(false),
      have_invoke_virtual_or_interface_listeners_(false),
      deoptimized_methods_lock_("deoptimized methods lock", kDeoptimizedMethodsLock),
      deoptimization_enabled_(false),
      interpreter_handler_table_(kMainHandlerTable),
      quick_alloc_entry_points_instrumentation_counter_(0),
      alloc_entrypoints_instrumented_(false) {
}

void Instrumentation::InstallStubsForClass(mirror::Class* klass) {
  if (!klass->IsResolved()) {
    // We need the class to be resolved to install/uninstall stubs. Otherwise its methods
    // cannot be initialized or linked with regard to class inheritance.
  } else if (klass->IsErroneousResolved()) {
    // We can't execute code in an erroneous class: do nothing.
  } else {
    for (ArtMethod& method : klass->GetMethods(kRuntimePointerSize)) {
      InstallStubsForMethod(&method);
    }
  }
}

static void UpdateEntrypoints(ArtMethod* method, const void* quick_code)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  method->SetEntryPointFromQuickCompiledCode(quick_code);
}

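// Returns true when the debug (interpreter) version of the method is required: a debugger is
// active, the runtime is java-debuggable, and the method is neither native nor a proxy method.
// Callers then use the interpreter bridge instead of the method's oat code.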
bool Instrumentation::NeedDebugVersionFor(ArtMethod* method) const
    REQUIRES_SHARED(Locks::mutator_lock_) {
  return Dbg::IsDebuggerActive() &&
         Runtime::Current()->IsJavaDebuggable() &&
         !method->IsNative() &&
         !method->IsProxyMethod();
}

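// Picks the quick entrypoint for the method according to the current instrumentation state: the
// interpreter bridge when interpretation is forced, the method is deoptimized, or its debug
// version is needed; the instrumentation entry stub when entry/exit stubs are installed; the
// resolution stub for static methods whose declaring class is not yet initialized; and the
// method's oat code otherwise.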
void Instrumentation::InstallStubsForMethod(ArtMethod* method) {
  if (!method->IsInvokable() || method->IsProxyMethod()) {
    // Do not change stubs for these methods.
    return;
  }
  // Don't stub Proxy.<init>. Note that the Proxy class itself is not a proxy class.
  if (method->IsConstructor() &&
      method->GetDeclaringClass()->DescriptorEquals("Ljava/lang/reflect/Proxy;")) {
    return;
  }
  const void* new_quick_code;
  bool uninstall = !entry_exit_stubs_installed_ && !interpreter_stubs_installed_;
  Runtime* const runtime = Runtime::Current();
  ClassLinker* const class_linker = runtime->GetClassLinker();
  bool is_class_initialized = method->GetDeclaringClass()->IsInitialized();
  if (uninstall) {
    if ((forced_interpret_only_ || IsDeoptimized(method)) && !method->IsNative()) {
      new_quick_code = GetQuickToInterpreterBridge();
    } else if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
      if (NeedDebugVersionFor(method)) {
        new_quick_code = GetQuickToInterpreterBridge();
      } else {
        new_quick_code = class_linker->GetQuickOatCodeFor(method);
      }
    } else {
      new_quick_code = GetQuickResolutionStub();
    }
  } else {  // !uninstall
    if ((interpreter_stubs_installed_ || forced_interpret_only_ || IsDeoptimized(method)) &&
        !method->IsNative()) {
      new_quick_code = GetQuickToInterpreterBridge();
    } else {
      // Do not overwrite resolution trampoline. When the trampoline initializes the method's
      // class, all its static methods code will be set to the instrumentation entry point.
      // For more details, see ClassLinker::FixupStaticTrampolines.
      if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
        if (NeedDebugVersionFor(method)) {
          // Oat code should not be used. Don't install instrumentation stub and
          // use interpreter for instrumentation.
          new_quick_code = GetQuickToInterpreterBridge();
        } else if (entry_exit_stubs_installed_) {
          new_quick_code = GetQuickInstrumentationEntryPoint();
        } else {
          new_quick_code = class_linker->GetQuickOatCodeFor(method);
        }
      } else {
        new_quick_code = GetQuickResolutionStub();
      }
    }
  }
  UpdateEntrypoints(method, new_quick_code);
}

// Places the instrumentation exit pc as the return PC for every quick frame. This also allows
// deoptimization of quick frames to interpreter frames.
// Since we may already have done this previously, we need to push new instrumentation frames
// before the existing instrumentation frames.
static void InstrumentationInstallStack(Thread* thread, void* arg)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  struct InstallStackVisitor FINAL : public StackVisitor {
    InstallStackVisitor(Thread* thread_in, Context* context, uintptr_t instrumentation_exit_pc)
        : StackVisitor(thread_in, context, kInstrumentationStackWalk),
          instrumentation_stack_(thread_in->GetInstrumentationStack()),
          instrumentation_exit_pc_(instrumentation_exit_pc),
          reached_existing_instrumentation_frames_(false), instrumentation_stack_depth_(0),
          last_return_pc_(0) {
    }

    bool VisitFrame() OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_) {
      ArtMethod* m = GetMethod();
      if (m == nullptr) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Skipping upcall. Frame " << GetFrameId();
        }
        last_return_pc_ = 0;
        return true;  // Ignore upcalls.
      }
      if (GetCurrentQuickFrame() == nullptr) {
        bool interpreter_frame = true;
        InstrumentationStackFrame instrumentation_frame(GetThisObject(), m, 0, GetFrameId(),
                                                        interpreter_frame);
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Pushing shadow frame " << instrumentation_frame.Dump();
        }
        shadow_stack_.push_back(instrumentation_frame);
        return true;  // Continue.
      }
      uintptr_t return_pc = GetReturnPc();
      if (m->IsRuntimeMethod()) {
        if (return_pc == instrumentation_exit_pc_) {
          if (kVerboseInstrumentation) {
            LOG(INFO) << "  Handling quick to interpreter transition. Frame " << GetFrameId();
          }
          CHECK_LT(instrumentation_stack_depth_, instrumentation_stack_->size());
          const InstrumentationStackFrame& frame =
              instrumentation_stack_->at(instrumentation_stack_depth_);
          CHECK(frame.interpreter_entry_);
          // This is an interpreter frame so method enter event must have been reported. However we
          // need to push a DEX pc into the dex_pcs_ list to match size of instrumentation stack.
          // Since we won't report method entry here, we can safely push any DEX pc.
          dex_pcs_.push_back(0);
          last_return_pc_ = frame.return_pc_;
          ++instrumentation_stack_depth_;
          return true;
        } else {
          if (kVerboseInstrumentation) {
            LOG(INFO) << "  Skipping runtime method. Frame " << GetFrameId();
          }
          last_return_pc_ = GetReturnPc();
          return true;  // Ignore unresolved methods since they will be instrumented after resolution.
        }
      }
      if (kVerboseInstrumentation) {
        LOG(INFO) << "  Installing exit stub in " << DescribeLocation();
      }
      if (return_pc == instrumentation_exit_pc_) {
        // We've reached a frame which has already been installed with instrumentation exit stub.
        // We should have already installed instrumentation on previous frames.
        reached_existing_instrumentation_frames_ = true;

        CHECK_LT(instrumentation_stack_depth_, instrumentation_stack_->size());
        const InstrumentationStackFrame& frame =
            instrumentation_stack_->at(instrumentation_stack_depth_);
        CHECK_EQ(m, frame.method_) << "Expected " << ArtMethod::PrettyMethod(m)
                                   << ", Found " << ArtMethod::PrettyMethod(frame.method_);
        return_pc = frame.return_pc_;
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Ignoring already instrumented " << frame.Dump();
        }
      } else {
        CHECK_NE(return_pc, 0U);
        CHECK(!reached_existing_instrumentation_frames_);
        InstrumentationStackFrame instrumentation_frame(GetThisObject(), m, return_pc, GetFrameId(),
                                                        false);
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Pushing frame " << instrumentation_frame.Dump();
        }

        // Insert frame at the right position so we do not corrupt the instrumentation stack.
        // Instrumentation stack frames are in descending frame id order.
        auto it = instrumentation_stack_->begin();
        for (auto end = instrumentation_stack_->end(); it != end; ++it) {
          const InstrumentationStackFrame& current = *it;
          if (instrumentation_frame.frame_id_ >= current.frame_id_) {
            break;
          }
        }
        instrumentation_stack_->insert(it, instrumentation_frame);
        SetReturnPc(instrumentation_exit_pc_);
      }
      dex_pcs_.push_back((GetCurrentOatQuickMethodHeader() == nullptr)
          ? DexFile::kDexNoIndex
          : GetCurrentOatQuickMethodHeader()->ToDexPc(m, last_return_pc_));
      last_return_pc_ = return_pc;
      ++instrumentation_stack_depth_;
      return true;  // Continue.
    }
    std::deque<InstrumentationStackFrame>* const instrumentation_stack_;
    std::vector<InstrumentationStackFrame> shadow_stack_;
    std::vector<uint32_t> dex_pcs_;
    const uintptr_t instrumentation_exit_pc_;
    bool reached_existing_instrumentation_frames_;
    size_t instrumentation_stack_depth_;
    uintptr_t last_return_pc_;
  };
  if (kVerboseInstrumentation) {
    std::string thread_name;
    thread->GetThreadName(thread_name);
    LOG(INFO) << "Installing exit stubs in " << thread_name;
  }

  Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
  std::unique_ptr<Context> context(Context::Create());
  uintptr_t instrumentation_exit_pc = reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc());
  InstallStackVisitor visitor(thread, context.get(), instrumentation_exit_pc);
  visitor.WalkStack(true);
  CHECK_EQ(visitor.dex_pcs_.size(), thread->GetInstrumentationStack()->size());

  if (instrumentation->ShouldNotifyMethodEnterExitEvents()) {
    // Create method enter events for all methods currently on the thread's stack. We only do this
    // if no debugger is attached, to avoid posting events twice.
    auto ssi = visitor.shadow_stack_.rbegin();
    for (auto isi = thread->GetInstrumentationStack()->rbegin(),
        end = thread->GetInstrumentationStack()->rend(); isi != end; ++isi) {
      while (ssi != visitor.shadow_stack_.rend() && (*ssi).frame_id_ < (*isi).frame_id_) {
        instrumentation->MethodEnterEvent(thread, (*ssi).this_object_, (*ssi).method_, 0);
        ++ssi;
      }
      uint32_t dex_pc = visitor.dex_pcs_.back();
      visitor.dex_pcs_.pop_back();
      if (!isi->interpreter_entry_) {
        instrumentation->MethodEnterEvent(thread, (*isi).this_object_, (*isi).method_, dex_pc);
      }
    }
  }
  thread->VerifyStack();
}

void Instrumentation::InstrumentThreadStack(Thread* thread) {
  instrumentation_stubs_installed_ = true;
  InstrumentationInstallStack(thread, this);
}

// Removes the instrumentation exit pc as the return PC for every quick frame.
static void InstrumentationRestoreStack(Thread* thread, void* arg)
    REQUIRES(Locks::mutator_lock_) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());

  struct RestoreStackVisitor FINAL : public StackVisitor {
    RestoreStackVisitor(Thread* thread_in, uintptr_t instrumentation_exit_pc,
                        Instrumentation* instrumentation)
        : StackVisitor(thread_in, nullptr, kInstrumentationStackWalk),
          thread_(thread_in),
          instrumentation_exit_pc_(instrumentation_exit_pc),
          instrumentation_(instrumentation),
          instrumentation_stack_(thread_in->GetInstrumentationStack()),
          frames_removed_(0) {}

    bool VisitFrame() OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_) {
      if (instrumentation_stack_->size() == 0) {
        return false;  // Stop.
      }
      ArtMethod* m = GetMethod();
      if (GetCurrentQuickFrame() == nullptr) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Ignoring a shadow frame. Frame " << GetFrameId()
              << " Method=" << ArtMethod::PrettyMethod(m);
        }
        return true;  // Ignore shadow frames.
      }
      if (m == nullptr) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Skipping upcall. Frame " << GetFrameId();
        }
        return true;  // Ignore upcalls.
      }
      bool removed_stub = false;
      // TODO: make this search more efficient?
      const size_t frameId = GetFrameId();
      for (const InstrumentationStackFrame& instrumentation_frame : *instrumentation_stack_) {
        if (instrumentation_frame.frame_id_ == frameId) {
          if (kVerboseInstrumentation) {
            LOG(INFO) << "  Removing exit stub in " << DescribeLocation();
          }
          if (instrumentation_frame.interpreter_entry_) {
            CHECK(m == Runtime::Current()->GetCalleeSaveMethod(Runtime::kSaveRefsAndArgs));
          } else {
            CHECK(m == instrumentation_frame.method_) << ArtMethod::PrettyMethod(m);
          }
          SetReturnPc(instrumentation_frame.return_pc_);
          if (instrumentation_->ShouldNotifyMethodEnterExitEvents()) {
            // Create the method exit events. As the methods didn't really exit, the result is 0.
            // We only do this if no debugger is attached, to avoid posting events twice.
            instrumentation_->MethodExitEvent(thread_, instrumentation_frame.this_object_, m,
                                              GetDexPc(), JValue());
          }
          frames_removed_++;
          removed_stub = true;
          break;
        }
      }
      if (!removed_stub) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  No exit stub in " << DescribeLocation();
        }
      }
      return true;  // Continue.
    }
    Thread* const thread_;
    const uintptr_t instrumentation_exit_pc_;
    Instrumentation* const instrumentation_;
    std::deque<instrumentation::InstrumentationStackFrame>* const instrumentation_stack_;
    size_t frames_removed_;
  };
  if (kVerboseInstrumentation) {
    std::string thread_name;
    thread->GetThreadName(thread_name);
    LOG(INFO) << "Removing exit stubs in " << thread_name;
  }
  std::deque<instrumentation::InstrumentationStackFrame>* stack = thread->GetInstrumentationStack();
  if (stack->size() > 0) {
    Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
    uintptr_t instrumentation_exit_pc =
        reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc());
    RestoreStackVisitor visitor(thread, instrumentation_exit_pc, instrumentation);
    visitor.WalkStack(true);
    CHECK_EQ(visitor.frames_removed_, stack->size());
    while (stack->size() > 0) {
      stack->pop_front();
    }
  }
}

static bool HasEvent(Instrumentation::InstrumentationEvent expected, uint32_t events) {
  return (events & expected) != 0;
}

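// Adds the listener to the given list if the event is part of the requested events. A null slot
// is reused when one is available so that iterators held by concurrent readers stay valid;
// otherwise the listener is appended. 'has_listener' is set to true.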
static void PotentiallyAddListenerTo(Instrumentation::InstrumentationEvent event,
                                     uint32_t events,
                                     std::list<InstrumentationListener*>& list,
                                     InstrumentationListener* listener,
                                     bool* has_listener)
    REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
  if (!HasEvent(event, events)) {
    return;
  }
  // If there is a free slot in the list, we insert the listener in that slot.
  // Otherwise we add it to the end of the list.
  auto it = std::find(list.begin(), list.end(), nullptr);
  if (it != list.end()) {
    *it = listener;
  } else {
    list.push_back(listener);
  }
  *has_listener = true;
}

void Instrumentation::AddListener(InstrumentationListener* listener, uint32_t events) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
  PotentiallyAddListenerTo(kMethodEntered,
                           events,
                           method_entry_listeners_,
                           listener,
                           &have_method_entry_listeners_);
  PotentiallyAddListenerTo(kMethodExited,
                           events,
                           method_exit_listeners_,
                           listener,
                           &have_method_exit_listeners_);
  PotentiallyAddListenerTo(kMethodUnwind,
                           events,
                           method_unwind_listeners_,
                           listener,
                           &have_method_unwind_listeners_);
  PotentiallyAddListenerTo(kBranch,
                           events,
                           branch_listeners_,
                           listener,
                           &have_branch_listeners_);
  PotentiallyAddListenerTo(kInvokeVirtualOrInterface,
                           events,
                           invoke_virtual_or_interface_listeners_,
                           listener,
                           &have_invoke_virtual_or_interface_listeners_);
  PotentiallyAddListenerTo(kDexPcMoved,
                           events,
                           dex_pc_listeners_,
                           listener,
                           &have_dex_pc_listeners_);
  PotentiallyAddListenerTo(kFieldRead,
                           events,
                           field_read_listeners_,
                           listener,
                           &have_field_read_listeners_);
  PotentiallyAddListenerTo(kFieldWritten,
                           events,
                           field_write_listeners_,
                           listener,
                           &have_field_write_listeners_);
  PotentiallyAddListenerTo(kExceptionCaught,
                           events,
                           exception_caught_listeners_,
                           listener,
                           &have_exception_caught_listeners_);
  UpdateInterpreterHandlerTable();
}

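// Clears the listener from the given list if the event is part of the requested events. The slot
// is set to null rather than erased so that concurrent iteration over the list stays safe, then
// 'has_listener' is recomputed from the remaining non-null entries.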
static void PotentiallyRemoveListenerFrom(Instrumentation::InstrumentationEvent event,
                                          uint32_t events,
                                          std::list<InstrumentationListener*>& list,
                                          InstrumentationListener* listener,
                                          bool* has_listener)
    REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
  if (!HasEvent(event, events)) {
    return;
  }
  auto it = std::find(list.begin(), list.end(), listener);
  if (it != list.end()) {
    // Just update the entry, do not remove from the list. Removing entries in the list
    // is unsafe when mutators are iterating over it.
    *it = nullptr;
  }

  // Check if the list contains any non-null listener, and update 'has_listener'.
  for (InstrumentationListener* l : list) {
    if (l != nullptr) {
      *has_listener = true;
      return;
    }
  }
  *has_listener = false;
}

void Instrumentation::RemoveListener(InstrumentationListener* listener, uint32_t events) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
  PotentiallyRemoveListenerFrom(kMethodEntered,
                                events,
                                method_entry_listeners_,
                                listener,
                                &have_method_entry_listeners_);
  PotentiallyRemoveListenerFrom(kMethodExited,
                                events,
                                method_exit_listeners_,
                                listener,
                                &have_method_exit_listeners_);
  PotentiallyRemoveListenerFrom(kMethodUnwind,
                                events,
                                method_unwind_listeners_,
                                listener,
                                &have_method_unwind_listeners_);
  PotentiallyRemoveListenerFrom(kBranch,
                                events,
                                branch_listeners_,
                                listener,
                                &have_branch_listeners_);
  PotentiallyRemoveListenerFrom(kInvokeVirtualOrInterface,
                                events,
                                invoke_virtual_or_interface_listeners_,
                                listener,
                                &have_invoke_virtual_or_interface_listeners_);
  PotentiallyRemoveListenerFrom(kDexPcMoved,
                                events,
                                dex_pc_listeners_,
                                listener,
                                &have_dex_pc_listeners_);
  PotentiallyRemoveListenerFrom(kFieldRead,
                                events,
                                field_read_listeners_,
                                listener,
                                &have_field_read_listeners_);
  PotentiallyRemoveListenerFrom(kFieldWritten,
                                events,
                                field_write_listeners_,
                                listener,
                                &have_field_write_listeners_);
  PotentiallyRemoveListenerFrom(kExceptionCaught,
                                events,
                                exception_caught_listeners_,
                                listener,
                                &have_exception_caught_listeners_);
  UpdateInterpreterHandlerTable();
}

Instrumentation::InstrumentationLevel Instrumentation::GetCurrentInstrumentationLevel() const {
  if (interpreter_stubs_installed_) {
    return InstrumentationLevel::kInstrumentWithInterpreter;
  } else if (entry_exit_stubs_installed_) {
    return InstrumentationLevel::kInstrumentWithInstrumentationStubs;
  } else {
    return InstrumentationLevel::kInstrumentNothing;
  }
}

bool Instrumentation::RequiresInstrumentationInstallation(InstrumentationLevel new_level) const {
  // We need to reinstall instrumentation if we go to a different level.
  return GetCurrentInstrumentationLevel() != new_level;
}

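// Records the instrumentation level requested by 'key' and reconfigures the stubs to the highest
// level currently requested by any client. When the effective level changes, entrypoints are
// updated for all classes, and instrumentation stack frames are installed on every thread (or
// restored, provided no method remains deoptimized).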
void Instrumentation::ConfigureStubs(const char* key, InstrumentationLevel desired_level) {
  // Store the instrumentation level for this key or remove it.
  if (desired_level == InstrumentationLevel::kInstrumentNothing) {
    // The client no longer needs instrumentation.
    requested_instrumentation_levels_.erase(key);
  } else {
    // The client needs instrumentation.
    requested_instrumentation_levels_.Overwrite(key, desired_level);
  }

  // Look for the highest required instrumentation level.
  InstrumentationLevel requested_level = InstrumentationLevel::kInstrumentNothing;
  for (const auto& v : requested_instrumentation_levels_) {
    requested_level = std::max(requested_level, v.second);
  }

  interpret_only_ = (requested_level == InstrumentationLevel::kInstrumentWithInterpreter) ||
                    forced_interpret_only_;

  if (!RequiresInstrumentationInstallation(requested_level)) {
    // We're already set.
    return;
  }
  Thread* const self = Thread::Current();
  Runtime* runtime = Runtime::Current();
  Locks::mutator_lock_->AssertExclusiveHeld(self);
  Locks::thread_list_lock_->AssertNotHeld(self);
  if (requested_level > InstrumentationLevel::kInstrumentNothing) {
    if (requested_level == InstrumentationLevel::kInstrumentWithInterpreter) {
      interpreter_stubs_installed_ = true;
      entry_exit_stubs_installed_ = true;
    } else {
      CHECK_EQ(requested_level, InstrumentationLevel::kInstrumentWithInstrumentationStubs);
      entry_exit_stubs_installed_ = true;
      interpreter_stubs_installed_ = false;
    }
    InstallStubsClassVisitor visitor(this);
    runtime->GetClassLinker()->VisitClasses(&visitor);
    instrumentation_stubs_installed_ = true;
    MutexLock mu(self, *Locks::thread_list_lock_);
    runtime->GetThreadList()->ForEach(InstrumentationInstallStack, this);
  } else {
    interpreter_stubs_installed_ = false;
    entry_exit_stubs_installed_ = false;
    InstallStubsClassVisitor visitor(this);
    runtime->GetClassLinker()->VisitClasses(&visitor);
    // Restore stack only if there is no method currently deoptimized.
    bool empty;
    {
      ReaderMutexLock mu(self, deoptimized_methods_lock_);
      empty = IsDeoptimizedMethodsEmpty();  // Avoid lock violation.
    }
    if (empty) {
      MutexLock mu(self, *Locks::thread_list_lock_);
      Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
      // Only do this after restoring, as walking the stack when restoring will see
      // the instrumentation exit pc.
      instrumentation_stubs_installed_ = false;
    }
  }
}

static void ResetQuickAllocEntryPointsForThread(Thread* thread, void* arg ATTRIBUTE_UNUSED) {
  thread->ResetQuickAllocEntryPointsForThread(kUseReadBarrier && thread->GetIsGcMarking());
}

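// Swaps the quick allocation entrypoints between their instrumented and uninstrumented versions.
// When the runtime is started, all threads are suspended so the entrypoints can be updated
// safely; otherwise only the calling thread (if any) is updated.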
void Instrumentation::SetEntrypointsInstrumented(bool instrumented) {
  Thread* self = Thread::Current();
  Runtime* runtime = Runtime::Current();
  Locks::mutator_lock_->AssertNotHeld(self);
  Locks::instrument_entrypoints_lock_->AssertHeld(self);
  if (runtime->IsStarted()) {
    ScopedSuspendAll ssa(__FUNCTION__);
    MutexLock mu(self, *Locks::runtime_shutdown_lock_);
    SetQuickAllocEntryPointsInstrumented(instrumented);
    ResetQuickAllocEntryPoints();
    alloc_entrypoints_instrumented_ = instrumented;
  } else {
    MutexLock mu(self, *Locks::runtime_shutdown_lock_);
    SetQuickAllocEntryPointsInstrumented(instrumented);

    // Note: ResetQuickAllocEntryPoints only works when the runtime is started. Manually run the
    //       update for just this thread.
    // Note: self may be null. One of those paths is setting instrumentation in the Heap
    //       constructor for gcstress mode.
    if (self != nullptr) {
      ResetQuickAllocEntryPointsForThread(self, nullptr);
    }

    alloc_entrypoints_instrumented_ = instrumented;
  }
}

void Instrumentation::InstrumentQuickAllocEntryPoints() {
  MutexLock mu(Thread::Current(), *Locks::instrument_entrypoints_lock_);
  InstrumentQuickAllocEntryPointsLocked();
}

void Instrumentation::UninstrumentQuickAllocEntryPoints() {
  MutexLock mu(Thread::Current(), *Locks::instrument_entrypoints_lock_);
  UninstrumentQuickAllocEntryPointsLocked();
}

void Instrumentation::InstrumentQuickAllocEntryPointsLocked() {
  Locks::instrument_entrypoints_lock_->AssertHeld(Thread::Current());
  if (quick_alloc_entry_points_instrumentation_counter_ == 0) {
    SetEntrypointsInstrumented(true);
  }
  ++quick_alloc_entry_points_instrumentation_counter_;
}

void Instrumentation::UninstrumentQuickAllocEntryPointsLocked() {
  Locks::instrument_entrypoints_lock_->AssertHeld(Thread::Current());
  CHECK_GT(quick_alloc_entry_points_instrumentation_counter_, 0U);
  --quick_alloc_entry_points_instrumentation_counter_;
  if (quick_alloc_entry_points_instrumentation_counter_ == 0) {
    SetEntrypointsInstrumented(false);
  }
}

void Instrumentation::ResetQuickAllocEntryPoints() {
  Runtime* runtime = Runtime::Current();
  if (runtime->IsStarted()) {
    MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
    runtime->GetThreadList()->ForEach(ResetQuickAllocEntryPointsForThread, nullptr);
  }
}

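// Chooses the entrypoint to install for the method given the requested quick code: the code
// itself when no instrumentation stubs are installed (or the code is already a runtime stub),
// the interpreter bridge when interpreter stubs are installed or the method is deoptimized, and
// the instrumentation entry stub when only entry/exit stubs are installed.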
void Instrumentation::UpdateMethodsCodeImpl(ArtMethod* method, const void* quick_code) {
  const void* new_quick_code;
  if (LIKELY(!instrumentation_stubs_installed_)) {
    new_quick_code = quick_code;
  } else {
    if ((interpreter_stubs_installed_ || IsDeoptimized(method)) && !method->IsNative()) {
      new_quick_code = GetQuickToInterpreterBridge();
    } else {
      ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
      if (class_linker->IsQuickResolutionStub(quick_code) ||
          class_linker->IsQuickToInterpreterBridge(quick_code)) {
        new_quick_code = quick_code;
      } else if (entry_exit_stubs_installed_) {
        new_quick_code = GetQuickInstrumentationEntryPoint();
      } else {
        new_quick_code = quick_code;
      }
    }
  }
  UpdateEntrypoints(method, new_quick_code);
}

void Instrumentation::UpdateMethodsCode(ArtMethod* method, const void* quick_code) {
  DCHECK(method->GetDeclaringClass()->IsResolved());
  UpdateMethodsCodeImpl(method, quick_code);
}

void Instrumentation::UpdateMethodsCodeForJavaDebuggable(ArtMethod* method,
                                                         const void* quick_code) {
  // When the runtime is set to Java debuggable, we may update the entry points of
  // all methods of a class to the interpreter bridge. A method's declaring class
  // might not be in resolved state yet in that case, so we bypass the DCHECK in
  // UpdateMethodsCode.
  UpdateMethodsCodeImpl(method, quick_code);
}

bool Instrumentation::AddDeoptimizedMethod(ArtMethod* method) {
  if (IsDeoptimizedMethod(method)) {
    // Already in the map. Return.
    return false;
  }
  // Not found. Add it.
  deoptimized_methods_.insert(method);
  return true;
}

bool Instrumentation::IsDeoptimizedMethod(ArtMethod* method) {
  return deoptimized_methods_.find(method) != deoptimized_methods_.end();
}

ArtMethod* Instrumentation::BeginDeoptimizedMethod() {
  if (deoptimized_methods_.empty()) {
    // Empty.
    return nullptr;
  }
  return *deoptimized_methods_.begin();
}

bool Instrumentation::RemoveDeoptimizedMethod(ArtMethod* method) {
  auto it = deoptimized_methods_.find(method);
  if (it == deoptimized_methods_.end()) {
    return false;
  }
  deoptimized_methods_.erase(it);
  return true;
}

bool Instrumentation::IsDeoptimizedMethodsEmpty() const {
  return deoptimized_methods_.empty();
}

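// Marks the method as requiring deoptimization. Unless interpreter stubs are already installed,
// its entrypoint is redirected to the instrumentation entry stub and instrumentation exit stubs
// are installed on every thread's stack.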
void Instrumentation::Deoptimize(ArtMethod* method) {
  CHECK(!method->IsNative());
  CHECK(!method->IsProxyMethod());
  CHECK(method->IsInvokable());

  Thread* self = Thread::Current();
  {
    WriterMutexLock mu(self, deoptimized_methods_lock_);
    bool has_not_been_deoptimized = AddDeoptimizedMethod(method);
    CHECK(has_not_been_deoptimized) << "Method " << ArtMethod::PrettyMethod(method)
        << " is already deoptimized";
  }
  if (!interpreter_stubs_installed_) {
    UpdateEntrypoints(method, GetQuickInstrumentationEntryPoint());

    // Install instrumentation exit stub and instrumentation frames. We may already have installed
    // these previously so it will only cover the newly created frames.
    instrumentation_stubs_installed_ = true;
    MutexLock mu(self, *Locks::thread_list_lock_);
    Runtime::Current()->GetThreadList()->ForEach(InstrumentationInstallStack, this);
  }
}

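// Removes the method from the deoptimized set and, unless interpreter stubs are installed,
// restores its entrypoint: the resolution stub for static methods of uninitialized classes, the
// interpreter bridge when the debug version is needed, or its oat code otherwise. When no
// deoptimized method remains, the instrumentation frames of every thread are restored as well.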
void Instrumentation::Undeoptimize(ArtMethod* method) {
  CHECK(!method->IsNative());
  CHECK(!method->IsProxyMethod());
  CHECK(method->IsInvokable());

  Thread* self = Thread::Current();
  bool empty;
  {
    WriterMutexLock mu(self, deoptimized_methods_lock_);
    bool found_and_erased = RemoveDeoptimizedMethod(method);
    CHECK(found_and_erased) << "Method " << ArtMethod::PrettyMethod(method)
        << " is not deoptimized";
    empty = IsDeoptimizedMethodsEmpty();
  }

  // Restore code and possibly stack only if we did not deoptimize everything.
  if (!interpreter_stubs_installed_) {
    // Restore its code or resolution trampoline.
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    if (method->IsStatic() && !method->IsConstructor() &&
        !method->GetDeclaringClass()->IsInitialized()) {
      UpdateEntrypoints(method, GetQuickResolutionStub());
    } else {
      const void* quick_code = NeedDebugVersionFor(method)
          ? GetQuickToInterpreterBridge()
          : class_linker->GetQuickOatCodeFor(method);
      UpdateEntrypoints(method, quick_code);
    }

    // If there is no deoptimized method left, we can restore the stack of each thread.
    if (empty) {
      MutexLock mu(self, *Locks::thread_list_lock_);
      Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
      instrumentation_stubs_installed_ = false;
    }
  }
}

bool Instrumentation::IsDeoptimized(ArtMethod* method) {
  DCHECK(method != nullptr);
  ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
  return IsDeoptimizedMethod(method);
}

void Instrumentation::EnableDeoptimization() {
  ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
  CHECK(IsDeoptimizedMethodsEmpty());
  CHECK_EQ(deoptimization_enabled_, false);
  deoptimization_enabled_ = true;
}

void Instrumentation::DisableDeoptimization(const char* key) {
  CHECK_EQ(deoptimization_enabled_, true);
  // If we deoptimized everything, undo it.
  InstrumentationLevel level = GetCurrentInstrumentationLevel();
  if (level == InstrumentationLevel::kInstrumentWithInterpreter) {
    UndeoptimizeEverything(key);
  }
  // Undeoptimize selected methods.
  while (true) {
    ArtMethod* method;
    {
      ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
      if (IsDeoptimizedMethodsEmpty()) {
        break;
      }
      method = BeginDeoptimizedMethod();
      CHECK(method != nullptr);
    }
    Undeoptimize(method);
  }
  deoptimization_enabled_ = false;
}

// Indicates if instrumentation should notify method enter/exit events to the listeners.
bool Instrumentation::ShouldNotifyMethodEnterExitEvents() const {
  if (!HasMethodEntryListeners() && !HasMethodExitListeners()) {
    return false;
  }
  return !deoptimization_enabled_ && !interpreter_stubs_installed_;
}

void Instrumentation::DeoptimizeEverything(const char* key) {
  CHECK(deoptimization_enabled_);
  ConfigureStubs(key, InstrumentationLevel::kInstrumentWithInterpreter);
}

void Instrumentation::UndeoptimizeEverything(const char* key) {
  CHECK(interpreter_stubs_installed_);
  CHECK(deoptimization_enabled_);
  ConfigureStubs(key, InstrumentationLevel::kInstrumentNothing);
}

void Instrumentation::EnableMethodTracing(const char* key, bool needs_interpreter) {
  InstrumentationLevel level;
  if (needs_interpreter) {
    level = InstrumentationLevel::kInstrumentWithInterpreter;
  } else {
    level = InstrumentationLevel::kInstrumentWithInstrumentationStubs;
  }
  ConfigureStubs(key, level);
}

void Instrumentation::DisableMethodTracing(const char* key) {
  ConfigureStubs(key, InstrumentationLevel::kInstrumentNothing);
}

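// Returns the quick code that will actually run for the method: its current entrypoint when no
// instrumentation stubs are installed and that entrypoint is neither the resolution stub nor the
// interpreter bridge, otherwise the oat code looked up through the class linker.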
const void* Instrumentation::GetQuickCodeFor(ArtMethod* method, PointerSize pointer_size) const {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  if (LIKELY(!instrumentation_stubs_installed_)) {
    const void* code = method->GetEntryPointFromQuickCompiledCodePtrSize(pointer_size);
    DCHECK(code != nullptr);
    if (LIKELY(!class_linker->IsQuickResolutionStub(code) &&
               !class_linker->IsQuickToInterpreterBridge(code))) {
      return code;
    }
  }
  return class_linker->GetQuickOatCodeFor(method);
}

void Instrumentation::MethodEnterEventImpl(Thread* thread, mirror::Object* this_object,
                                           ArtMethod* method,
                                           uint32_t dex_pc) const {
  if (HasMethodEntryListeners()) {
    for (InstrumentationListener* listener : method_entry_listeners_) {
      if (listener != nullptr) {
        listener->MethodEntered(thread, this_object, method, dex_pc);
      }
    }
  }
}

void Instrumentation::MethodExitEventImpl(Thread* thread, mirror::Object* this_object,
                                          ArtMethod* method,
                                          uint32_t dex_pc, const JValue& return_value) const {
  if (HasMethodExitListeners()) {
    for (InstrumentationListener* listener : method_exit_listeners_) {
      if (listener != nullptr) {
        listener->MethodExited(thread, this_object, method, dex_pc, return_value);
      }
    }
  }
}

void Instrumentation::MethodUnwindEvent(Thread* thread, mirror::Object* this_object,
                                        ArtMethod* method,
                                        uint32_t dex_pc) const {
  if (HasMethodUnwindListeners()) {
    for (InstrumentationListener* listener : method_unwind_listeners_) {
      if (listener != nullptr) {
        listener->MethodUnwind(thread, this_object, method, dex_pc);
      }
    }
  }
}

void Instrumentation::DexPcMovedEventImpl(Thread* thread, mirror::Object* this_object,
                                          ArtMethod* method,
                                          uint32_t dex_pc) const {
  for (InstrumentationListener* listener : dex_pc_listeners_) {
    if (listener != nullptr) {
      listener->DexPcMoved(thread, this_object, method, dex_pc);
    }
  }
}

void Instrumentation::BranchImpl(Thread* thread,
                                 ArtMethod* method,
                                 uint32_t dex_pc,
                                 int32_t offset) const {
  for (InstrumentationListener* listener : branch_listeners_) {
    if (listener != nullptr) {
      listener->Branch(thread, method, dex_pc, offset);
    }
  }
}

void Instrumentation::InvokeVirtualOrInterfaceImpl(Thread* thread,
                                                   mirror::Object* this_object,
                                                   ArtMethod* caller,
                                                   uint32_t dex_pc,
                                                   ArtMethod* callee) const {
  // We cannot have thread suspension since that would cause the this_object parameter to
  // potentially become a dangling pointer. An alternative could be to put it in a handle instead.
  ScopedAssertNoThreadSuspension ants(__FUNCTION__);
  for (InstrumentationListener* listener : invoke_virtual_or_interface_listeners_) {
    if (listener != nullptr) {
      listener->InvokeVirtualOrInterface(thread, this_object, caller, dex_pc, callee);
    }
  }
}

void Instrumentation::FieldReadEventImpl(Thread* thread, mirror::Object* this_object,
                                         ArtMethod* method, uint32_t dex_pc,
                                         ArtField* field) const {
  for (InstrumentationListener* listener : field_read_listeners_) {
    if (listener != nullptr) {
      listener->FieldRead(thread, this_object, method, dex_pc, field);
    }
  }
}

void Instrumentation::FieldWriteEventImpl(Thread* thread, mirror::Object* this_object,
                                          ArtMethod* method, uint32_t dex_pc,
                                          ArtField* field, const JValue& field_value) const {
  for (InstrumentationListener* listener : field_write_listeners_) {
    if (listener != nullptr) {
      listener->FieldWritten(thread, this_object, method, dex_pc, field, field_value);
    }
  }
}

void Instrumentation::ExceptionCaughtEvent(Thread* thread,
                                           mirror::Throwable* exception_object) const {
  Thread* self = Thread::Current();
  StackHandleScope<1> hs(self);
  Handle<mirror::Throwable> h_exception(hs.NewHandle(exception_object));
  if (HasExceptionCaughtListeners()) {
    DCHECK_EQ(thread->GetException(), h_exception.Get());
    thread->ClearException();
    for (InstrumentationListener* listener : exception_caught_listeners_) {
      if (listener != nullptr) {
        listener->ExceptionCaught(thread, h_exception.Get());
      }
    }
    thread->SetException(h_exception.Get());
  }
}

// Computes a frame ID by ignoring inlined frames.
size_t Instrumentation::ComputeFrameId(Thread* self,
                                       size_t frame_depth,
                                       size_t inlined_frames_before_frame) {
  CHECK_GE(frame_depth, inlined_frames_before_frame);
  size_t no_inline_depth = frame_depth - inlined_frames_before_frame;
  return StackVisitor::ComputeNumFrames(self, kInstrumentationStackWalk) - no_inline_depth;
}

static void CheckStackDepth(Thread* self, const InstrumentationStackFrame& instrumentation_frame,
                            int delta)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  size_t frame_id = StackVisitor::ComputeNumFrames(self, kInstrumentationStackWalk) + delta;
  if (frame_id != instrumentation_frame.frame_id_) {
    LOG(ERROR) << "Expected frame_id=" << frame_id << " but found "
        << instrumentation_frame.frame_id_;
    StackVisitor::DescribeStack(self);
    CHECK_EQ(frame_id, instrumentation_frame.frame_id_);
  }
}

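// Called on entry to an instrumented method. Records an InstrumentationStackFrame holding the
// real return pc (lr) so that the exit stub can later restore it, and reports the method entry
// event unless this frame was pushed for an interpreter entry.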
void Instrumentation::PushInstrumentationStackFrame(Thread* self, mirror::Object* this_object,
                                                    ArtMethod* method,
                                                    uintptr_t lr, bool interpreter_entry) {
  // We have a callee-save frame meaning this value is guaranteed to never be 0.
  size_t frame_id = StackVisitor::ComputeNumFrames(self, kInstrumentationStackWalk);
  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
  if (kVerboseInstrumentation) {
    LOG(INFO) << "Entering " << ArtMethod::PrettyMethod(method) << " from PC "
              << reinterpret_cast<void*>(lr);
  }
  instrumentation::InstrumentationStackFrame instrumentation_frame(this_object, method, lr,
                                                                   frame_id, interpreter_entry);
  stack->push_front(instrumentation_frame);

  if (!interpreter_entry) {
    MethodEnterEvent(self, this_object, method, 0);
  }
}

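// Called by the instrumentation exit stub when an instrumented method returns. Pops the recorded
// frame, reports the method exit event, and either resumes at the original return pc or requests
// a deoptimization when the caller must continue in the interpreter.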
TwoWordReturn Instrumentation::PopInstrumentationStackFrame(Thread* self, uintptr_t* return_pc,
                                                            uint64_t gpr_result,
                                                            uint64_t fpr_result) {
  // Do the pop.
  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
  CHECK_GT(stack->size(), 0U);
  InstrumentationStackFrame instrumentation_frame = stack->front();
  stack->pop_front();

  // Set return PC and check the sanity of the stack.
  *return_pc = instrumentation_frame.return_pc_;
  CheckStackDepth(self, instrumentation_frame, 0);
  self->VerifyStack();

  ArtMethod* method = instrumentation_frame.method_;
  uint32_t length;
  const PointerSize pointer_size = Runtime::Current()->GetClassLinker()->GetImagePointerSize();
  char return_shorty = method->GetInterfaceMethodIfProxy(pointer_size)->GetShorty(&length)[0];
  JValue return_value;
  if (return_shorty == 'V') {
    return_value.SetJ(0);
  } else if (return_shorty == 'F' || return_shorty == 'D') {
    return_value.SetJ(fpr_result);
  } else {
    return_value.SetJ(gpr_result);
  }
  // TODO: improve the dex pc information here, requires knowledge of current PC as opposed to
  //       return_pc.
  uint32_t dex_pc = DexFile::kDexNoIndex;
  mirror::Object* this_object = instrumentation_frame.this_object_;
  if (!instrumentation_frame.interpreter_entry_) {
    MethodExitEvent(self, this_object, instrumentation_frame.method_, dex_pc, return_value);
  }

  // Deoptimize if the caller needs to continue execution in the interpreter. Do nothing if we get
  // back to an upcall.
  NthCallerVisitor visitor(self, 1, true);
  visitor.WalkStack(true);
  bool deoptimize = (visitor.caller != nullptr) &&
                    (interpreter_stubs_installed_ || IsDeoptimized(visitor.caller) ||
                    Dbg::IsForcedInterpreterNeededForUpcall(self, visitor.caller));
  if (deoptimize && Runtime::Current()->IsAsyncDeoptimizeable(*return_pc)) {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Deoptimizing "
                << visitor.caller->PrettyMethod()
                << " by returning from "
                << method->PrettyMethod()
                << " with result "
                << std::hex << return_value.GetJ() << std::dec
                << " in "
                << *self;
    }
    self->PushDeoptimizationContext(return_value,
                                    return_shorty == 'L',
                                    false /* from_code */,
                                    nullptr /* no pending exception */);
    return GetTwoWordSuccessValue(*return_pc,
                                  reinterpret_cast<uintptr_t>(GetQuickDeoptimizationEntryPoint()));
  } else {
    if (deoptimize && !Runtime::Current()->IsAsyncDeoptimizeable(*return_pc)) {
      LOG(WARNING) << "Got a deoptimization request on un-deoptimizable " << method->PrettyMethod()
                   << " at PC " << reinterpret_cast<void*>(*return_pc);
    }
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Returning from " << method->PrettyMethod()
                << " to PC " << reinterpret_cast<void*>(*return_pc);
    }
    return GetTwoWordSuccessValue(0, *return_pc);
  }
}

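// Pops an instrumentation frame while the stack is being unwound (for an exception or a
// deoptimization) and returns the original return pc. A method unwind event is reported unless
// we are popping for deoptimization.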
uintptr_t Instrumentation::PopMethodForUnwind(Thread* self, bool is_deoptimization) const {
  // Do the pop.
  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
  CHECK_GT(stack->size(), 0U);
  InstrumentationStackFrame instrumentation_frame = stack->front();
  // TODO: bring back CheckStackDepth(self, instrumentation_frame, 2);
  stack->pop_front();

  ArtMethod* method = instrumentation_frame.method_;
  if (is_deoptimization) {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Popping for deoptimization " << ArtMethod::PrettyMethod(method);
    }
  } else {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Popping for unwind " << ArtMethod::PrettyMethod(method);
    }

    // Notify listeners of method unwind.
    // TODO: improve the dex pc information here, requires knowledge of current PC as opposed to
    //       return_pc.
    uint32_t dex_pc = DexFile::kDexNoIndex;
    MethodUnwindEvent(self, instrumentation_frame.this_object_, method, dex_pc);
  }
  return instrumentation_frame.return_pc_;
}

std::string InstrumentationStackFrame::Dump() const {
  std::ostringstream os;
  os << "Frame " << frame_id_ << " " << ArtMethod::PrettyMethod(method_) << ":"
      << reinterpret_cast<void*>(return_pc_) << " this=" << reinterpret_cast<void*>(this_object_);
  return os.str();
}

}  // namespace instrumentation
}  // namespace art