1 /*
2  * Copyright (C) 2011 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #ifndef ART_RUNTIME_INSTRUMENTATION_H_
18 #define ART_RUNTIME_INSTRUMENTATION_H_
19 
20 #include <stdint.h>
21 #include <list>
22 #include <unordered_set>
23 
24 #include "arch/instruction_set.h"
25 #include "base/macros.h"
26 #include "base/mutex.h"
27 #include "gc_root.h"
28 #include "safe_map.h"
29 
30 namespace art {
31 namespace mirror {
32   class Class;
33   class Object;
34   class Throwable;
35 }  // namespace mirror
36 class ArtField;
37 class ArtMethod;
38 union JValue;
39 class Thread;
40 
41 namespace instrumentation {
42 
// Interpreter handler tables. The interpreter picks one of these depending on whether any
// suspend checks or instrumentation listeners are active.
enum InterpreterHandlerTable {
  kMainHandlerTable = 0,          // Main handler table: no suspend check, no instrumentation.
  kAlternativeHandlerTable = 1,   // Alternative handler table: suspend check and/or instrumentation
                                  // enabled.
  kNumHandlerTables
};
50 
// Do we want to deoptimize for method entry and exit listeners or just try to intercept
// invocations? Deoptimization forces all code to run in the interpreter and considerably hurts the
// application's performance.
static constexpr bool kDeoptimizeForAccurateMethodEntryExitListeners = true;
55 
56 // Instrumentation event listener API. Registered listeners will get the appropriate call back for
57 // the events they are listening for. The call backs supply the thread, method and dex_pc the event
58 // occurred upon. The thread may or may not be Thread::Current().
59 struct InstrumentationListener {
InstrumentationListenerInstrumentationListener60   InstrumentationListener() {}
~InstrumentationListenerInstrumentationListener61   virtual ~InstrumentationListener() {}
62 
63   // Call-back for when a method is entered.
64   virtual void MethodEntered(Thread* thread, mirror::Object* this_object,
65                              ArtMethod* method,
66                              uint32_t dex_pc) SHARED_REQUIRES(Locks::mutator_lock_) = 0;
67 
68   // Call-back for when a method is exited.
69   virtual void MethodExited(Thread* thread, mirror::Object* this_object,
70                             ArtMethod* method, uint32_t dex_pc,
71                             const JValue& return_value)
72       SHARED_REQUIRES(Locks::mutator_lock_) = 0;
73 
74   // Call-back for when a method is popped due to an exception throw. A method will either cause a
75   // MethodExited call-back or a MethodUnwind call-back when its activation is removed.
76   virtual void MethodUnwind(Thread* thread, mirror::Object* this_object,
77                             ArtMethod* method, uint32_t dex_pc)
78       SHARED_REQUIRES(Locks::mutator_lock_) = 0;
79 
80   // Call-back for when the dex pc moves in a method.
81   virtual void DexPcMoved(Thread* thread, mirror::Object* this_object,
82                           ArtMethod* method, uint32_t new_dex_pc)
83       SHARED_REQUIRES(Locks::mutator_lock_) = 0;
84 
85   // Call-back for when we read from a field.
86   virtual void FieldRead(Thread* thread, mirror::Object* this_object, ArtMethod* method,
87                          uint32_t dex_pc, ArtField* field) = 0;
88 
89   // Call-back for when we write into a field.
90   virtual void FieldWritten(Thread* thread, mirror::Object* this_object, ArtMethod* method,
91                             uint32_t dex_pc, ArtField* field, const JValue& field_value) = 0;
92 
93   // Call-back when an exception is caught.
94   virtual void ExceptionCaught(Thread* thread, mirror::Throwable* exception_object)
95       SHARED_REQUIRES(Locks::mutator_lock_) = 0;
96 
97   // Call-back for when we execute a branch.
98   virtual void Branch(Thread* thread,
99                       ArtMethod* method,
100                       uint32_t dex_pc,
101                       int32_t dex_pc_offset)
102       SHARED_REQUIRES(Locks::mutator_lock_) = 0;
103 
104   // Call-back for when we get an invokevirtual or an invokeinterface.
105   virtual void InvokeVirtualOrInterface(Thread* thread,
106                                         mirror::Object* this_object,
107                                         ArtMethod* caller,
108                                         uint32_t dex_pc,
109                                         ArtMethod* callee)
110       REQUIRES(Roles::uninterruptible_)
111       SHARED_REQUIRES(Locks::mutator_lock_) = 0;
112 };
113 
114 // Instrumentation is a catch-all for when extra information is required from the runtime. The
115 // typical use for instrumentation is for profiling and debugging. Instrumentation may add stubs
116 // to method entry and exit, it may also force execution to be switched to the interpreter and
117 // trigger deoptimization.
118 class Instrumentation {
119  public:
120   enum InstrumentationEvent {
121     kMethodEntered = 0x1,
122     kMethodExited = 0x2,
123     kMethodUnwind = 0x4,
124     kDexPcMoved = 0x8,
125     kFieldRead = 0x10,
126     kFieldWritten = 0x20,
127     kExceptionCaught = 0x40,
128     kBranch = 0x80,
129     kInvokeVirtualOrInterface = 0x100,
130   };
131 
132   enum class InstrumentationLevel {
133     kInstrumentNothing,                   // execute without instrumentation
134     kInstrumentWithInstrumentationStubs,  // execute with instrumentation entry/exit stubs
135     kInstrumentWithInterpreter            // execute with interpreter
136   };
137 
138   Instrumentation();
139 
140   // Add a listener to be notified of the masked together sent of instrumentation events. This
141   // suspend the runtime to install stubs. You are expected to hold the mutator lock as a proxy
142   // for saying you should have suspended all threads (installing stubs while threads are running
143   // will break).
144   void AddListener(InstrumentationListener* listener, uint32_t events)
145       REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_);
146 
147   // Removes a listener possibly removing instrumentation stubs.
148   void RemoveListener(InstrumentationListener* listener, uint32_t events)
149       REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_);
150 
151   // Deoptimization.
152   void EnableDeoptimization()
153       REQUIRES(Locks::mutator_lock_)
154       REQUIRES(!deoptimized_methods_lock_);
155   // Calls UndeoptimizeEverything which may visit class linker classes through ConfigureStubs.
156   void DisableDeoptimization(const char* key)
157       REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
158       REQUIRES(!deoptimized_methods_lock_);
159 
AreAllMethodsDeoptimized()160   bool AreAllMethodsDeoptimized() const {
161     return interpreter_stubs_installed_;
162   }
163   bool ShouldNotifyMethodEnterExitEvents() const SHARED_REQUIRES(Locks::mutator_lock_);
164 
165   // Executes everything with interpreter.
166   void DeoptimizeEverything(const char* key)
167       REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
168       REQUIRES(!Locks::thread_list_lock_,
169                !Locks::classlinker_classes_lock_,
170                !deoptimized_methods_lock_);
171 
172   // Executes everything with compiled code (or interpreter if there is no code). May visit class
173   // linker classes through ConfigureStubs.
174   void UndeoptimizeEverything(const char* key)
175       REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
176       REQUIRES(!Locks::thread_list_lock_,
177                !Locks::classlinker_classes_lock_,
178                !deoptimized_methods_lock_);
179 
180   // Deoptimize a method by forcing its execution with the interpreter. Nevertheless, a static
181   // method (except a class initializer) set to the resolution trampoline will be deoptimized only
182   // once its declaring class is initialized.
183   void Deoptimize(ArtMethod* method)
184       REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !deoptimized_methods_lock_);
185 
186   // Undeoptimze the method by restoring its entrypoints. Nevertheless, a static method
187   // (except a class initializer) set to the resolution trampoline will be updated only once its
188   // declaring class is initialized.
189   void Undeoptimize(ArtMethod* method)
190       REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !deoptimized_methods_lock_);
191 
192   // Indicates whether the method has been deoptimized so it is executed with the interpreter.
193   bool IsDeoptimized(ArtMethod* method)
194       REQUIRES(!deoptimized_methods_lock_) SHARED_REQUIRES(Locks::mutator_lock_);
195 
196   // Enable method tracing by installing instrumentation entry/exit stubs or interpreter.
197   void EnableMethodTracing(const char* key,
198                            bool needs_interpreter = kDeoptimizeForAccurateMethodEntryExitListeners)
199       REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
200       REQUIRES(!Locks::thread_list_lock_,
201                !Locks::classlinker_classes_lock_,
202                !deoptimized_methods_lock_);
203 
204   // Disable method tracing by uninstalling instrumentation entry/exit stubs or interpreter.
205   void DisableMethodTracing(const char* key)
206       REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
207       REQUIRES(!Locks::thread_list_lock_,
208                !Locks::classlinker_classes_lock_,
209                !deoptimized_methods_lock_);
210 
GetInterpreterHandlerTable()211   InterpreterHandlerTable GetInterpreterHandlerTable() const
212       SHARED_REQUIRES(Locks::mutator_lock_) {
213     return interpreter_handler_table_;
214   }
215 
216   void InstrumentQuickAllocEntryPoints() REQUIRES(!Locks::instrument_entrypoints_lock_);
217   void UninstrumentQuickAllocEntryPoints() REQUIRES(!Locks::instrument_entrypoints_lock_);
218   void InstrumentQuickAllocEntryPointsLocked()
219       REQUIRES(Locks::instrument_entrypoints_lock_, !Locks::thread_list_lock_,
220                !Locks::runtime_shutdown_lock_);
221   void UninstrumentQuickAllocEntryPointsLocked()
222       REQUIRES(Locks::instrument_entrypoints_lock_, !Locks::thread_list_lock_,
223                !Locks::runtime_shutdown_lock_);
224   void ResetQuickAllocEntryPoints() REQUIRES(Locks::runtime_shutdown_lock_);
225 
226   // Update the code of a method respecting any installed stubs.
227   void UpdateMethodsCode(ArtMethod* method, const void* quick_code)
228       SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(!deoptimized_methods_lock_);
229 
230   // Update the code of a method respecting any installed stubs from debugger.
231   void UpdateMethodsCodeFromDebugger(ArtMethod* method, const void* quick_code)
232       SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(!deoptimized_methods_lock_);
233 
234   // Get the quick code for the given method. More efficient than asking the class linker as it
235   // will short-cut to GetCode if instrumentation and static method resolution stubs aren't
236   // installed.
237   const void* GetQuickCodeFor(ArtMethod* method, size_t pointer_size) const
238       SHARED_REQUIRES(Locks::mutator_lock_);
239 
ForceInterpretOnly()240   void ForceInterpretOnly() {
241     interpret_only_ = true;
242     forced_interpret_only_ = true;
243   }
244 
245   // Called by ArtMethod::Invoke to determine dispatch mechanism.
InterpretOnly()246   bool InterpretOnly() const {
247     return interpret_only_;
248   }
249 
IsForcedInterpretOnly()250   bool IsForcedInterpretOnly() const {
251     return forced_interpret_only_;
252   }
253 
254   // Code is in boot image oat file which isn't compiled as debuggable.
255   // Need debug version (interpreter or jitted) if that's the case.
256   bool NeedDebugVersionForBootImageCode(ArtMethod* method, const void* code) const
257       SHARED_REQUIRES(Locks::mutator_lock_);
258 
AreExitStubsInstalled()259   bool AreExitStubsInstalled() const {
260     return instrumentation_stubs_installed_;
261   }
262 
HasMethodEntryListeners()263   bool HasMethodEntryListeners() const SHARED_REQUIRES(Locks::mutator_lock_) {
264     return have_method_entry_listeners_;
265   }
266 
HasMethodExitListeners()267   bool HasMethodExitListeners() const SHARED_REQUIRES(Locks::mutator_lock_) {
268     return have_method_exit_listeners_;
269   }
270 
HasMethodUnwindListeners()271   bool HasMethodUnwindListeners() const SHARED_REQUIRES(Locks::mutator_lock_) {
272     return have_method_unwind_listeners_;
273   }
274 
HasDexPcListeners()275   bool HasDexPcListeners() const SHARED_REQUIRES(Locks::mutator_lock_) {
276     return have_dex_pc_listeners_;
277   }
278 
HasFieldReadListeners()279   bool HasFieldReadListeners() const SHARED_REQUIRES(Locks::mutator_lock_) {
280     return have_field_read_listeners_;
281   }
282 
HasFieldWriteListeners()283   bool HasFieldWriteListeners() const SHARED_REQUIRES(Locks::mutator_lock_) {
284     return have_field_write_listeners_;
285   }
286 
HasExceptionCaughtListeners()287   bool HasExceptionCaughtListeners() const SHARED_REQUIRES(Locks::mutator_lock_) {
288     return have_exception_caught_listeners_;
289   }
290 
HasBranchListeners()291   bool HasBranchListeners() const SHARED_REQUIRES(Locks::mutator_lock_) {
292     return have_branch_listeners_;
293   }
294 
HasInvokeVirtualOrInterfaceListeners()295   bool HasInvokeVirtualOrInterfaceListeners() const SHARED_REQUIRES(Locks::mutator_lock_) {
296     return have_invoke_virtual_or_interface_listeners_;
297   }
298 
IsActive()299   bool IsActive() const SHARED_REQUIRES(Locks::mutator_lock_) {
300     return have_dex_pc_listeners_ || have_method_entry_listeners_ || have_method_exit_listeners_ ||
301         have_field_read_listeners_ || have_field_write_listeners_ ||
302         have_exception_caught_listeners_ || have_method_unwind_listeners_ ||
303         have_branch_listeners_ || have_invoke_virtual_or_interface_listeners_;
304   }
305 
306   // Any instrumentation *other* than what is needed for Jit profiling active?
NonJitProfilingActive()307   bool NonJitProfilingActive() const SHARED_REQUIRES(Locks::mutator_lock_) {
308     return have_dex_pc_listeners_ || have_method_exit_listeners_ ||
309         have_field_read_listeners_ || have_field_write_listeners_ ||
310         have_exception_caught_listeners_ || have_method_unwind_listeners_ ||
311         have_branch_listeners_;
312   }
313 
314   // Inform listeners that a method has been entered. A dex PC is provided as we may install
315   // listeners into executing code and get method enter events for methods already on the stack.
MethodEnterEvent(Thread * thread,mirror::Object * this_object,ArtMethod * method,uint32_t dex_pc)316   void MethodEnterEvent(Thread* thread, mirror::Object* this_object,
317                         ArtMethod* method, uint32_t dex_pc) const
318       SHARED_REQUIRES(Locks::mutator_lock_) {
319     if (UNLIKELY(HasMethodEntryListeners())) {
320       MethodEnterEventImpl(thread, this_object, method, dex_pc);
321     }
322   }
323 
324   // Inform listeners that a method has been exited.
MethodExitEvent(Thread * thread,mirror::Object * this_object,ArtMethod * method,uint32_t dex_pc,const JValue & return_value)325   void MethodExitEvent(Thread* thread, mirror::Object* this_object,
326                        ArtMethod* method, uint32_t dex_pc,
327                        const JValue& return_value) const
328       SHARED_REQUIRES(Locks::mutator_lock_) {
329     if (UNLIKELY(HasMethodExitListeners())) {
330       MethodExitEventImpl(thread, this_object, method, dex_pc, return_value);
331     }
332   }
333 
334   // Inform listeners that a method has been exited due to an exception.
335   void MethodUnwindEvent(Thread* thread, mirror::Object* this_object,
336                          ArtMethod* method, uint32_t dex_pc) const
337       SHARED_REQUIRES(Locks::mutator_lock_);
338 
339   // Inform listeners that the dex pc has moved (only supported by the interpreter).
DexPcMovedEvent(Thread * thread,mirror::Object * this_object,ArtMethod * method,uint32_t dex_pc)340   void DexPcMovedEvent(Thread* thread, mirror::Object* this_object,
341                        ArtMethod* method, uint32_t dex_pc) const
342       SHARED_REQUIRES(Locks::mutator_lock_) {
343     if (UNLIKELY(HasDexPcListeners())) {
344       DexPcMovedEventImpl(thread, this_object, method, dex_pc);
345     }
346   }
347 
348   // Inform listeners that a branch has been taken (only supported by the interpreter).
Branch(Thread * thread,ArtMethod * method,uint32_t dex_pc,int32_t offset)349   void Branch(Thread* thread, ArtMethod* method, uint32_t dex_pc, int32_t offset) const
350       SHARED_REQUIRES(Locks::mutator_lock_) {
351     if (UNLIKELY(HasBranchListeners())) {
352       BranchImpl(thread, method, dex_pc, offset);
353     }
354   }
355 
356   // Inform listeners that we read a field (only supported by the interpreter).
FieldReadEvent(Thread * thread,mirror::Object * this_object,ArtMethod * method,uint32_t dex_pc,ArtField * field)357   void FieldReadEvent(Thread* thread, mirror::Object* this_object,
358                       ArtMethod* method, uint32_t dex_pc,
359                       ArtField* field) const
360       SHARED_REQUIRES(Locks::mutator_lock_) {
361     if (UNLIKELY(HasFieldReadListeners())) {
362       FieldReadEventImpl(thread, this_object, method, dex_pc, field);
363     }
364   }
365 
366   // Inform listeners that we write a field (only supported by the interpreter).
FieldWriteEvent(Thread * thread,mirror::Object * this_object,ArtMethod * method,uint32_t dex_pc,ArtField * field,const JValue & field_value)367   void FieldWriteEvent(Thread* thread, mirror::Object* this_object,
368                        ArtMethod* method, uint32_t dex_pc,
369                        ArtField* field, const JValue& field_value) const
370       SHARED_REQUIRES(Locks::mutator_lock_) {
371     if (UNLIKELY(HasFieldWriteListeners())) {
372       FieldWriteEventImpl(thread, this_object, method, dex_pc, field, field_value);
373     }
374   }
375 
InvokeVirtualOrInterface(Thread * thread,mirror::Object * this_object,ArtMethod * caller,uint32_t dex_pc,ArtMethod * callee)376   void InvokeVirtualOrInterface(Thread* thread,
377                                 mirror::Object* this_object,
378                                 ArtMethod* caller,
379                                 uint32_t dex_pc,
380                                 ArtMethod* callee) const
381       SHARED_REQUIRES(Locks::mutator_lock_) {
382     if (UNLIKELY(HasInvokeVirtualOrInterfaceListeners())) {
383       InvokeVirtualOrInterfaceImpl(thread, this_object, caller, dex_pc, callee);
384     }
385   }
386 
387   // Inform listeners that an exception was caught.
388   void ExceptionCaughtEvent(Thread* thread, mirror::Throwable* exception_object) const
389       SHARED_REQUIRES(Locks::mutator_lock_);
390 
391   // Called when an instrumented method is entered. The intended link register (lr) is saved so
392   // that returning causes a branch to the method exit stub. Generates method enter events.
393   void PushInstrumentationStackFrame(Thread* self, mirror::Object* this_object,
394                                      ArtMethod* method, uintptr_t lr,
395                                      bool interpreter_entry)
396       SHARED_REQUIRES(Locks::mutator_lock_);
397 
398   // Called when an instrumented method is exited. Removes the pushed instrumentation frame
399   // returning the intended link register. Generates method exit events.
400   TwoWordReturn PopInstrumentationStackFrame(Thread* self, uintptr_t* return_pc,
401                                              uint64_t gpr_result, uint64_t fpr_result)
402       SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(!deoptimized_methods_lock_);
403 
404   // Pops an instrumentation frame from the current thread and generate an unwind event.
405   void PopMethodForUnwind(Thread* self, bool is_deoptimization) const
406       SHARED_REQUIRES(Locks::mutator_lock_);
407 
408   // Call back for configure stubs.
409   void InstallStubsForClass(mirror::Class* klass) SHARED_REQUIRES(Locks::mutator_lock_)
410       REQUIRES(!deoptimized_methods_lock_);
411 
412   void InstallStubsForMethod(ArtMethod* method)
413       SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(!deoptimized_methods_lock_);
414 
415   // Install instrumentation exit stub on every method of the stack of the given thread.
416   // This is used by the debugger to cause a deoptimization of the thread's stack after updating
417   // local variable(s).
418   void InstrumentThreadStack(Thread* thread)
419       SHARED_REQUIRES(Locks::mutator_lock_)
420       REQUIRES(!Locks::thread_list_lock_);
421 
422   static size_t ComputeFrameId(Thread* self,
423                                size_t frame_depth,
424                                size_t inlined_frames_before_frame)
425       SHARED_REQUIRES(Locks::mutator_lock_);
426 
427   // Does not hold lock, used to check if someone changed from not instrumented to instrumented
428   // during a GC suspend point.
AllocEntrypointsInstrumented()429   bool AllocEntrypointsInstrumented() const SHARED_REQUIRES(Locks::mutator_lock_) {
430     return alloc_entrypoints_instrumented_;
431   }
432 
433  private:
434   InstrumentationLevel GetCurrentInstrumentationLevel() const;
435 
436   // Does the job of installing or removing instrumentation code within methods.
437   // In order to support multiple clients using instrumentation at the same time,
438   // the caller must pass a unique key (a string) identifying it so we remind which
439   // instrumentation level it needs. Therefore the current instrumentation level
440   // becomes the highest instrumentation level required by a client.
441   void ConfigureStubs(const char* key, InstrumentationLevel desired_instrumentation_level)
442       REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
443       REQUIRES(!deoptimized_methods_lock_,
444                !Locks::thread_list_lock_,
445                !Locks::classlinker_classes_lock_);
446 
UpdateInterpreterHandlerTable()447   void UpdateInterpreterHandlerTable() REQUIRES(Locks::mutator_lock_) {
448     /*
449      * TUNING: Dalvik's mterp stashes the actual current handler table base in a
450      * tls field.  For Arm, this enables all suspend, debug & tracing checks to be
451      * collapsed into a single conditionally-executed ldw instruction.
452      * Move to Dalvik-style handler-table management for both the goto interpreter and
453      * mterp.
454      */
455     interpreter_handler_table_ = IsActive() ? kAlternativeHandlerTable : kMainHandlerTable;
456   }
457 
458   // No thread safety analysis to get around SetQuickAllocEntryPointsInstrumented requiring
459   // exclusive access to mutator lock which you can't get if the runtime isn't started.
460   void SetEntrypointsInstrumented(bool instrumented) NO_THREAD_SAFETY_ANALYSIS;
461 
462   void MethodEnterEventImpl(Thread* thread, mirror::Object* this_object,
463                             ArtMethod* method, uint32_t dex_pc) const
464       SHARED_REQUIRES(Locks::mutator_lock_);
465   void MethodExitEventImpl(Thread* thread, mirror::Object* this_object,
466                            ArtMethod* method,
467                            uint32_t dex_pc, const JValue& return_value) const
468       SHARED_REQUIRES(Locks::mutator_lock_);
469   void DexPcMovedEventImpl(Thread* thread, mirror::Object* this_object,
470                            ArtMethod* method, uint32_t dex_pc) const
471       SHARED_REQUIRES(Locks::mutator_lock_);
472   void BranchImpl(Thread* thread, ArtMethod* method, uint32_t dex_pc, int32_t offset) const
473       SHARED_REQUIRES(Locks::mutator_lock_);
474   void InvokeVirtualOrInterfaceImpl(Thread* thread,
475                                     mirror::Object* this_object,
476                                     ArtMethod* caller,
477                                     uint32_t dex_pc,
478                                     ArtMethod* callee) const
479       SHARED_REQUIRES(Locks::mutator_lock_);
480   void FieldReadEventImpl(Thread* thread, mirror::Object* this_object,
481                            ArtMethod* method, uint32_t dex_pc,
482                            ArtField* field) const
483       SHARED_REQUIRES(Locks::mutator_lock_);
484   void FieldWriteEventImpl(Thread* thread, mirror::Object* this_object,
485                            ArtMethod* method, uint32_t dex_pc,
486                            ArtField* field, const JValue& field_value) const
487       SHARED_REQUIRES(Locks::mutator_lock_);
488 
489   // Read barrier-aware utility functions for accessing deoptimized_methods_
490   bool AddDeoptimizedMethod(ArtMethod* method)
491       SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(deoptimized_methods_lock_);
492   bool IsDeoptimizedMethod(ArtMethod* method)
493       SHARED_REQUIRES(Locks::mutator_lock_, deoptimized_methods_lock_);
494   bool RemoveDeoptimizedMethod(ArtMethod* method)
495       SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(deoptimized_methods_lock_);
496   ArtMethod* BeginDeoptimizedMethod()
497       SHARED_REQUIRES(Locks::mutator_lock_, deoptimized_methods_lock_);
498   bool IsDeoptimizedMethodsEmpty() const
499       SHARED_REQUIRES(Locks::mutator_lock_, deoptimized_methods_lock_);
500   void UpdateMethodsCodeImpl(ArtMethod* method, const void* quick_code)
501       SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(!deoptimized_methods_lock_);
502 
503 
504   // Have we hijacked ArtMethod::code_ so that it calls instrumentation/interpreter code?
505   bool instrumentation_stubs_installed_;
506 
507   // Have we hijacked ArtMethod::code_ to reference the enter/exit stubs?
508   bool entry_exit_stubs_installed_;
509 
510   // Have we hijacked ArtMethod::code_ to reference the enter interpreter stub?
511   bool interpreter_stubs_installed_;
512 
513   // Do we need the fidelity of events that we only get from running within the interpreter?
514   bool interpret_only_;
515 
516   // Did the runtime request we only run in the interpreter? ie -Xint mode.
517   bool forced_interpret_only_;
518 
519   // Do we have any listeners for method entry events? Short-cut to avoid taking the
520   // instrumentation_lock_.
521   bool have_method_entry_listeners_ GUARDED_BY(Locks::mutator_lock_);
522 
523   // Do we have any listeners for method exit events? Short-cut to avoid taking the
524   // instrumentation_lock_.
525   bool have_method_exit_listeners_ GUARDED_BY(Locks::mutator_lock_);
526 
527   // Do we have any listeners for method unwind events? Short-cut to avoid taking the
528   // instrumentation_lock_.
529   bool have_method_unwind_listeners_ GUARDED_BY(Locks::mutator_lock_);
530 
531   // Do we have any listeners for dex move events? Short-cut to avoid taking the
532   // instrumentation_lock_.
533   bool have_dex_pc_listeners_ GUARDED_BY(Locks::mutator_lock_);
534 
535   // Do we have any listeners for field read events? Short-cut to avoid taking the
536   // instrumentation_lock_.
537   bool have_field_read_listeners_ GUARDED_BY(Locks::mutator_lock_);
538 
539   // Do we have any listeners for field write events? Short-cut to avoid taking the
540   // instrumentation_lock_.
541   bool have_field_write_listeners_ GUARDED_BY(Locks::mutator_lock_);
542 
543   // Do we have any exception caught listeners? Short-cut to avoid taking the instrumentation_lock_.
544   bool have_exception_caught_listeners_ GUARDED_BY(Locks::mutator_lock_);
545 
546   // Do we have any branch listeners? Short-cut to avoid taking the instrumentation_lock_.
547   bool have_branch_listeners_ GUARDED_BY(Locks::mutator_lock_);
548 
549   // Do we have any invoke listeners? Short-cut to avoid taking the instrumentation_lock_.
550   bool have_invoke_virtual_or_interface_listeners_ GUARDED_BY(Locks::mutator_lock_);
551 
552   // Contains the instrumentation level required by each client of the instrumentation identified
553   // by a string key.
554   typedef SafeMap<const char*, InstrumentationLevel> InstrumentationLevelTable;
555   InstrumentationLevelTable requested_instrumentation_levels_ GUARDED_BY(Locks::mutator_lock_);
556 
557   // The event listeners, written to with the mutator_lock_ exclusively held.
558   // Mutators must be able to iterate over these lists concurrently, that is, with listeners being
559   // added or removed while iterating. The modifying thread holds exclusive lock,
560   // so other threads cannot iterate (i.e. read the data of the list) at the same time but they
561   // do keep iterators that need to remain valid. This is the reason these listeners are std::list
562   // and not for example std::vector: the existing storage for a std::list does not move.
563   // Note that mutators cannot make a copy of these lists before iterating, as the instrumentation
564   // listeners can also be deleted concurrently.
565   // As a result, these lists are never trimmed. That's acceptable given the low number of
566   // listeners we have.
567   std::list<InstrumentationListener*> method_entry_listeners_ GUARDED_BY(Locks::mutator_lock_);
568   std::list<InstrumentationListener*> method_exit_listeners_ GUARDED_BY(Locks::mutator_lock_);
569   std::list<InstrumentationListener*> method_unwind_listeners_ GUARDED_BY(Locks::mutator_lock_);
570   std::list<InstrumentationListener*> branch_listeners_ GUARDED_BY(Locks::mutator_lock_);
571   std::list<InstrumentationListener*> invoke_virtual_or_interface_listeners_
572       GUARDED_BY(Locks::mutator_lock_);
573   std::list<InstrumentationListener*> dex_pc_listeners_ GUARDED_BY(Locks::mutator_lock_);
574   std::list<InstrumentationListener*> field_read_listeners_ GUARDED_BY(Locks::mutator_lock_);
575   std::list<InstrumentationListener*> field_write_listeners_ GUARDED_BY(Locks::mutator_lock_);
576   std::list<InstrumentationListener*> exception_caught_listeners_ GUARDED_BY(Locks::mutator_lock_);
577 
578   // The set of methods being deoptimized (by the debugger) which must be executed with interpreter
579   // only.
580   mutable ReaderWriterMutex deoptimized_methods_lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
581   std::unordered_set<ArtMethod*> deoptimized_methods_ GUARDED_BY(deoptimized_methods_lock_);
582   bool deoptimization_enabled_;
583 
584   // Current interpreter handler table. This is updated each time the thread state flags are
585   // modified.
586   InterpreterHandlerTable interpreter_handler_table_ GUARDED_BY(Locks::mutator_lock_);
587 
588   // Greater than 0 if quick alloc entry points instrumented.
589   size_t quick_alloc_entry_points_instrumentation_counter_;
590 
591   // alloc_entrypoints_instrumented_ is only updated with all the threads suspended, this is done
592   // to prevent races with the GC where the GC relies on thread suspension only see
593   // alloc_entrypoints_instrumented_ change during suspend points.
594   bool alloc_entrypoints_instrumented_;
595 
596   friend class InstrumentationTest;  // For GetCurrentInstrumentationLevel and ConfigureStubs.
597 
598   DISALLOW_COPY_AND_ASSIGN(Instrumentation);
599 };
600 std::ostream& operator<<(std::ostream& os, const Instrumentation::InstrumentationEvent& rhs);
601 std::ostream& operator<<(std::ostream& os, const Instrumentation::InstrumentationLevel& rhs);
602 
603 // An element in the instrumentation side stack maintained in art::Thread.
604 struct InstrumentationStackFrame {
InstrumentationStackFrameInstrumentationStackFrame605   InstrumentationStackFrame(mirror::Object* this_object, ArtMethod* method,
606                             uintptr_t return_pc, size_t frame_id, bool interpreter_entry)
607       : this_object_(this_object), method_(method), return_pc_(return_pc), frame_id_(frame_id),
608         interpreter_entry_(interpreter_entry) {
609   }
610 
611   std::string Dump() const SHARED_REQUIRES(Locks::mutator_lock_);
612 
613   mirror::Object* this_object_;
614   ArtMethod* method_;
615   uintptr_t return_pc_;
616   size_t frame_id_;
617   bool interpreter_entry_;
618 };
619 
620 }  // namespace instrumentation
621 }  // namespace art
622 
623 #endif  // ART_RUNTIME_INSTRUMENTATION_H_
624