/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_INSTRUMENTATION_H_
#define ART_RUNTIME_INSTRUMENTATION_H_

#include <stdint.h>
#include <list>
#include <unordered_set>

#include "arch/instruction_set.h"
#include "base/enums.h"
#include "base/macros.h"
#include "base/mutex.h"
#include "base/safe_map.h"
#include "gc_root.h"

namespace art {
namespace mirror {
class Class;
class Object;
class Throwable;
}  // namespace mirror
class ArtField;
class ArtMethod;
template <typename T> class Handle;
union JValue;
class ShadowFrame;
class Thread;
enum class DeoptimizationMethodType;

namespace instrumentation {

// Interpreter handler tables.
enum InterpreterHandlerTable {
  kMainHandlerTable = 0,          // Main handler table: no suspend check, no instrumentation.
  kAlternativeHandlerTable = 1,   // Alternative handler table: suspend check and/or instrumentation
                                  // enabled.
  kNumHandlerTables
};

// Do we want to deoptimize for method entry and exit listeners or just try to intercept
// invocations? Deoptimization forces all code to run in the interpreter and considerably hurts the
// application's performance.
static constexpr bool kDeoptimizeForAccurateMethodEntryExitListeners = true;

// Instrumentation event listener API. Registered listeners receive the appropriate callback for
// the events they are listening for. The callbacks supply the thread, method and dex_pc at which
// the event occurred. The thread may or may not be Thread::Current().
struct InstrumentationListener {
  InstrumentationListener() {}
  virtual ~InstrumentationListener() {}

  // Call-back for when a method is entered.
  virtual void MethodEntered(Thread* thread,
                             Handle<mirror::Object> this_object,
                             ArtMethod* method,
                             uint32_t dex_pc) REQUIRES_SHARED(Locks::mutator_lock_) = 0;

  virtual void MethodExited(Thread* thread,
                            Handle<mirror::Object> this_object,
                            ArtMethod* method,
                            uint32_t dex_pc,
                            Handle<mirror::Object> return_value)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Call-back for when a method is exited. The implementor should either handler-ize the return
  // value (if appropriate) or use the alternate MethodExited callback instead if they need to
  // go through a suspend point.
  virtual void MethodExited(Thread* thread,
                            Handle<mirror::Object> this_object,
                            ArtMethod* method,
                            uint32_t dex_pc,
                            const JValue& return_value)
      REQUIRES_SHARED(Locks::mutator_lock_) = 0;

  // Call-back for when a method is popped due to an exception throw. A method will either cause a
  // MethodExited call-back or a MethodUnwind call-back when its activation is removed.
  virtual void MethodUnwind(Thread* thread,
                            Handle<mirror::Object> this_object,
                            ArtMethod* method,
                            uint32_t dex_pc)
      REQUIRES_SHARED(Locks::mutator_lock_) = 0;

  // Call-back for when the dex pc moves in a method.
  virtual void DexPcMoved(Thread* thread,
                          Handle<mirror::Object> this_object,
                          ArtMethod* method,
                          uint32_t new_dex_pc)
      REQUIRES_SHARED(Locks::mutator_lock_) = 0;

  // Call-back for when we read from a field.
  virtual void FieldRead(Thread* thread,
                         Handle<mirror::Object> this_object,
                         ArtMethod* method,
                         uint32_t dex_pc,
                         ArtField* field) REQUIRES_SHARED(Locks::mutator_lock_) = 0;

  virtual void FieldWritten(Thread* thread,
                            Handle<mirror::Object> this_object,
                            ArtMethod* method,
                            uint32_t dex_pc,
                            ArtField* field,
                            Handle<mirror::Object> field_value)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Call-back for when we write into a field.
  virtual void FieldWritten(Thread* thread,
                            Handle<mirror::Object> this_object,
                            ArtMethod* method,
                            uint32_t dex_pc,
                            ArtField* field,
                            const JValue& field_value)
      REQUIRES_SHARED(Locks::mutator_lock_) = 0;

  // Call-back when an exception is thrown.
  virtual void ExceptionThrown(Thread* thread,
                               Handle<mirror::Throwable> exception_object)
      REQUIRES_SHARED(Locks::mutator_lock_) = 0;

  // Call-back when an exception is caught/handled by Java code.
  virtual void ExceptionHandled(Thread* thread, Handle<mirror::Throwable> exception_object)
      REQUIRES_SHARED(Locks::mutator_lock_) = 0;

  // Call-back for when we execute a branch.
  virtual void Branch(Thread* thread,
                      ArtMethod* method,
                      uint32_t dex_pc,
                      int32_t dex_pc_offset)
      REQUIRES_SHARED(Locks::mutator_lock_) = 0;

  // Call-back for when we get an invokevirtual or an invokeinterface.
  virtual void InvokeVirtualOrInterface(Thread* thread,
                                        Handle<mirror::Object> this_object,
                                        ArtMethod* caller,
                                        uint32_t dex_pc,
                                        ArtMethod* callee)
      REQUIRES_SHARED(Locks::mutator_lock_) = 0;

  // Call-back when a shadow_frame with the needs_notify_pop_ boolean set is popped off the stack
  // by either a return or an exception. Normally instrumentation listeners should ensure that
  // there are shadow-frames by deoptimizing stacks.
  virtual void WatchedFramePop(Thread* thread ATTRIBUTE_UNUSED,
                               const ShadowFrame& frame ATTRIBUTE_UNUSED)
      REQUIRES_SHARED(Locks::mutator_lock_) = 0;
};
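
// Example (illustrative sketch only, not part of the runtime): a listener that counts method
// entries. The class name is hypothetical. A concrete listener must override every pure-virtual
// callback; only MethodEntered is shown here.
//
//   class CountingListener final : public InstrumentationListener {
//    public:
//     void MethodEntered(Thread* /*thread*/,
//                        Handle<mirror::Object> /*this_object*/,
//                        ArtMethod* /*method*/,
//                        uint32_t /*dex_pc*/)
//         override REQUIRES_SHARED(Locks::mutator_lock_) {
//       ++entry_count_;
//     }
//     // The remaining pure-virtual callbacks (MethodExited, MethodUnwind, DexPcMoved,
//     // FieldRead, FieldWritten, ExceptionThrown, ExceptionHandled, Branch,
//     // InvokeVirtualOrInterface, WatchedFramePop) would be overridden the same way.
//    private:
//     size_t entry_count_ = 0;
//   };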

// Instrumentation is a catch-all for when extra information is required from the runtime. The
// typical use for instrumentation is for profiling and debugging. Instrumentation may add stubs
// to method entry and exit; it may also force execution to be switched to the interpreter and
// trigger deoptimization.
class Instrumentation {
 public:
  enum InstrumentationEvent {
    kMethodEntered = 0x1,
    kMethodExited = 0x2,
    kMethodUnwind = 0x4,
    kDexPcMoved = 0x8,
    kFieldRead = 0x10,
    kFieldWritten = 0x20,
    kExceptionThrown = 0x40,
    kBranch = 0x80,
    kInvokeVirtualOrInterface = 0x100,
    kWatchedFramePop = 0x200,
    kExceptionHandled = 0x400,
  };
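
  // The event kinds above are bit flags; a listener interested in several kinds of events is
  // registered with a mask of them. Illustrative sketch (the composed mask is hypothetical):
  //
  //   uint32_t events = Instrumentation::kMethodEntered | Instrumentation::kMethodExited;
  //   // Pass |events| to AddListener() below.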

  enum class InstrumentationLevel {
    kInstrumentNothing,                   // execute without instrumentation
    kInstrumentWithInstrumentationStubs,  // execute with instrumentation entry/exit stubs
    kInstrumentWithInterpreter            // execute with interpreter
  };

  Instrumentation();

  // Add a listener to be notified of the masked-together set of instrumentation events. This
  // suspends the runtime to install stubs. You are expected to hold the mutator lock as a proxy
  // for saying you should have suspended all threads (installing stubs while threads are running
  // will break).
  void AddListener(InstrumentationListener* listener, uint32_t events)
      REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_);

  // Removes a listener, possibly removing instrumentation stubs.
  void RemoveListener(InstrumentationListener* listener, uint32_t events)
      REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_);
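
  // Illustrative registration sketch (the listener is hypothetical; assumes the caller suspends
  // all threads first, e.g. via ScopedSuspendAll as the trace and debugger code does):
  //
  //   Runtime* runtime = Runtime::Current();
  //   ScopedSuspendAll ssa(__FUNCTION__);
  //   runtime->GetInstrumentation()->AddListener(
  //       listener, Instrumentation::kMethodEntered | Instrumentation::kMethodExited);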

  // Deoptimization.
  void EnableDeoptimization()
      REQUIRES(Locks::mutator_lock_)
      REQUIRES(!deoptimized_methods_lock_);
  // Calls UndeoptimizeEverything, which may visit class linker classes through ConfigureStubs.
  void DisableDeoptimization(const char* key)
      REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
      REQUIRES(!deoptimized_methods_lock_);

  bool AreAllMethodsDeoptimized() const {
    return interpreter_stubs_installed_;
  }
  bool ShouldNotifyMethodEnterExitEvents() const REQUIRES_SHARED(Locks::mutator_lock_);

  bool CanDeoptimize() {
    return deoptimization_enabled_;
  }

  // Executes everything with the interpreter.
  void DeoptimizeEverything(const char* key)
      REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
      REQUIRES(!Locks::thread_list_lock_,
               !Locks::classlinker_classes_lock_,
               !deoptimized_methods_lock_);

  // Executes everything with compiled code (or interpreter if there is no code). May visit class
  // linker classes through ConfigureStubs.
  void UndeoptimizeEverything(const char* key)
      REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
      REQUIRES(!Locks::thread_list_lock_,
               !Locks::classlinker_classes_lock_,
               !deoptimized_methods_lock_);

  // Deoptimize a method by forcing its execution with the interpreter. However, a static
  // method (except a class initializer) set to the resolution trampoline will be deoptimized only
  // once its declaring class is initialized.
  void Deoptimize(ArtMethod* method)
      REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !deoptimized_methods_lock_);

  // Undeoptimize the method by restoring its entrypoints. However, a static method
  // (except a class initializer) set to the resolution trampoline will be updated only once its
  // declaring class is initialized.
  void Undeoptimize(ArtMethod* method)
      REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !deoptimized_methods_lock_);

  // Indicates whether the method has been deoptimized so it is executed with the interpreter.
  bool IsDeoptimized(ArtMethod* method)
      REQUIRES(!deoptimized_methods_lock_) REQUIRES_SHARED(Locks::mutator_lock_);
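
  // Illustrative sketch of selective deoptimization as a debugger-style client might drive it
  // (|method| is a hypothetical ArtMethod* the caller has already resolved, and the key string
  // is hypothetical):
  //
  //   Instrumentation* instr = Runtime::Current()->GetInstrumentation();
  //   instr->EnableDeoptimization();
  //   instr->Deoptimize(method);    // |method| now runs under the interpreter.
  //   ...
  //   instr->Undeoptimize(method);  // Restore the original entrypoints.
  //   instr->DisableDeoptimization("some-client-key");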

  // Enable method tracing by installing instrumentation entry/exit stubs or interpreter.
  void EnableMethodTracing(const char* key,
                           bool needs_interpreter = kDeoptimizeForAccurateMethodEntryExitListeners)
      REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
      REQUIRES(!Locks::thread_list_lock_,
               !Locks::classlinker_classes_lock_,
               !deoptimized_methods_lock_);

  // Disable method tracing by uninstalling instrumentation entry/exit stubs or interpreter.
  void DisableMethodTracing(const char* key)
      REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
      REQUIRES(!Locks::thread_list_lock_,
               !Locks::classlinker_classes_lock_,
               !deoptimized_methods_lock_);
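
  // Illustrative tracing sketch: each client identifies itself with a unique key so that its
  // requested instrumentation level can be tracked (see ConfigureStubs below). The key string
  // here is hypothetical:
  //
  //   Instrumentation* instr = Runtime::Current()->GetInstrumentation();
  //   instr->EnableMethodTracing("my-tracer", /* needs_interpreter= */ false);
  //   ...
  //   instr->DisableMethodTracing("my-tracer");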

  InterpreterHandlerTable GetInterpreterHandlerTable() const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    return interpreter_handler_table_;
  }

  void InstrumentQuickAllocEntryPoints() REQUIRES(!Locks::instrument_entrypoints_lock_);
  void UninstrumentQuickAllocEntryPoints() REQUIRES(!Locks::instrument_entrypoints_lock_);
  void InstrumentQuickAllocEntryPointsLocked()
      REQUIRES(Locks::instrument_entrypoints_lock_, !Locks::thread_list_lock_,
               !Locks::runtime_shutdown_lock_);
  void UninstrumentQuickAllocEntryPointsLocked()
      REQUIRES(Locks::instrument_entrypoints_lock_, !Locks::thread_list_lock_,
               !Locks::runtime_shutdown_lock_);
  void ResetQuickAllocEntryPoints() REQUIRES(Locks::runtime_shutdown_lock_);

  // Update the code of a method respecting any installed stubs.
  void UpdateMethodsCode(ArtMethod* method, const void* quick_code)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!deoptimized_methods_lock_);

  // Update the code of a native method to a JITed stub.
  void UpdateNativeMethodsCodeToJitCode(ArtMethod* method, const void* quick_code)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!deoptimized_methods_lock_);

  // Update the code of a method to the interpreter respecting any installed stubs from debugger.
  void UpdateMethodsCodeToInterpreterEntryPoint(ArtMethod* method)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!deoptimized_methods_lock_);

  // Update the code of a method respecting any installed stubs from debugger.
  void UpdateMethodsCodeForJavaDebuggable(ArtMethod* method, const void* quick_code)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!deoptimized_methods_lock_);

  // Get the quick code for the given method. More efficient than asking the class linker as it
  // will short-cut to GetCode if instrumentation and static method resolution stubs aren't
  // installed.
  const void* GetQuickCodeFor(ArtMethod* method, PointerSize pointer_size) const
      REQUIRES_SHARED(Locks::mutator_lock_);
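
  // Illustrative call sketch (kRuntimePointerSize comes from base/enums.h, included above;
  // |method| is a hypothetical ArtMethod* held by the caller):
  //
  //   Instrumentation* instr = Runtime::Current()->GetInstrumentation();
  //   const void* quick_code = instr->GetQuickCodeFor(method, kRuntimePointerSize);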

  void ForceInterpretOnly() {
    interpret_only_ = true;
    forced_interpret_only_ = true;
  }

  // Called by ArtMethod::Invoke to determine dispatch mechanism.
  bool InterpretOnly() const {
    return interpret_only_;
  }

  bool IsForcedInterpretOnly() const {
    return forced_interpret_only_;
  }

  // Code is in a boot image oat file which isn't compiled as debuggable. We need the debug
  // version (interpreter or JITed) in that case.
  bool NeedDebugVersionFor(ArtMethod* method) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  bool AreExitStubsInstalled() const {
    return instrumentation_stubs_installed_;
  }

  bool HasMethodEntryListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_method_entry_listeners_;
  }

  bool HasMethodExitListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_method_exit_listeners_;
  }

  bool HasMethodUnwindListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_method_unwind_listeners_;
  }

  bool HasDexPcListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_dex_pc_listeners_;
  }

  bool HasFieldReadListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_field_read_listeners_;
  }

  bool HasFieldWriteListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_field_write_listeners_;
  }

  bool HasExceptionThrownListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_exception_thrown_listeners_;
  }

  bool HasBranchListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_branch_listeners_;
  }

  bool HasInvokeVirtualOrInterfaceListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_invoke_virtual_or_interface_listeners_;
  }

  bool HasWatchedFramePopListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_watched_frame_pop_listeners_;
  }

  bool HasExceptionHandledListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_exception_handled_listeners_;
  }

  bool IsActive() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_dex_pc_listeners_ || have_method_entry_listeners_ || have_method_exit_listeners_ ||
        have_field_read_listeners_ || have_field_write_listeners_ ||
        have_exception_thrown_listeners_ || have_method_unwind_listeners_ ||
        have_branch_listeners_ || have_invoke_virtual_or_interface_listeners_ ||
        have_watched_frame_pop_listeners_ || have_exception_handled_listeners_;
  }

  // Is any instrumentation *other* than what is needed for JIT profiling active?
  bool NonJitProfilingActive() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return have_dex_pc_listeners_ || have_method_exit_listeners_ ||
        have_field_read_listeners_ || have_field_write_listeners_ ||
        have_exception_thrown_listeners_ || have_method_unwind_listeners_ ||
        have_branch_listeners_ || have_watched_frame_pop_listeners_ ||
        have_exception_handled_listeners_;
  }

  // Inform listeners that a method has been entered. A dex PC is provided as we may install
  // listeners into executing code and get method enter events for methods already on the stack.
  void MethodEnterEvent(Thread* thread, mirror::Object* this_object,
                        ArtMethod* method, uint32_t dex_pc) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (UNLIKELY(HasMethodEntryListeners())) {
      MethodEnterEventImpl(thread, this_object, method, dex_pc);
    }
  }

  // Inform listeners that a method has been exited.
  void MethodExitEvent(Thread* thread,
                       mirror::Object* this_object,
                       ArtMethod* method,
                       uint32_t dex_pc,
                       const JValue& return_value) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (UNLIKELY(HasMethodExitListeners())) {
      MethodExitEventImpl(thread, this_object, method, dex_pc, return_value);
    }
  }

  // Inform listeners that a method has been exited due to an exception.
  void MethodUnwindEvent(Thread* thread, mirror::Object* this_object,
                         ArtMethod* method, uint32_t dex_pc) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Inform listeners that the dex pc has moved (only supported by the interpreter).
  void DexPcMovedEvent(Thread* thread, mirror::Object* this_object,
                       ArtMethod* method, uint32_t dex_pc) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (UNLIKELY(HasDexPcListeners())) {
      DexPcMovedEventImpl(thread, this_object, method, dex_pc);
    }
  }

  // Inform listeners that a branch has been taken (only supported by the interpreter).
  void Branch(Thread* thread, ArtMethod* method, uint32_t dex_pc, int32_t offset) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (UNLIKELY(HasBranchListeners())) {
      BranchImpl(thread, method, dex_pc, offset);
    }
  }

  // Inform listeners that we read a field (only supported by the interpreter).
  void FieldReadEvent(Thread* thread, mirror::Object* this_object,
                      ArtMethod* method, uint32_t dex_pc,
                      ArtField* field) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (UNLIKELY(HasFieldReadListeners())) {
      FieldReadEventImpl(thread, this_object, method, dex_pc, field);
    }
  }

  // Inform listeners that we write a field (only supported by the interpreter).
  void FieldWriteEvent(Thread* thread, mirror::Object* this_object,
                       ArtMethod* method, uint32_t dex_pc,
                       ArtField* field, const JValue& field_value) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (UNLIKELY(HasFieldWriteListeners())) {
      FieldWriteEventImpl(thread, this_object, method, dex_pc, field, field_value);
    }
  }

  void InvokeVirtualOrInterface(Thread* thread,
                                mirror::Object* this_object,
                                ArtMethod* caller,
                                uint32_t dex_pc,
                                ArtMethod* callee) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (UNLIKELY(HasInvokeVirtualOrInterfaceListeners())) {
      InvokeVirtualOrInterfaceImpl(thread, this_object, caller, dex_pc, callee);
    }
  }

  // Inform listeners that a watched frame is being popped (only supported by the interpreter).
  void WatchedFramePopped(Thread* thread, const ShadowFrame& frame) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (UNLIKELY(HasWatchedFramePopListeners())) {
      WatchedFramePopImpl(thread, frame);
    }
  }

  // Inform listeners that an exception was thrown.
  void ExceptionThrownEvent(Thread* thread, mirror::Throwable* exception_object) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Inform listeners that an exception has been handled. This is not sent for native code or for
  // exceptions which reach the end of the thread's stack.
  void ExceptionHandledEvent(Thread* thread, mirror::Throwable* exception_object) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Called when an instrumented method is entered. The intended link register (lr) is saved so
  // that returning causes a branch to the method exit stub. Generates method enter events.
  void PushInstrumentationStackFrame(Thread* self, mirror::Object* this_object,
                                     ArtMethod* method, uintptr_t lr,
                                     bool interpreter_entry)
      REQUIRES_SHARED(Locks::mutator_lock_);

  DeoptimizationMethodType GetDeoptimizationMethodType(ArtMethod* method)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Called when an instrumented method is exited. Removes the pushed instrumentation frame,
  // returning the intended link register. Generates method exit events. The gpr_result and
  // fpr_result pointers are pointers to the locations where the integer/pointer and floating point
  // result values of the function are stored. Both pointers must always be valid but the values
  // held there will only be meaningful if interpreted as the appropriate type given the function
  // being returned from.
  TwoWordReturn PopInstrumentationStackFrame(Thread* self, uintptr_t* return_pc,
                                             uint64_t* gpr_result, uint64_t* fpr_result)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!deoptimized_methods_lock_);

  // Pops an instrumentation frame from the current thread and generates an unwind event.
  // Returns the return pc for the instrumentation frame that's popped.
  uintptr_t PopMethodForUnwind(Thread* self, bool is_deoptimization) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Callback for ConfigureStubs.
  void InstallStubsForClass(mirror::Class* klass) REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!deoptimized_methods_lock_);

  void InstallStubsForMethod(ArtMethod* method)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!deoptimized_methods_lock_);

  // Install the instrumentation exit stub on every method on the stack of the given thread.
  // This is used by the debugger to cause a deoptimization of the thread's stack after updating
  // local variable(s).
  void InstrumentThreadStack(Thread* thread)
      REQUIRES_SHARED(Locks::mutator_lock_);

  static size_t ComputeFrameId(Thread* self,
                               size_t frame_depth,
                               size_t inlined_frames_before_frame)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Does not hold the lock; used to check whether someone changed from not-instrumented to
  // instrumented during a GC suspend point.
  bool AllocEntrypointsInstrumented() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return alloc_entrypoints_instrumented_;
  }

  InstrumentationLevel GetCurrentInstrumentationLevel() const;

 private:
  // Returns true if moving to the given instrumentation level requires the installation of stubs.
  // False otherwise.
  bool RequiresInstrumentationInstallation(InstrumentationLevel new_level) const;

  // Does the job of installing or removing instrumentation code within methods.
  // In order to support multiple clients using instrumentation at the same time,
  // the caller must pass a unique key (a string) identifying it so we remember which
  // instrumentation level it needs. The current instrumentation level therefore
  // becomes the highest instrumentation level required by any client.
  void ConfigureStubs(const char* key, InstrumentationLevel desired_instrumentation_level)
      REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
      REQUIRES(!deoptimized_methods_lock_,
               !Locks::thread_list_lock_,
               !Locks::classlinker_classes_lock_);
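
  // Illustrative sketch of the "highest requested level wins" policy (the client keys here are
  // hypothetical):
  //
  //   ConfigureStubs("tracer", InstrumentationLevel::kInstrumentWithInstrumentationStubs);
  //   ConfigureStubs("debugger", InstrumentationLevel::kInstrumentWithInterpreter);
  //   // The runtime now runs at kInstrumentWithInterpreter, the max over all clients.
  //   ConfigureStubs("debugger", InstrumentationLevel::kInstrumentNothing);
  //   // The level falls back to kInstrumentWithInstrumentationStubs, still requested by "tracer".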

  void UpdateInterpreterHandlerTable() REQUIRES(Locks::mutator_lock_) {
    /*
     * TUNING: Dalvik's mterp stashes the actual current handler table base in a
     * tls field.  For Arm, this enables all suspend, debug & tracing checks to be
     * collapsed into a single conditionally-executed ldw instruction.
     * Move to Dalvik-style handler-table management for both the goto interpreter and
     * mterp.
     */
    interpreter_handler_table_ = IsActive() ? kAlternativeHandlerTable : kMainHandlerTable;
  }

  // No thread safety analysis to get around SetQuickAllocEntryPointsInstrumented requiring
  // exclusive access to mutator lock which you can't get if the runtime isn't started.
  void SetEntrypointsInstrumented(bool instrumented) NO_THREAD_SAFETY_ANALYSIS;

  void MethodEnterEventImpl(Thread* thread,
                            ObjPtr<mirror::Object> this_object,
                            ArtMethod* method,
                            uint32_t dex_pc) const
      REQUIRES_SHARED(Locks::mutator_lock_);
  void MethodExitEventImpl(Thread* thread,
                           ObjPtr<mirror::Object> this_object,
                           ArtMethod* method,
                           uint32_t dex_pc,
                           const JValue& return_value) const
      REQUIRES_SHARED(Locks::mutator_lock_);
  void DexPcMovedEventImpl(Thread* thread,
                           ObjPtr<mirror::Object> this_object,
                           ArtMethod* method,
                           uint32_t dex_pc) const
      REQUIRES_SHARED(Locks::mutator_lock_);
  void BranchImpl(Thread* thread, ArtMethod* method, uint32_t dex_pc, int32_t offset) const
      REQUIRES_SHARED(Locks::mutator_lock_);
  void InvokeVirtualOrInterfaceImpl(Thread* thread,
                                    ObjPtr<mirror::Object> this_object,
                                    ArtMethod* caller,
                                    uint32_t dex_pc,
                                    ArtMethod* callee) const
      REQUIRES_SHARED(Locks::mutator_lock_);
  void WatchedFramePopImpl(Thread* thread, const ShadowFrame& frame) const
      REQUIRES_SHARED(Locks::mutator_lock_);
  void FieldReadEventImpl(Thread* thread,
                          ObjPtr<mirror::Object> this_object,
                          ArtMethod* method,
                          uint32_t dex_pc,
                          ArtField* field) const
      REQUIRES_SHARED(Locks::mutator_lock_);
  void FieldWriteEventImpl(Thread* thread,
                           ObjPtr<mirror::Object> this_object,
                           ArtMethod* method,
                           uint32_t dex_pc,
                           ArtField* field,
                           const JValue& field_value) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Read barrier-aware utility functions for accessing deoptimized_methods_.
  bool AddDeoptimizedMethod(ArtMethod* method)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(deoptimized_methods_lock_);
  bool IsDeoptimizedMethod(ArtMethod* method)
      REQUIRES_SHARED(Locks::mutator_lock_, deoptimized_methods_lock_);
  bool RemoveDeoptimizedMethod(ArtMethod* method)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(deoptimized_methods_lock_);
  ArtMethod* BeginDeoptimizedMethod()
      REQUIRES_SHARED(Locks::mutator_lock_, deoptimized_methods_lock_);
  bool IsDeoptimizedMethodsEmpty() const
      REQUIRES_SHARED(Locks::mutator_lock_, deoptimized_methods_lock_);
  void UpdateMethodsCodeImpl(ArtMethod* method, const void* quick_code)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!deoptimized_methods_lock_);

  // Have we hijacked ArtMethod::code_ so that it calls instrumentation/interpreter code?
  bool instrumentation_stubs_installed_;

  // Have we hijacked ArtMethod::code_ to reference the enter/exit stubs?
  bool entry_exit_stubs_installed_;

  // Have we hijacked ArtMethod::code_ to reference the enter interpreter stub?
  bool interpreter_stubs_installed_;

  // Do we need the fidelity of events that we only get from running within the interpreter?
  bool interpret_only_;

  // Did the runtime request we only run in the interpreter (i.e., -Xint mode)?
  bool forced_interpret_only_;

  // Do we have any listeners for method entry events? Short-cut to avoid taking the
  // instrumentation_lock_.
  bool have_method_entry_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any listeners for method exit events? Short-cut to avoid taking the
  // instrumentation_lock_.
  bool have_method_exit_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any listeners for method unwind events? Short-cut to avoid taking the
  // instrumentation_lock_.
  bool have_method_unwind_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any listeners for dex move events? Short-cut to avoid taking the
  // instrumentation_lock_.
  bool have_dex_pc_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any listeners for field read events? Short-cut to avoid taking the
  // instrumentation_lock_.
  bool have_field_read_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any listeners for field write events? Short-cut to avoid taking the
  // instrumentation_lock_.
  bool have_field_write_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any exception thrown listeners? Short-cut to avoid taking the instrumentation_lock_.
  bool have_exception_thrown_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any frame pop listeners? Short-cut to avoid taking the instrumentation_lock_.
  bool have_watched_frame_pop_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any branch listeners? Short-cut to avoid taking the instrumentation_lock_.
  bool have_branch_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any invoke listeners? Short-cut to avoid taking the instrumentation_lock_.
  bool have_invoke_virtual_or_interface_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any exception handled listeners? Short-cut to avoid taking the
  // instrumentation_lock_.
  bool have_exception_handled_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Contains the instrumentation level required by each client of the instrumentation identified
  // by a string key.
  typedef SafeMap<const char*, InstrumentationLevel> InstrumentationLevelTable;
  InstrumentationLevelTable requested_instrumentation_levels_ GUARDED_BY(Locks::mutator_lock_);

  // The event listeners, written to with the mutator_lock_ exclusively held.
  // Mutators must be able to iterate over these lists concurrently, that is, with listeners being
  // added or removed while iterating. The modifying thread holds exclusive lock,
  // so other threads cannot iterate (i.e. read the data of the list) at the same time but they
  // do keep iterators that need to remain valid. This is the reason these listeners are std::list
  // and not for example std::vector: the existing storage for a std::list does not move.
  // Note that mutators cannot make a copy of these lists before iterating, as the instrumentation
  // listeners can also be deleted concurrently.
  // As a result, these lists are never trimmed. That's acceptable given the low number of
  // listeners we have.
  std::list<InstrumentationListener*> method_entry_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> method_exit_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> method_unwind_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> branch_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> invoke_virtual_or_interface_listeners_
      GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> dex_pc_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> field_read_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> field_write_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> exception_thrown_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> watched_frame_pop_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> exception_handled_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // The set of methods being deoptimized (by the debugger) which must be executed with interpreter
  // only.
  mutable ReaderWriterMutex deoptimized_methods_lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
  std::unordered_set<ArtMethod*> deoptimized_methods_ GUARDED_BY(deoptimized_methods_lock_);
  bool deoptimization_enabled_;

  // Current interpreter handler table. This is updated each time the thread state flags are
  // modified.
  InterpreterHandlerTable interpreter_handler_table_ GUARDED_BY(Locks::mutator_lock_);

  // Greater than 0 if quick alloc entry points instrumented.
  size_t quick_alloc_entry_points_instrumentation_counter_;

  // alloc_entrypoints_instrumented_ is only updated with all the threads suspended; this is done
  // to prevent races with the GC, where the GC relies on thread suspension to only see
  // alloc_entrypoints_instrumented_ change during suspend points.
  bool alloc_entrypoints_instrumented_;

  friend class InstrumentationTest;  // For GetCurrentInstrumentationLevel and ConfigureStubs.

  DISALLOW_COPY_AND_ASSIGN(Instrumentation);
};
std::ostream& operator<<(std::ostream& os, const Instrumentation::InstrumentationEvent& rhs);
std::ostream& operator<<(std::ostream& os, const Instrumentation::InstrumentationLevel& rhs);

// An element in the instrumentation side stack maintained in art::Thread.
struct InstrumentationStackFrame {
  InstrumentationStackFrame(mirror::Object* this_object,
                            ArtMethod* method,
                            uintptr_t return_pc,
                            size_t frame_id,
                            bool interpreter_entry)
      : this_object_(this_object),
        method_(method),
        return_pc_(return_pc),
        frame_id_(frame_id),
        interpreter_entry_(interpreter_entry) {
  }

  std::string Dump() const REQUIRES_SHARED(Locks::mutator_lock_);

  mirror::Object* this_object_;
  ArtMethod* method_;
  uintptr_t return_pc_;
  size_t frame_id_;
  bool interpreter_entry_;
};

}  // namespace instrumentation
}  // namespace art

#endif  // ART_RUNTIME_INSTRUMENTATION_H_