1 /*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #include "instrumentation.h"
18
19 #include <functional>
20 #include <optional>
21 #include <sstream>
22
23 #include <android-base/logging.h>
24
25 #include "arch/context.h"
26 #include "art_field-inl.h"
27 #include "art_method-inl.h"
28 #include "base/atomic.h"
29 #include "base/callee_save_type.h"
30 #include "class_linker.h"
31 #include "debugger.h"
32 #include "dex/dex_file-inl.h"
33 #include "dex/dex_file_types.h"
34 #include "dex/dex_instruction-inl.h"
35 #include "entrypoints/quick/quick_alloc_entrypoints.h"
36 #include "entrypoints/quick/quick_entrypoints.h"
37 #include "entrypoints/runtime_asm_entrypoints.h"
38 #include "gc_root-inl.h"
39 #include "interpreter/interpreter.h"
40 #include "interpreter/interpreter_common.h"
41 #include "jit/jit.h"
42 #include "jit/jit_code_cache.h"
43 #include "jvalue-inl.h"
44 #include "jvalue.h"
45 #include "mirror/class-inl.h"
46 #include "mirror/dex_cache.h"
47 #include "mirror/object-inl.h"
48 #include "mirror/object_array-inl.h"
49 #include "nterp_helpers.h"
50 #include "nth_caller_visitor.h"
51 #include "oat/oat_file_manager.h"
52 #include "oat/oat_quick_method_header.h"
53 #include "runtime-inl.h"
54 #include "thread.h"
55 #include "thread_list.h"
56
57 namespace art HIDDEN {
58 extern "C" NO_RETURN void artDeoptimize(Thread* self, bool skip_method_exit_callbacks);
59 extern "C" NO_RETURN void artDeliverPendingExceptionFromCode(Thread* self);
60
61 namespace instrumentation {
62
63 constexpr bool kVerboseInstrumentation = false;
64
65 void InstrumentationListener::MethodExited(
66 Thread* thread,
67 ArtMethod* method,
68 OptionalFrame frame,
69 MutableHandle<mirror::Object>& return_value) {
70 DCHECK_EQ(method->GetInterfaceMethodIfProxy(kRuntimePointerSize)->GetReturnTypePrimitive(),
71 Primitive::kPrimNot);
72 const void* original_ret = return_value.Get();
73 JValue v;
74 v.SetL(return_value.Get());
75 MethodExited(thread, method, frame, v);
76 DCHECK(original_ret == v.GetL()) << "Return value changed";
77 }
78
79 void InstrumentationListener::FieldWritten(Thread* thread,
80 Handle<mirror::Object> this_object,
81 ArtMethod* method,
82 uint32_t dex_pc,
83 ArtField* field,
84 Handle<mirror::Object> field_value) {
85 DCHECK(!field->IsPrimitiveType());
86 JValue v;
87 v.SetL(field_value.Get());
88 FieldWritten(thread, this_object, method, dex_pc, field, v);
89 }
90
91 // Instrumentation works on non-inlined frames by updating returned PCs
92 // of compiled frames.
93 static constexpr StackVisitor::StackWalkKind kInstrumentationStackWalk =
94 StackVisitor::StackWalkKind::kSkipInlinedFrames;
95
96 class InstallStubsClassVisitor : public ClassVisitor {
97 public:
98 explicit InstallStubsClassVisitor(Instrumentation* instrumentation)
99 : instrumentation_(instrumentation) {}
100
101 bool operator()(ObjPtr<mirror::Class> klass) override REQUIRES(Locks::mutator_lock_) {
102 instrumentation_->InstallStubsForClass(klass.Ptr());
103 return true; // we visit all classes.
104 }
105
106 private:
107 Instrumentation* const instrumentation_;
108 };
109
110 Instrumentation::Instrumentation()
111 : run_exit_hooks_(false),
112 instrumentation_level_(InstrumentationLevel::kInstrumentNothing),
113 forced_interpret_only_(false),
114 have_method_entry_listeners_(0),
115 have_method_exit_listeners_(0),
116 have_method_unwind_listeners_(false),
117 have_dex_pc_listeners_(false),
118 have_field_read_listeners_(false),
119 have_field_write_listeners_(false),
120 have_exception_thrown_listeners_(false),
121 have_watched_frame_pop_listeners_(false),
122 have_branch_listeners_(false),
123 have_exception_handled_listeners_(false),
124 quick_alloc_entry_points_instrumentation_counter_(0),
125 alloc_entrypoints_instrumented_(false) {}
126
127 bool Instrumentation::ProcessMethodUnwindCallbacks(Thread* self,
128 std::queue<ArtMethod*>& methods,
129 MutableHandle<mirror::Throwable>& exception) {
130 DCHECK(!self->IsExceptionPending());
131 if (!HasMethodUnwindListeners()) {
132 return true;
133 }
134 if (kVerboseInstrumentation) {
135 LOG(INFO) << "Popping frames for exception " << exception->Dump();
136 }
137 // The instrumentation events expect the exception to be set.
138 self->SetException(exception.Get());
139 bool new_exception_thrown = false;
140
141 // Process callbacks for all methods that would be unwound until a new exception is thrown.
142 while (!methods.empty()) {
143 ArtMethod* method = methods.front();
144 methods.pop();
145 if (kVerboseInstrumentation) {
146 LOG(INFO) << "Popping for unwind " << method->PrettyMethod();
147 }
148
149 if (method->IsRuntimeMethod()) {
150 continue;
151 }
152
153 // Notify listeners of method unwind.
154 // TODO: improve the dex_pc information here.
155 uint32_t dex_pc = dex::kDexNoIndex;
156 MethodUnwindEvent(self, method, dex_pc);
157 new_exception_thrown = self->GetException() != exception.Get();
158 if (new_exception_thrown) {
159 break;
160 }
161 }
162
163 exception.Assign(self->GetException());
164 self->ClearException();
165 if (kVerboseInstrumentation && new_exception_thrown) {
166 LOG(INFO) << "Did partial pop of frames due to new exception";
167 }
168 return !new_exception_thrown;
169 }
170
171 void Instrumentation::InstallStubsForClass(ObjPtr<mirror::Class> klass) {
172 if (!klass->IsResolved()) {
173 // We need the class to be resolved to install/uninstall stubs. Otherwise its methods
174 // could not be initialized or linked with regard to class inheritance.
175 } else if (klass->IsErroneousResolved()) {
176 // We can't execute code in an erroneous class: do nothing.
177 } else {
178 for (ArtMethod& method : klass->GetMethods(kRuntimePointerSize)) {
179 InstallStubsForMethod(&method);
180 }
181 }
182 }
183
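// Returns true if `code` is an entrypoint that can deal with a pending class initialization
// check: the quick resolution stub, the quick-to-interpreter bridge, the generic JNI stub, or
// the nterp-with-clinit entry point.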
184 static bool CanHandleInitializationCheck(const void* code) {
185 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
186 return class_linker->IsQuickResolutionStub(code) ||
187 class_linker->IsQuickToInterpreterBridge(code) ||
188 class_linker->IsQuickGenericJniStub(code) ||
189 (code == interpreter::GetNterpWithClinitEntryPoint());
190 }
191
192 static bool IsProxyInit(ArtMethod* method) REQUIRES_SHARED(Locks::mutator_lock_) {
193 // Annoyingly this can be called before we have actually initialized WellKnownClasses, so
194 // we also need to check this based on the declaring-class descriptor. The check is valid because
195 // Proxy only has a single constructor.
196 ArtMethod* well_known_proxy_init = WellKnownClasses::java_lang_reflect_Proxy_init;
197 if (well_known_proxy_init == method) {
198 return true;
199 }
200
201 if (well_known_proxy_init != nullptr) {
202 return false;
203 }
204
205 return method->IsConstructor() && !method->IsStatic() &&
206 method->GetDeclaringClass()->DescriptorEquals("Ljava/lang/reflect/Proxy;");
207 }
208
209 // Returns true if the given entry point can call method entry / exit hooks itself. JITed code
210 // compiled with instrumentation support calls entry / exit hooks directly and doesn't need a stub.
211 static bool CodeSupportsEntryExitHooks(const void* entry_point, ArtMethod* method)
212 REQUIRES_SHARED(Locks::mutator_lock_) {
213 // Proxy.init should always run with the switch interpreter where entry / exit hooks are
214 // supported.
215 if (IsProxyInit(method)) {
216 return true;
217 }
218
219 // In some tests the runtime isn't fully set up and hence the entry points could be nullptr.
220 // Just be conservative and return false here.
221 if (entry_point == nullptr) {
222 return false;
223 }
224
225 ClassLinker* linker = Runtime::Current()->GetClassLinker();
226 // Interpreter supports entry / exit hooks. Resolution stubs fetch code that supports entry / exit
227 // hooks when required. So return true for both cases.
228 if (linker->IsQuickToInterpreterBridge(entry_point) ||
229 linker->IsQuickResolutionStub(entry_point)) {
230 return true;
231 }
232
233 // When JITing code for debuggable runtimes or when instrumentation is active, we generate code
234 // to call method entry / exit hooks when required.
235 jit::Jit* jit = Runtime::Current()->GetJit();
236 if (jit != nullptr && jit->GetCodeCache()->ContainsPc(entry_point)) {
237 // If JITed code was compiled with instrumentation support we support entry / exit hooks.
238 OatQuickMethodHeader* header = OatQuickMethodHeader::FromEntryPoint(entry_point);
239 return CodeInfo::IsDebuggable(header->GetOptimizedCodeInfoPtr());
240 }
241
242 // GenericJni trampoline can handle entry / exit hooks.
243 if (linker->IsQuickGenericJniStub(entry_point)) {
244 return true;
245 }
246
247 // The remaining cases are nterp / oat code / JIT code that isn't compiled with instrumentation
248 // support.
249 return false;
250 }
251
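// Points the method's quick entrypoint at `quick_code`. In debug builds this also checks that
// the new entrypoint is compatible with the current state: methods that still need a clinit
// check get an entrypoint that can handle it, JIT code on ARM keeps the Thumb bit set, and the
// installed instrumentation level (entry / exit hooks, interpreter-only) is respected.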
252 static void UpdateEntryPoints(ArtMethod* method, const void* quick_code)
253 REQUIRES_SHARED(Locks::mutator_lock_) {
254 if (kIsDebugBuild) {
255 if (method->StillNeedsClinitCheckMayBeDead()) {
256 CHECK(CanHandleInitializationCheck(quick_code));
257 }
258 jit::Jit* jit = Runtime::Current()->GetJit();
259 if (jit != nullptr && jit->GetCodeCache()->ContainsPc(quick_code)) {
260 // Ensure we always have the thumb entrypoint for JIT on arm32.
261 if (kRuntimeISA == InstructionSet::kArm) {
262 CHECK_EQ(reinterpret_cast<uintptr_t>(quick_code) & 1, 1u);
263 }
264 }
265 const Instrumentation* instr = Runtime::Current()->GetInstrumentation();
266 if (instr->EntryExitStubsInstalled()) {
267 CHECK(CodeSupportsEntryExitHooks(quick_code, method));
268 }
269 if (instr->InterpreterStubsInstalled() && !method->IsNative()) {
270 CHECK_EQ(quick_code, GetQuickToInterpreterBridge());
271 }
272 }
273 // If the method is from a boot image, don't dirty it if the entrypoint
274 // doesn't change.
275 if (method->GetEntryPointFromQuickCompiledCode() != quick_code) {
276 method->SetEntryPointFromQuickCompiledCode(quick_code);
277 }
278 }
279
280 bool Instrumentation::NeedsDexPcEvents(ArtMethod* method, Thread* thread) {
281 return (InterpretOnly(method) || thread->IsForceInterpreter()) && HasDexPcListeners();
282 }
283
284 bool Instrumentation::InterpretOnly(ArtMethod* method) REQUIRES_SHARED(Locks::mutator_lock_) {
285 if (method->IsNative()) {
286 return false;
287 }
288 return InterpretOnly() || IsDeoptimized(method);
289 }
290
291 static bool CanUseAotCode(const void* quick_code)
292 REQUIRES_SHARED(Locks::mutator_lock_) {
293 if (quick_code == nullptr) {
294 return false;
295 }
296 Runtime* runtime = Runtime::Current();
297 // For simplicity, we never use AOT code for debuggable.
298 if (runtime->IsJavaDebuggable()) {
299 return false;
300 }
301
302 if (runtime->IsNativeDebuggable()) {
303 DCHECK(runtime->UseJitCompilation() && runtime->GetJit()->JitAtFirstUse());
304 // If we are doing native debugging, ignore application's AOT code,
305 // since we want to JIT it (at first use) with extra stackmaps for native
306 // debugging. We keep however all AOT code from the boot image,
307 // since the JIT-at-first-use is blocking and would result in non-negligible
308 // startup performance impact.
309 return runtime->GetHeap()->IsInBootImageOatFile(quick_code);
310 }
311
312 return true;
313 }
314
315 static bool CanUseNterp(ArtMethod* method) REQUIRES_SHARED(Locks::mutator_lock_) {
316 return interpreter::CanRuntimeUseNterp() &&
317 CanMethodUseNterp(method) &&
318 method->IsDeclaringClassVerifiedMayBeDead();
319 }
320
321 static const void* GetOptimizedCodeFor(ArtMethod* method) REQUIRES_SHARED(Locks::mutator_lock_) {
322 DCHECK(!Runtime::Current()->GetInstrumentation()->InterpretOnly(method));
323 CHECK(method->IsInvokable()) << method->PrettyMethod();
324 if (method->IsProxyMethod()) {
325 return GetQuickProxyInvokeHandler();
326 }
327
328 // In debuggable mode, we can only use AOT code for native methods.
329 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
330 const void* aot_code = method->GetOatMethodQuickCode(class_linker->GetImagePointerSize());
331 if (CanUseAotCode(aot_code)) {
332 return aot_code;
333 }
334
335 // If the method has been precompiled, there can be a JIT version.
336 jit::Jit* jit = Runtime::Current()->GetJit();
337 if (jit != nullptr) {
338 const void* code = jit->GetCodeCache()->GetSavedEntryPointOfPreCompiledMethod(method);
339 if (code != nullptr) {
340 return code;
341 }
342 }
343
344 // We need to check if the class has been verified for setting up nterp, as
345 // the verifier could punt the method to the switch interpreter in case we
346 // need to do lock counting.
347 if (CanUseNterp(method)) {
348 return interpreter::GetNterpEntryPoint();
349 }
350
351 return method->IsNative() ? GetQuickGenericJniStub() : GetQuickToInterpreterBridge();
352 }
353
354 void Instrumentation::InitializeMethodsCode(ArtMethod* method, const void* aot_code)
355 REQUIRES_SHARED(Locks::mutator_lock_) {
356 if (!method->IsInvokable()) {
357 DCHECK(method->GetEntryPointFromQuickCompiledCode() == nullptr ||
358 Runtime::Current()->GetClassLinker()->IsQuickToInterpreterBridge(
359 method->GetEntryPointFromQuickCompiledCode()));
360 UpdateEntryPoints(method, GetQuickToInterpreterBridge());
361 return;
362 }
363
364 // Use instrumentation entrypoints if instrumentation is installed.
365 if (UNLIKELY(EntryExitStubsInstalled() || IsForcedInterpretOnly() || IsDeoptimized(method))) {
366 UpdateEntryPoints(
367 method, method->IsNative() ? GetQuickGenericJniStub() : GetQuickToInterpreterBridge());
368 return;
369 }
370
371 // Special case if we need an initialization check.
372 // The method and its declaring class may be dead when starting JIT GC during managed heap GC.
373 if (method->StillNeedsClinitCheckMayBeDead()) {
374 // If we have code but the method needs a class initialization check before calling
375 // that code, install the resolution stub that will perform the check.
376 // It will be replaced by the proper entry point by ClassLinker::FixupStaticTrampolines
377 // after initializing class (see ClassLinker::InitializeClass method).
378 // Note: this mimics the logic in image_writer.cc that installs the resolution
379 // stub only if we have compiled code or we can execute nterp, and the method needs a class
380 // initialization check.
381 if (aot_code != nullptr || method->IsNative() || CanUseNterp(method)) {
382 if (kIsDebugBuild && CanUseNterp(method)) {
383 // Adds some test coverage for the nterp clinit entrypoint.
384 UpdateEntryPoints(method, interpreter::GetNterpWithClinitEntryPoint());
385 } else {
386 UpdateEntryPoints(method, GetQuickResolutionStub());
387 }
388 } else {
389 UpdateEntryPoints(method, GetQuickToInterpreterBridge());
390 }
391 return;
392 }
393
394 // Use the provided AOT code if possible.
395 if (CanUseAotCode(aot_code)) {
396 UpdateEntryPoints(method, aot_code);
397 return;
398 }
399
400 // We check if the class is verified, as we need the slow interpreter for lock verification.
401 // If the class is not verified, this will be updated in
402 // ClassLinker::UpdateClassAfterVerification.
403 if (CanUseNterp(method)) {
404 UpdateEntryPoints(method, interpreter::GetNterpEntryPoint());
405 return;
406 }
407
408 // Use default entrypoints.
409 UpdateEntryPoints(
410 method, method->IsNative() ? GetQuickGenericJniStub() : GetQuickToInterpreterBridge());
411 }
412
413 void Instrumentation::InstallStubsForMethod(ArtMethod* method) {
414 if (!method->IsInvokable() || method->IsProxyMethod()) {
415 // Do not change stubs for these methods.
416 return;
417 }
418 // Don't stub Proxy.<init>. Note that the Proxy class itself is not a proxy class.
419 // TODO: We should remove the need for this, since it means we cannot always correctly detect
420 // calls to Proxy.<init>.
421 if (IsProxyInit(method)) {
422 return;
423 }
424
425 // If the instrumentation needs to go through the interpreter, just update the
426 // entrypoint to interpreter.
427 if (InterpretOnly(method)) {
428 UpdateEntryPoints(method, GetQuickToInterpreterBridge());
429 return;
430 }
431
432 if (EntryExitStubsInstalled()) {
433 // Install interpreter bridge / GenericJni stub if the existing code doesn't support
434 // entry / exit hooks.
435 if (!CodeSupportsEntryExitHooks(method->GetEntryPointFromQuickCompiledCode(), method)) {
436 UpdateEntryPoints(
437 method, method->IsNative() ? GetQuickGenericJniStub() : GetQuickToInterpreterBridge());
438 }
439 return;
440 }
441
442 // We're being asked to restore the entrypoints after instrumentation.
443 CHECK_EQ(instrumentation_level_, InstrumentationLevel::kInstrumentNothing);
444 // We need to have the resolution stub still if the class is not initialized.
445 if (method->StillNeedsClinitCheck()) {
446 UpdateEntryPoints(method, GetQuickResolutionStub());
447 return;
448 }
449 UpdateEntryPoints(method, GetOptimizedCodeFor(method));
450 }
451
452 void Instrumentation::UpdateEntrypointsForDebuggable() {
453 Runtime* runtime = Runtime::Current();
454 // If we are transitioning from non-debuggable to debuggable, we patch
455 // entry points of methods to remove any aot / JITed entry points.
456 InstallStubsClassVisitor visitor(this);
457 runtime->GetClassLinker()->VisitClasses(&visitor);
458 }
459
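// Returns true if frames of `method` running under `header` can report method exit events:
// native methods using the generic JNI stub and optimized code compiled as debuggable support
// them, while nterp and non-debuggable optimized code do not.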
460 bool Instrumentation::MethodSupportsExitEvents(ArtMethod* method,
461 const OatQuickMethodHeader* header) {
462 if (header == nullptr) {
463 // The header can be nullptr for runtime / proxy methods, which don't support method exit hooks,
464 // or for native methods that use generic JNI stubs. Generic JNI stubs do support method exit
465 // hooks.
466 return method->IsNative();
467 }
468
469 if (header->IsNterpMethodHeader()) {
470 // Nterp doesn't support method exit events
471 return false;
472 }
473
474 DCHECK(header->IsOptimized());
475 if (CodeInfo::IsDebuggable(header->GetOptimizedCodeInfoPtr())) {
476 // For optimized code, we only support method entry / exit hooks if they are compiled as
477 // debuggable.
478 return true;
479 }
480
481 return false;
482 }
483
484 // Updates the frames on the stack to support any changes related to instrumentation.
485 // For JITed frames, the ShouldDeoptimize flag is updated to enable deoptimization of
486 // methods when necessary. Shadow frames are updated if dex pc event
487 // notification has changed. When deopt_all_frames is true, the flag is
488 // updated to force a deoptimization.
489 void InstrumentationInstallStack(Thread* thread, bool deopt_all_frames)
490 REQUIRES(Locks::mutator_lock_) {
491 Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
492 struct InstallStackVisitor final : public StackVisitor {
493 InstallStackVisitor(Thread* thread_in,
494 Context* context,
495 bool deopt_all_frames)
496 : StackVisitor(thread_in, context, kInstrumentationStackWalk),
497 deopt_all_frames_(deopt_all_frames),
498 runtime_methods_need_deopt_check_(false) {}
499
500 bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
501 ArtMethod* m = GetMethod();
502 if (m == nullptr || m->IsRuntimeMethod()) {
503 if (kVerboseInstrumentation) {
504 LOG(INFO) << " Skipping upcall / runtime method. Frame " << GetFrameId();
505 }
506 return true; // Ignore upcalls and runtime methods.
507 }
508
509 bool is_shadow_frame = GetCurrentQuickFrame() == nullptr;
510 if (kVerboseInstrumentation) {
511 LOG(INFO) << "Processing frame: method: " << m->PrettyMethod()
512 << " is_shadow_frame: " << is_shadow_frame;
513 }
514
515 // Handle interpreter frame.
516 if (is_shadow_frame) {
517 // Since we are updating the instrumentation related information we have to recalculate
518 // NeedsDexPcEvents. For example, when a new method or thread is deoptimized / interpreter
519 // stubs are installed the NeedsDexPcEvents could change for the shadow frames on the stack.
520 // If we don't update it here we would miss reporting dex pc events which is incorrect.
521 ShadowFrame* shadow_frame = GetCurrentShadowFrame();
522 DCHECK(shadow_frame != nullptr);
523 shadow_frame->SetNotifyDexPcMoveEvents(
524 Runtime::Current()->GetInstrumentation()->NeedsDexPcEvents(GetMethod(), GetThread()));
525 return true; // Continue.
526 }
527
528 DCHECK(!m->IsRuntimeMethod());
529 const OatQuickMethodHeader* method_header = GetCurrentOatQuickMethodHeader();
530 // If it is a JITed frame, just set the deopt bit if required; otherwise continue.
531 // We need kForceDeoptForRedefinition to ensure we don't use any JITed code after a
532 // redefinition. We support redefinition only if the runtime has started off as a
533 // debuggable runtime which makes sure we don't use any AOT or Nterp code.
534 // The CheckCallerForDeopt is an optimization which we only do for non-native JITed code for
535 // now. We can extend it to native methods but that needs reserving an additional stack slot.
536 // We don't do it currently since that wasn't important for debugger performance.
537 if (method_header != nullptr && method_header->HasShouldDeoptimizeFlag()) {
538 if (deopt_all_frames_) {
539 runtime_methods_need_deopt_check_ = true;
540 SetShouldDeoptimizeFlag(DeoptimizeFlagValue::kForceDeoptForRedefinition);
541 }
542 SetShouldDeoptimizeFlag(DeoptimizeFlagValue::kCheckCallerForDeopt);
543 }
544
545 return true; // Continue.
546 }
547 bool deopt_all_frames_;
548 bool runtime_methods_need_deopt_check_;
549 };
550 if (kVerboseInstrumentation) {
551 std::string thread_name;
552 thread->GetThreadName(thread_name);
553 LOG(INFO) << "Installing exit stubs in " << thread_name;
554 }
555
556 std::unique_ptr<Context> context(Context::Create());
557 InstallStackVisitor visitor(thread,
558 context.get(),
559 deopt_all_frames);
560 visitor.WalkStack(true);
561
562 if (visitor.runtime_methods_need_deopt_check_) {
563 thread->SetDeoptCheckRequired(true);
564 }
565
566 thread->VerifyStack();
567 }
568
569 void UpdateNeedsDexPcEventsOnStack(Thread* thread) REQUIRES(Locks::mutator_lock_) {
570 Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
571
572 struct InstallStackVisitor final : public StackVisitor {
573 InstallStackVisitor(Thread* thread_in, Context* context)
574 : StackVisitor(thread_in, context, kInstrumentationStackWalk) {}
575
576 bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
577 ShadowFrame* shadow_frame = GetCurrentShadowFrame();
578 if (shadow_frame != nullptr) {
579 shadow_frame->SetNotifyDexPcMoveEvents(
580 Runtime::Current()->GetInstrumentation()->NeedsDexPcEvents(GetMethod(), GetThread()));
581 }
582 return true;
583 }
584 };
585
586 std::unique_ptr<Context> context(Context::Create());
587 InstallStackVisitor visitor(thread, context.get());
588 visitor.WalkStack(true);
589 }
590
591 void ReportMethodEntryForOnStackMethods(InstrumentationListener* listener, Thread* thread)
592 REQUIRES(Locks::mutator_lock_) {
593 Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
594
595 struct InstallStackVisitor final : public StackVisitor {
596 InstallStackVisitor(Thread* thread_in, Context* context)
597 : StackVisitor(thread_in, context, kInstrumentationStackWalk) {}
598
599 bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
600 ArtMethod* m = GetMethod();
601 if (m == nullptr || m->IsRuntimeMethod()) {
602 // Skip upcall / runtime methods
603 return true;
604 }
605
606 if (GetCurrentShadowFrame() != nullptr) {
607 stack_methods_.push_back(m);
608 } else {
609 const OatQuickMethodHeader* method_header = GetCurrentOatQuickMethodHeader();
610 if (Runtime::Current()->GetInstrumentation()->MethodSupportsExitEvents(m, method_header)) {
611 // It is unexpected to see a method enter event but not a method exit event, so record
612 // stack methods only for frames that support method exit events. Even if we deoptimize, we
613 // make sure that we only call the method exit event if the frame supported it in the first
614 // place. For example, deoptimizing from JITed code with debug support calls a method exit hook,
615 // but deoptimizing from nterp doesn't.
616 stack_methods_.push_back(m);
617 }
618 }
619 return true;
620 }
621
622 std::vector<ArtMethod*> stack_methods_;
623 };
624
625 if (kVerboseInstrumentation) {
626 std::string thread_name;
627 thread->GetThreadName(thread_name);
628 LOG(INFO) << "Updating DexPcMoveEvents on shadow frames on stack " << thread_name;
629 }
630
631 std::unique_ptr<Context> context(Context::Create());
632 InstallStackVisitor visitor(thread, context.get());
633 visitor.WalkStack(true);
634
635 // Create method enter events for all methods currently on the thread's stack.
636 for (auto smi = visitor.stack_methods_.rbegin(); smi != visitor.stack_methods_.rend(); smi++) {
637 listener->MethodEntered(thread, *smi);
638 }
639 }
640
641 void Instrumentation::InstrumentThreadStack(Thread* thread, bool force_deopt) {
642 run_exit_hooks_ = true;
643 InstrumentationInstallStack(thread, force_deopt);
644 }
645
646 void Instrumentation::InstrumentAllThreadStacks(bool force_deopt) {
647 run_exit_hooks_ = true;
648 MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
649 for (Thread* thread : Runtime::Current()->GetThreadList()->GetList()) {
650 InstrumentThreadStack(thread, force_deopt);
651 }
652 }
653
654 static void InstrumentationRestoreStack(Thread* thread) REQUIRES(Locks::mutator_lock_) {
655 Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
656
657 struct RestoreStackVisitor final : public StackVisitor {
658 RestoreStackVisitor(Thread* thread)
659 : StackVisitor(thread, nullptr, kInstrumentationStackWalk), thread_(thread) {}
660
661 bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
662 if (GetCurrentQuickFrame() == nullptr) {
663 return true;
664 }
665
666 const OatQuickMethodHeader* method_header = GetCurrentOatQuickMethodHeader();
667 if (method_header != nullptr && method_header->HasShouldDeoptimizeFlag()) {
668 // We shouldn't restore stack if any of the frames need a force deopt
669 DCHECK(!ShouldForceDeoptForRedefinition());
670 UnsetShouldDeoptimizeFlag(DeoptimizeFlagValue::kCheckCallerForDeopt);
671 }
672 return true; // Continue.
673 }
674 Thread* const thread_;
675 };
676
677 if (kVerboseInstrumentation) {
678 std::string thread_name;
679 thread->GetThreadName(thread_name);
680 LOG(INFO) << "Restoring stack for " << thread_name;
681 }
682 DCHECK(!thread->IsDeoptCheckRequired());
683 RestoreStackVisitor visitor(thread);
684 visitor.WalkStack(true);
685 }
686
687 static bool HasFramesNeedingForceDeopt(Thread* thread) REQUIRES(Locks::mutator_lock_) {
688 Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
689
690 struct CheckForForceDeoptStackVisitor final : public StackVisitor {
691 CheckForForceDeoptStackVisitor(Thread* thread)
692 : StackVisitor(thread, nullptr, kInstrumentationStackWalk),
693 thread_(thread),
694 force_deopt_check_needed_(false) {}
695
696 bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
697 if (GetCurrentQuickFrame() == nullptr) {
698 return true;
699 }
700
701 const OatQuickMethodHeader* method_header = GetCurrentOatQuickMethodHeader();
702 if (method_header != nullptr && method_header->HasShouldDeoptimizeFlag()) {
703 if (ShouldForceDeoptForRedefinition()) {
704 force_deopt_check_needed_ = true;
705 return false;
706 }
707 }
708 return true; // Continue.
709 }
710 Thread* const thread_;
711 bool force_deopt_check_needed_;
712 };
713
714 CheckForForceDeoptStackVisitor visitor(thread);
715 visitor.WalkStack(true);
716 // If there is a frame that requires a force deopt we should have set the IsDeoptCheckRequired
717 // bit. We don't check if the bit needs to be reset on every method exit / deoptimization. We
718 // only check when we no longer need instrumentation support. So it is possible that the bit is
719 // set but we don't find any frames that need a force deopt on the stack, so the reverse
720 // implication doesn't hold.
721 DCHECK_IMPLIES(visitor.force_deopt_check_needed_, thread->IsDeoptCheckRequired());
722 return visitor.force_deopt_check_needed_;
723 }
724
725 void Instrumentation::DeoptimizeAllThreadFrames() {
726 InstrumentAllThreadStacks(/* force_deopt= */ true);
727 }
728
729 static bool HasEvent(Instrumentation::InstrumentationEvent expected, uint32_t events) {
730 return (events & expected) != 0;
731 }
732
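// Listener lists are only ever grown: removal nulls out the slot instead of erasing it so that
// concurrent iteration over the list stays safe, and the add path reuses a null slot before
// appending to the end.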
733 static bool PotentiallyAddListenerTo(Instrumentation::InstrumentationEvent event,
734 uint32_t events,
735 std::list<InstrumentationListener*>& list,
736 InstrumentationListener* listener)
737 REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_) {
738 Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
739 if (!HasEvent(event, events)) {
740 return false;
741 }
742 // If there is a free slot in the list, we insert the listener in that slot.
743 // Otherwise we add it to the end of the list.
744 auto it = std::find(list.begin(), list.end(), nullptr);
745 if (it != list.end()) {
746 *it = listener;
747 } else {
748 list.push_back(listener);
749 }
750 return true;
751 }
752
753 static void PotentiallyAddListenerTo(Instrumentation::InstrumentationEvent event,
754 uint32_t events,
755 std::list<InstrumentationListener*>& list,
756 InstrumentationListener* listener,
757 bool* has_listener)
758 REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_) {
759 if (PotentiallyAddListenerTo(event, events, list, listener)) {
760 *has_listener = true;
761 }
762 }
763
764 static void PotentiallyAddListenerTo(Instrumentation::InstrumentationEvent event,
765 uint32_t events,
766 std::list<InstrumentationListener*>& list,
767 InstrumentationListener* listener,
768 uint8_t* has_listener,
769 uint8_t flag)
770 REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_) {
771 if (PotentiallyAddListenerTo(event, events, list, listener)) {
772 *has_listener = *has_listener | flag;
773 }
774 }
775
776 void Instrumentation::AddListener(InstrumentationListener* listener,
777 uint32_t events,
778 bool is_trace_listener) {
779 Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
780 if (is_trace_listener) {
781 PotentiallyAddListenerTo(kMethodEntered,
782 events,
783 method_entry_fast_trace_listeners_,
784 listener,
785 &have_method_entry_listeners_,
786 kFastTraceListeners);
787 } else {
788 PotentiallyAddListenerTo(kMethodEntered,
789 events,
790 method_entry_slow_listeners_,
791 listener,
792 &have_method_entry_listeners_,
793 kSlowMethodEntryExitListeners);
794 }
795 if (is_trace_listener) {
796 PotentiallyAddListenerTo(kMethodExited,
797 events,
798 method_exit_fast_trace_listeners_,
799 listener,
800 &have_method_exit_listeners_,
801 kFastTraceListeners);
802 } else {
803 PotentiallyAddListenerTo(kMethodExited,
804 events,
805 method_exit_slow_listeners_,
806 listener,
807 &have_method_exit_listeners_,
808 kSlowMethodEntryExitListeners);
809 }
810 PotentiallyAddListenerTo(kMethodUnwind,
811 events,
812 method_unwind_listeners_,
813 listener,
814 &have_method_unwind_listeners_);
815 PotentiallyAddListenerTo(kBranch,
816 events,
817 branch_listeners_,
818 listener,
819 &have_branch_listeners_);
820 PotentiallyAddListenerTo(kDexPcMoved,
821 events,
822 dex_pc_listeners_,
823 listener,
824 &have_dex_pc_listeners_);
825 PotentiallyAddListenerTo(kFieldRead,
826 events,
827 field_read_listeners_,
828 listener,
829 &have_field_read_listeners_);
830 PotentiallyAddListenerTo(kFieldWritten,
831 events,
832 field_write_listeners_,
833 listener,
834 &have_field_write_listeners_);
835 PotentiallyAddListenerTo(kExceptionThrown,
836 events,
837 exception_thrown_listeners_,
838 listener,
839 &have_exception_thrown_listeners_);
840 PotentiallyAddListenerTo(kWatchedFramePop,
841 events,
842 watched_frame_pop_listeners_,
843 listener,
844 &have_watched_frame_pop_listeners_);
845 PotentiallyAddListenerTo(kExceptionHandled,
846 events,
847 exception_handled_listeners_,
848 listener,
849 &have_exception_handled_listeners_);
850 if (HasEvent(kDexPcMoved, events)) {
851 MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
852 for (Thread* thread : Runtime::Current()->GetThreadList()->GetList()) {
853 UpdateNeedsDexPcEventsOnStack(thread);
854 }
855 }
856 }
857
858 static bool PotentiallyRemoveListenerFrom(Instrumentation::InstrumentationEvent event,
859 uint32_t events,
860 std::list<InstrumentationListener*>& list,
861 InstrumentationListener* listener)
862 REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_) {
863 Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
864 if (!HasEvent(event, events)) {
865 return false;
866 }
867 auto it = std::find(list.begin(), list.end(), listener);
868 if (it != list.end()) {
869 // Just update the entry, do not remove from the list. Removing entries in the list
870 // is unsafe when mutators are iterating over it.
871 *it = nullptr;
872 }
873
874 // Check if the list contains any non-null listener.
875 for (InstrumentationListener* l : list) {
876 if (l != nullptr) {
877 return false;
878 }
879 }
880
881 return true;
882 }
883
884 static void PotentiallyRemoveListenerFrom(Instrumentation::InstrumentationEvent event,
885 uint32_t events,
886 std::list<InstrumentationListener*>& list,
887 InstrumentationListener* listener,
888 bool* has_listener)
889 REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_) {
890 if (PotentiallyRemoveListenerFrom(event, events, list, listener)) {
891 *has_listener = false;
892 }
893 }
894
895 static void PotentiallyRemoveListenerFrom(Instrumentation::InstrumentationEvent event,
896 uint32_t events,
897 std::list<InstrumentationListener*>& list,
898 InstrumentationListener* listener,
899 uint8_t* has_listener,
900 uint8_t flag)
901 REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_) {
902 if (PotentiallyRemoveListenerFrom(event, events, list, listener)) {
903 *has_listener = *has_listener & ~flag;
904 }
905 }
906
907 void Instrumentation::RemoveListener(InstrumentationListener* listener,
908 uint32_t events,
909 bool is_trace_listener) {
910 Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
911 if (is_trace_listener) {
912 PotentiallyRemoveListenerFrom(kMethodEntered,
913 events,
914 method_entry_fast_trace_listeners_,
915 listener,
916 &have_method_entry_listeners_,
917 kFastTraceListeners);
918 } else {
919 PotentiallyRemoveListenerFrom(kMethodEntered,
920 events,
921 method_entry_slow_listeners_,
922 listener,
923 &have_method_entry_listeners_,
924 kSlowMethodEntryExitListeners);
925 }
926 if (is_trace_listener) {
927 PotentiallyRemoveListenerFrom(kMethodExited,
928 events,
929 method_exit_fast_trace_listeners_,
930 listener,
931 &have_method_exit_listeners_,
932 kFastTraceListeners);
933 } else {
934 PotentiallyRemoveListenerFrom(kMethodExited,
935 events,
936 method_exit_slow_listeners_,
937 listener,
938 &have_method_exit_listeners_,
939 kSlowMethodEntryExitListeners);
940 }
941 PotentiallyRemoveListenerFrom(kMethodUnwind,
942 events,
943 method_unwind_listeners_,
944 listener,
945 &have_method_unwind_listeners_);
946 PotentiallyRemoveListenerFrom(kBranch,
947 events,
948 branch_listeners_,
949 listener,
950 &have_branch_listeners_);
951 PotentiallyRemoveListenerFrom(kDexPcMoved,
952 events,
953 dex_pc_listeners_,
954 listener,
955 &have_dex_pc_listeners_);
956 PotentiallyRemoveListenerFrom(kFieldRead,
957 events,
958 field_read_listeners_,
959 listener,
960 &have_field_read_listeners_);
961 PotentiallyRemoveListenerFrom(kFieldWritten,
962 events,
963 field_write_listeners_,
964 listener,
965 &have_field_write_listeners_);
966 PotentiallyRemoveListenerFrom(kExceptionThrown,
967 events,
968 exception_thrown_listeners_,
969 listener,
970 &have_exception_thrown_listeners_);
971 PotentiallyRemoveListenerFrom(kWatchedFramePop,
972 events,
973 watched_frame_pop_listeners_,
974 listener,
975 &have_watched_frame_pop_listeners_);
976 PotentiallyRemoveListenerFrom(kExceptionHandled,
977 events,
978 exception_handled_listeners_,
979 listener,
980 &have_exception_handled_listeners_);
981 if (HasEvent(kDexPcMoved, events)) {
982 MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
983 for (Thread* thread : Runtime::Current()->GetThreadList()->GetList()) {
984 UpdateNeedsDexPcEventsOnStack(thread);
985 }
986 }
987 }
988
989 Instrumentation::InstrumentationLevel Instrumentation::GetCurrentInstrumentationLevel() const {
990 return instrumentation_level_;
991 }
992
993 void Instrumentation::ConfigureStubs(const char* key,
994 InstrumentationLevel desired_level,
995 bool try_switch_to_non_debuggable) {
996 // Store the instrumentation level for this key or remove it.
997 if (desired_level == InstrumentationLevel::kInstrumentNothing) {
998 // The client no longer needs instrumentation.
999 requested_instrumentation_levels_.erase(key);
1000 } else {
1001 // The client needs instrumentation.
1002 requested_instrumentation_levels_.Overwrite(key, desired_level);
1003 }
1004
1005 UpdateStubs(try_switch_to_non_debuggable);
1006 }
1007
1008 void Instrumentation::UpdateInstrumentationLevel(InstrumentationLevel requested_level) {
1009 instrumentation_level_ = requested_level;
1010 }
1011
1012 void Instrumentation::EnableEntryExitHooks(const char* key) {
1013 DCHECK(Runtime::Current()->IsJavaDebuggable());
1014 ConfigureStubs(key,
1015 InstrumentationLevel::kInstrumentWithEntryExitHooks,
1016 /*try_switch_to_non_debuggable=*/false);
1017 }
1018
1019 void Instrumentation::MaybeRestoreInstrumentationStack() {
1020 // Restore stack only if there is no method currently deoptimized.
1021 if (!IsDeoptimizedMethodsEmpty()) {
1022 return;
1023 }
1024
1025 Thread* self = Thread::Current();
1026 MutexLock mu(self, *Locks::thread_list_lock_);
1027 bool no_remaining_deopts = true;
1028 // Check that there are no other forced deoptimizations. Do it here so we only need to lock
1029 // thread_list_lock once.
1030 // The compiler gets confused on the thread annotations, so use
1031 // NO_THREAD_SAFETY_ANALYSIS. Note that we hold the mutator lock
1032 // exclusively at this point.
1033 Locks::mutator_lock_->AssertExclusiveHeld(self);
1034 Runtime::Current()->GetThreadList()->ForEach([&](Thread* t) NO_THREAD_SAFETY_ANALYSIS {
1035 bool has_force_deopt_frames = HasFramesNeedingForceDeopt(t);
1036 if (!has_force_deopt_frames) {
1037 // We no longer have any frames that require a force deopt check. If the bit was true then we
1038 // had some frames earlier but they already got deoptimized and are no longer on stack.
1039 t->SetDeoptCheckRequired(false);
1040 }
1041 no_remaining_deopts =
1042 no_remaining_deopts &&
1043 !t->IsForceInterpreter() &&
1044 !t->HasDebuggerShadowFrames() &&
1045 !has_force_deopt_frames;
1046 });
1047 if (no_remaining_deopts) {
1048 Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack);
1049 run_exit_hooks_ = false;
1050 }
1051 }
1052
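// Recomputes the highest instrumentation level requested by any client and transitions to it:
// updates the level, optionally switches the runtime debug state, re-installs method
// entrypoints, and instruments or restores the thread stacks as needed.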
1053 void Instrumentation::UpdateStubs(bool try_switch_to_non_debuggable) {
1054 // Look for the highest required instrumentation level.
1055 InstrumentationLevel requested_level = InstrumentationLevel::kInstrumentNothing;
1056 for (const auto& v : requested_instrumentation_levels_) {
1057 requested_level = std::max(requested_level, v.second);
1058 }
1059
1060 if (GetCurrentInstrumentationLevel() == requested_level) {
1061 // We're already set.
1062 return;
1063 }
1064
1065 Thread* const self = Thread::Current();
1066 Runtime* runtime = Runtime::Current();
1067 Locks::mutator_lock_->AssertExclusiveHeld(self);
1068 Locks::thread_list_lock_->AssertNotHeld(self);
1069 // The following needs to happen in the same order.
1070 // 1. Update the instrumentation level
1071 // 2. Switch the runtime to non-debuggable if requested. We switch to non-debuggable only when
1072 // the instrumentation level is set to kInstrumentNothing. So this needs to happen only after
1073 // updating the instrumentation level.
1074 // 3. Update the entry points. We use AOT code only if we aren't debuggable runtime. So update
1075 // entrypoints after switching the instrumentation level.
1076 UpdateInstrumentationLevel(requested_level);
1077 if (try_switch_to_non_debuggable) {
1078 MaybeSwitchRuntimeDebugState(self);
1079 }
1080 InstallStubsClassVisitor visitor(this);
1081 runtime->GetClassLinker()->VisitClasses(&visitor);
1082 if (requested_level > InstrumentationLevel::kInstrumentNothing) {
1083 InstrumentAllThreadStacks(/* force_deopt= */ false);
1084 } else {
1085 MaybeRestoreInstrumentationStack();
1086 }
1087 }
1088
1089 static void ResetQuickAllocEntryPointsForThread(Thread* thread, [[maybe_unused]] void* arg) {
1090 thread->ResetQuickAllocEntryPointsForThread();
1091 }
1092
1093 void Instrumentation::SetEntrypointsInstrumented(bool instrumented) {
1094 Thread* self = Thread::Current();
1095 Runtime* runtime = Runtime::Current();
1096 Locks::mutator_lock_->AssertNotHeld(self);
1097 Locks::instrument_entrypoints_lock_->AssertHeld(self);
1098 if (runtime->IsStarted()) {
1099 ScopedSuspendAll ssa(__FUNCTION__);
1100 MutexLock mu(self, *Locks::runtime_shutdown_lock_);
1101 SetQuickAllocEntryPointsInstrumented(instrumented);
1102 ResetQuickAllocEntryPoints();
1103 alloc_entrypoints_instrumented_ = instrumented;
1104 } else {
1105 MutexLock mu(self, *Locks::runtime_shutdown_lock_);
1106 SetQuickAllocEntryPointsInstrumented(instrumented);
1107
1108 // Note: ResetQuickAllocEntryPoints only works when the runtime is started. Manually run the
1109 // update for just this thread.
1110 // Note: self may be null. One of those paths is setting instrumentation in the Heap
1111 // constructor for gcstress mode.
1112 if (self != nullptr) {
1113 ResetQuickAllocEntryPointsForThread(self, nullptr);
1114 }
1115
1116 alloc_entrypoints_instrumented_ = instrumented;
1117 }
1118 }
1119
1120 void Instrumentation::InstrumentQuickAllocEntryPoints() {
1121 MutexLock mu(Thread::Current(), *Locks::instrument_entrypoints_lock_);
1122 InstrumentQuickAllocEntryPointsLocked();
1123 }
1124
1125 void Instrumentation::UninstrumentQuickAllocEntryPoints() {
1126 MutexLock mu(Thread::Current(), *Locks::instrument_entrypoints_lock_);
1127 UninstrumentQuickAllocEntryPointsLocked();
1128 }
1129
1130 void Instrumentation::InstrumentQuickAllocEntryPointsLocked() {
1131 Locks::instrument_entrypoints_lock_->AssertHeld(Thread::Current());
1132 if (quick_alloc_entry_points_instrumentation_counter_ == 0) {
1133 SetEntrypointsInstrumented(true);
1134 }
1135 ++quick_alloc_entry_points_instrumentation_counter_;
1136 }
1137
1138 void Instrumentation::UninstrumentQuickAllocEntryPointsLocked() {
1139 Locks::instrument_entrypoints_lock_->AssertHeld(Thread::Current());
1140 CHECK_GT(quick_alloc_entry_points_instrumentation_counter_, 0U);
1141 --quick_alloc_entry_points_instrumentation_counter_;
1142 if (quick_alloc_entry_points_instrumentation_counter_ == 0) {
1143 SetEntrypointsInstrumented(false);
1144 }
1145 }
1146
1147 void Instrumentation::ResetQuickAllocEntryPoints() {
1148 Runtime* runtime = Runtime::Current();
1149 if (runtime->IsStarted()) {
1150 MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
1151 runtime->GetThreadList()->ForEach(ResetQuickAllocEntryPointsForThread, nullptr);
1152 }
1153 }
1154
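// Returns a short human-readable description of the kind of entrypoint `code` is (interpreter,
// resolution stub, jit, nterp, oat, ...), used in log messages and check failures.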
1155 std::string Instrumentation::EntryPointString(const void* code) {
1156 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
1157 jit::Jit* jit = Runtime::Current()->GetJit();
1158 if (class_linker->IsQuickToInterpreterBridge(code)) {
1159 return "interpreter";
1160 } else if (class_linker->IsQuickResolutionStub(code)) {
1161 return "resolution";
1162 } else if (jit != nullptr && jit->GetCodeCache()->ContainsPc(code)) {
1163 return "jit";
1164 } else if (code == GetInvokeObsoleteMethodStub()) {
1165 return "obsolete";
1166 } else if (code == interpreter::GetNterpEntryPoint()) {
1167 return "nterp";
1168 } else if (code == interpreter::GetNterpWithClinitEntryPoint()) {
1169 return "nterp with clinit";
1170 } else if (class_linker->IsQuickGenericJniStub(code)) {
1171 return "generic jni";
1172 } else if (Runtime::Current()->GetOatFileManager().ContainsPc(code)) {
1173 return "oat";
1174 } else if (OatQuickMethodHeader::IsStub(reinterpret_cast<const uint8_t*>(code)).value_or(false)) {
1175 return "stub";
1176 }
1177 return "unknown";
1178 }
1179
1180 void Instrumentation::UpdateMethodsCodeImpl(ArtMethod* method, const void* new_code) {
1181 if (!EntryExitStubsInstalled()) {
1182 // Fast path: no instrumentation.
1183 DCHECK(!IsDeoptimized(method));
1184 UpdateEntryPoints(method, new_code);
1185 return;
1186 }
1187
1188 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
1189 if (class_linker->IsQuickToInterpreterBridge(new_code)) {
1190 // It's always OK to update to the interpreter.
1191 UpdateEntryPoints(method, new_code);
1192 return;
1193 }
1194
1195 if (InterpretOnly(method)) {
1196 DCHECK(class_linker->IsQuickToInterpreterBridge(method->GetEntryPointFromQuickCompiledCode()))
1197 << EntryPointString(method->GetEntryPointFromQuickCompiledCode());
1198 // Don't update, stay deoptimized.
1199 return;
1200 }
1201
1202 if (EntryExitStubsInstalled() && !CodeSupportsEntryExitHooks(new_code, method)) {
1203 DCHECK(CodeSupportsEntryExitHooks(method->GetEntryPointFromQuickCompiledCode(), method))
1204 << EntryPointString(method->GetEntryPointFromQuickCompiledCode()) << " "
1205 << method->PrettyMethod();
1206 // If we need entry / exit stubs but the new_code doesn't support entry / exit hooks just skip.
1207 return;
1208 }
1209
1210 // At this point, we can update as asked.
1211 UpdateEntryPoints(method, new_code);
1212 }
1213
1214 void Instrumentation::UpdateNativeMethodsCodeToJitCode(ArtMethod* method, const void* new_code) {
1215 // We don't do any read barrier on `method`'s declaring class in this code, as the JIT might
1216 // enter here on a soon-to-be deleted ArtMethod. Updating the entrypoint is OK though, as
1217 // the ArtMethod is still in memory.
1218 if (EntryExitStubsInstalled() && !CodeSupportsEntryExitHooks(new_code, method)) {
1219 // If the new code doesn't support entry exit hooks but we need them don't update with the new
1220 // code.
1221 return;
1222 }
1223 UpdateEntryPoints(method, new_code);
1224 }
1225
1226 void Instrumentation::UpdateMethodsCode(ArtMethod* method, const void* new_code) {
1227 DCHECK(method->GetDeclaringClass()->IsResolved());
1228 UpdateMethodsCodeImpl(method, new_code);
1229 }
1230
1231 bool Instrumentation::AddDeoptimizedMethod(ArtMethod* method) {
1232 if (IsDeoptimizedMethod(method)) {
1233 // Already in the set. Return.
1234 return false;
1235 }
1236 // Not found. Add it.
1237 deoptimized_methods_.insert(method);
1238 return true;
1239 }
1240
1241 bool Instrumentation::IsDeoptimizedMethod(ArtMethod* method) {
1242 return deoptimized_methods_.find(method) != deoptimized_methods_.end();
1243 }
1244
1245 bool Instrumentation::RemoveDeoptimizedMethod(ArtMethod* method) {
1246 auto it = deoptimized_methods_.find(method);
1247 if (it == deoptimized_methods_.end()) {
1248 return false;
1249 }
1250 deoptimized_methods_.erase(it);
1251 return true;
1252 }
1253
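// Forces `method` to be executed by the interpreter: records it in the deoptimized-methods set,
// points its entrypoint at the quick-to-interpreter bridge (unless it is obsolete or interpreter
// stubs are already installed everywhere), and instruments thread stacks so that callers check
// whether they need to deoptimize.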
1254 void Instrumentation::Deoptimize(ArtMethod* method) {
1255 CHECK(!method->IsNative());
1256 CHECK(!method->IsProxyMethod());
1257 CHECK(method->IsInvokable());
1258
1259 {
1260 Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
1261 bool has_not_been_deoptimized = AddDeoptimizedMethod(method);
1262 CHECK(has_not_been_deoptimized) << "Method " << ArtMethod::PrettyMethod(method)
1263 << " is already deoptimized";
1264 }
1265
1266 if (method->IsObsolete()) {
1267 // If the method was marked as obsolete, it should have `GetInvokeObsoleteMethodStub`
1268 // as its quick entry point.
1269 CHECK_EQ(method->GetEntryPointFromQuickCompiledCode(), GetInvokeObsoleteMethodStub());
1270 return;
1271 }
1272
1273 if (!InterpreterStubsInstalled()) {
1274 UpdateEntryPoints(method, GetQuickToInterpreterBridge());
1275
1276 // Instrument thread stacks to request a check if the caller needs a deoptimization.
1277 // This isn't a strong deopt. We deopt this method if it is still in the deopt methods list.
1278 // If by the time we hit this frame we no longer need a deopt it is safe to continue.
1279 InstrumentAllThreadStacks(/* force_deopt= */ false);
1280 }
1281 CHECK_EQ(method->GetEntryPointFromQuickCompiledCode(), GetQuickToInterpreterBridge());
1282 }
1283
1284 void Instrumentation::Undeoptimize(ArtMethod* method) {
1285 CHECK(!method->IsNative());
1286 CHECK(!method->IsProxyMethod());
1287 CHECK(method->IsInvokable());
1288
1289 {
1290 Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
1291 bool found_and_erased = RemoveDeoptimizedMethod(method);
1292 CHECK(found_and_erased) << "Method " << ArtMethod::PrettyMethod(method)
1293 << " is not deoptimized";
1294 }
1295
1296 // If interpreter stubs are still needed nothing to do.
1297 if (InterpreterStubsInstalled()) {
1298 return;
1299 }
1300
1301 if (method->IsObsolete()) {
1302 // Don't update entry points for obsolete methods. The entrypoint should
1303 // have been set to InvokeObsoleteMethodStub.
1304 DCHECK_EQ(method->GetEntryPointFromQuickCompiledCodePtrSize(kRuntimePointerSize),
1305 GetInvokeObsoleteMethodStub());
1306 return;
1307 }
1308
1309 // We are not using interpreter stubs for deoptimization. Restore the code of the method.
1310 // We still retain interpreter bridge if we need it for other reasons.
1311 if (InterpretOnly(method)) {
1312 UpdateEntryPoints(method, GetQuickToInterpreterBridge());
1313 } else if (method->StillNeedsClinitCheck()) {
1314 UpdateEntryPoints(method, GetQuickResolutionStub());
1315 } else {
1316 UpdateEntryPoints(method, GetMaybeInstrumentedCodeForInvoke(method));
1317 }
1318
1319 // If there is no deoptimized method left, we can restore the stack of each thread.
1320 if (!EntryExitStubsInstalled()) {
1321 MaybeRestoreInstrumentationStack();
1322 }
1323 }
1324
1325 bool Instrumentation::IsDeoptimizedMethodsEmpty() const {
1326 return deoptimized_methods_.empty();
1327 }
1328
1329 bool Instrumentation::IsDeoptimized(ArtMethod* method) {
1330 DCHECK(method != nullptr);
1331 return IsDeoptimizedMethod(method);
1332 }
1333
1334 void Instrumentation::DisableDeoptimization(const char* key, bool try_switch_to_non_debuggable) {
1335 // Remove any instrumentation support added for deoptimization.
1336 ConfigureStubs(key, InstrumentationLevel::kInstrumentNothing, try_switch_to_non_debuggable);
1337 Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
1338 // Undeoptimize selected methods.
1339 while (true) {
1340 ArtMethod* method;
1341 {
1342 if (deoptimized_methods_.empty()) {
1343 break;
1344 }
1345 method = *deoptimized_methods_.begin();
1346 CHECK(method != nullptr);
1347 }
1348 Undeoptimize(method);
1349 }
1350 }
1351
1352 void Instrumentation::MaybeSwitchRuntimeDebugState(Thread* self) {
1353 Runtime* runtime = Runtime::Current();
1354 // Return early if runtime is shutting down.
1355 if (runtime->IsShuttingDown(self)) {
1356 return;
1357 }
1358
1359 // Don't switch the state if we started off as JavaDebuggable or if we still need entry / exit
1360 // hooks for other reasons.
1361 if (EntryExitStubsInstalled() || runtime->IsJavaDebuggableAtInit()) {
1362 return;
1363 }
1364
1365 art::jit::Jit* jit = runtime->GetJit();
1366 if (jit != nullptr) {
1367 jit->GetCodeCache()->InvalidateAllCompiledCode();
1368 jit->GetJitCompiler()->SetDebuggableCompilerOption(false);
1369 }
1370 runtime->SetRuntimeDebugState(art::Runtime::RuntimeDebugState::kNonJavaDebuggable);
1371 }
1372
1373 void Instrumentation::DeoptimizeEverything(const char* key) {
1374 // We want to switch to non-debuggable only when the debugger / profile tools are detaching.
1375 // This call is used for supporting debug-related features (e.g. single stepping across all
1376 // threads) while the debugger is still connected.
1377 ConfigureStubs(key,
1378 InstrumentationLevel::kInstrumentWithInterpreter,
1379 /*try_switch_to_non_debuggable=*/false);
1380 }
1381
1382 void Instrumentation::UndeoptimizeEverything(const char* key) {
1383 CHECK(InterpreterStubsInstalled());
1384 // We want to switch to non-debuggable only when the debugger / profile tools are detaching.
1385 // This is used when we no longer need to run in the interpreter. The debugger is still connected,
1386 // so don't switch the runtime. We use "DisableDeoptimization" when detaching the debugger.
1387 ConfigureStubs(key,
1388 InstrumentationLevel::kInstrumentNothing,
1389 /*try_switch_to_non_debuggable=*/false);
1390 }
1391
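// Enables method tracing for `key`: installs either full interpreter stubs or only entry / exit
// hooks depending on `needs_interpreter`, then reports MethodEntered events to the listener for
// every method already on each thread's stack so the trace starts with the frames that are
// currently executing.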
void Instrumentation::EnableMethodTracing(const char* key,
                                          InstrumentationListener* listener,
                                          bool needs_interpreter) {
  InstrumentationLevel level;
  if (needs_interpreter) {
    level = InstrumentationLevel::kInstrumentWithInterpreter;
  } else {
    level = InstrumentationLevel::kInstrumentWithEntryExitHooks;
  }
  // We are enabling method tracing here and need to stay in debuggable.
  ConfigureStubs(key, level, /*try_switch_to_non_debuggable=*/false);

  MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
  for (Thread* thread : Runtime::Current()->GetThreadList()->GetList()) {
    ReportMethodEntryForOnStackMethods(listener, thread);
  }
}

void Instrumentation::DisableMethodTracing(const char* key) {
  // We no longer need a debuggable runtime since we are stopping method tracing. If no other
  // debugger / profiling tools are active, switch back to non-debuggable.
  ConfigureStubs(key,
                 InstrumentationLevel::kInstrumentNothing,
                 /*try_switch_to_non_debuggable=*/true);
}

const void* Instrumentation::GetCodeForInvoke(ArtMethod* method) {
  // This is called by instrumentation and resolution trampolines, which should never see proxy
  // methods.
  DCHECK(!method->IsProxyMethod()) << method->PrettyMethod();
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  const void* code = method->GetEntryPointFromQuickCompiledCodePtrSize(kRuntimePointerSize);
  // If the entrypoint is neither the resolution stub nor the interpreter bridge, just return it,
  // assuming it's the most optimized code available.
  if (!class_linker->IsQuickResolutionStub(code) &&
      !class_linker->IsQuickToInterpreterBridge(code)) {
    return code;
  }

  if (InterpretOnly(method)) {
    // If we're forced into the interpreter, just use it.
    return GetQuickToInterpreterBridge();
  }

  return GetOptimizedCodeFor(method);
}

const void* Instrumentation::GetMaybeInstrumentedCodeForInvoke(ArtMethod* method) {
  // This is called by resolution trampolines, which should never see proxy methods.
  DCHECK(!method->IsProxyMethod()) << method->PrettyMethod();
  const void* code = GetCodeForInvoke(method);
  if (EntryExitStubsInstalled() && !CodeSupportsEntryExitHooks(code, method)) {
    return method->IsNative() ? GetQuickGenericJniStub() : GetQuickToInterpreterBridge();
  }
  return code;
}

void Instrumentation::MethodEnterEventImpl(Thread* thread, ArtMethod* method) const {
  DCHECK(!method->IsRuntimeMethod());
  if (HasMethodEntryListeners()) {
    for (InstrumentationListener* listener : method_entry_slow_listeners_) {
      if (listener != nullptr) {
        listener->MethodEntered(thread, method);
      }
    }
    for (InstrumentationListener* listener : method_entry_fast_trace_listeners_) {
      if (listener != nullptr) {
        listener->MethodEntered(thread, method);
      }
    }
  }
}

template <>
void Instrumentation::MethodExitEventImpl(Thread* thread,
                                          ArtMethod* method,
                                          OptionalFrame frame,
                                          MutableHandle<mirror::Object>& return_value) const {
  if (HasMethodExitListeners()) {
    for (InstrumentationListener* listener : method_exit_slow_listeners_) {
      if (listener != nullptr) {
        listener->MethodExited(thread, method, frame, return_value);
      }
    }
    for (InstrumentationListener* listener : method_exit_fast_trace_listeners_) {
      if (listener != nullptr) {
        listener->MethodExited(thread, method, frame, return_value);
      }
    }
  }
}

template <>
void Instrumentation::MethodExitEventImpl(Thread* thread,
                                          ArtMethod* method,
                                          OptionalFrame frame,
                                          JValue& return_value) const {
  if (HasMethodExitListeners()) {
    Thread* self = Thread::Current();
    StackHandleScope<1> hs(self);
    if (method->GetInterfaceMethodIfProxy(kRuntimePointerSize)->GetReturnTypePrimitive() !=
        Primitive::kPrimNot) {
      for (InstrumentationListener* listener : method_exit_slow_listeners_) {
        if (listener != nullptr) {
          listener->MethodExited(thread, method, frame, return_value);
        }
      }
      for (InstrumentationListener* listener : method_exit_fast_trace_listeners_) {
        if (listener != nullptr) {
          listener->MethodExited(thread, method, frame, return_value);
        }
      }
    } else {
      MutableHandle<mirror::Object> ret(hs.NewHandle(return_value.GetL()));
      MethodExitEventImpl(thread, method, frame, ret);
      return_value.SetL(ret.Get());
    }
  }
}

void Instrumentation::MethodUnwindEvent(Thread* thread,
                                        ArtMethod* method,
                                        uint32_t dex_pc) const {
  if (HasMethodUnwindListeners()) {
    for (InstrumentationListener* listener : method_unwind_listeners_) {
      if (listener != nullptr) {
        listener->MethodUnwind(thread, method, dex_pc);
      }
    }
  }
}

void Instrumentation::DexPcMovedEventImpl(Thread* thread,
                                          ObjPtr<mirror::Object> this_object,
                                          ArtMethod* method,
                                          uint32_t dex_pc) const {
  Thread* self = Thread::Current();
  StackHandleScope<1> hs(self);
  Handle<mirror::Object> thiz(hs.NewHandle(this_object));
  for (InstrumentationListener* listener : dex_pc_listeners_) {
    if (listener != nullptr) {
      listener->DexPcMoved(thread, thiz, method, dex_pc);
    }
  }
}

void Instrumentation::BranchImpl(Thread* thread,
                                 ArtMethod* method,
                                 uint32_t dex_pc,
                                 int32_t offset) const {
  for (InstrumentationListener* listener : branch_listeners_) {
    if (listener != nullptr) {
      listener->Branch(thread, method, dex_pc, offset);
    }
  }
}

void Instrumentation::WatchedFramePopImpl(Thread* thread, const ShadowFrame& frame) const {
  for (InstrumentationListener* listener : watched_frame_pop_listeners_) {
    if (listener != nullptr) {
      listener->WatchedFramePop(thread, frame);
    }
  }
}

void Instrumentation::FieldReadEventImpl(Thread* thread,
                                         ObjPtr<mirror::Object> this_object,
                                         ArtMethod* method,
                                         uint32_t dex_pc,
                                         ArtField* field) const {
  Thread* self = Thread::Current();
  StackHandleScope<1> hs(self);
  Handle<mirror::Object> thiz(hs.NewHandle(this_object));
  for (InstrumentationListener* listener : field_read_listeners_) {
    if (listener != nullptr) {
      listener->FieldRead(thread, thiz, method, dex_pc, field);
    }
  }
}

void Instrumentation::FieldWriteEventImpl(Thread* thread,
                                          ObjPtr<mirror::Object> this_object,
                                          ArtMethod* method,
                                          uint32_t dex_pc,
                                          ArtField* field,
                                          const JValue& field_value) const {
  Thread* self = Thread::Current();
  StackHandleScope<2> hs(self);
  Handle<mirror::Object> thiz(hs.NewHandle(this_object));
  if (field->IsPrimitiveType()) {
    for (InstrumentationListener* listener : field_write_listeners_) {
      if (listener != nullptr) {
        listener->FieldWritten(thread, thiz, method, dex_pc, field, field_value);
      }
    }
  } else {
    Handle<mirror::Object> val(hs.NewHandle(field_value.GetL()));
    for (InstrumentationListener* listener : field_write_listeners_) {
      if (listener != nullptr) {
        listener->FieldWritten(thread, thiz, method, dex_pc, field, val);
      }
    }
  }
}

void Instrumentation::ExceptionThrownEvent(Thread* thread,
                                           ObjPtr<mirror::Throwable> exception_object) const {
  Thread* self = Thread::Current();
  StackHandleScope<1> hs(self);
  Handle<mirror::Throwable> h_exception(hs.NewHandle(exception_object));
  if (HasExceptionThrownListeners()) {
    DCHECK_EQ(thread->GetException(), h_exception.Get());
    thread->ClearException();
    for (InstrumentationListener* listener : exception_thrown_listeners_) {
      if (listener != nullptr) {
        listener->ExceptionThrown(thread, h_exception);
      }
    }
    // See b/65049545 for discussion about this behavior.
    thread->AssertNoPendingException();
    thread->SetException(h_exception.Get());
  }
}

void Instrumentation::ExceptionHandledEvent(Thread* thread,
                                            ObjPtr<mirror::Throwable> exception_object) const {
  Thread* self = Thread::Current();
  StackHandleScope<1> hs(self);
  Handle<mirror::Throwable> h_exception(hs.NewHandle(exception_object));
  if (HasExceptionHandledListeners()) {
    // We should have cleared the exception so that callers can detect a new one.
    DCHECK(thread->GetException() == nullptr);
    for (InstrumentationListener* listener : exception_handled_listeners_) {
      if (listener != nullptr) {
        listener->ExceptionHandled(thread, h_exception);
      }
    }
  }
}

DeoptimizationMethodType Instrumentation::GetDeoptimizationMethodType(ArtMethod* method) {
  if (method->IsRuntimeMethod()) {
    // Certain runtime methods have a strict requirement on whether the dex instruction
    // should be re-executed upon deoptimization.
    if (method == Runtime::Current()->GetCalleeSaveMethod(
        CalleeSaveType::kSaveEverythingForClinit)) {
      return DeoptimizationMethodType::kKeepDexPc;
    }
    if (method == Runtime::Current()->GetCalleeSaveMethod(
        CalleeSaveType::kSaveEverythingForSuspendCheck)) {
      return DeoptimizationMethodType::kKeepDexPc;
    }
  }
  return DeoptimizationMethodType::kDefault;
}

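// Reconstructs the method's return value from the raw register results of a quick frame: the
// return shorty selects between the GPR and FPR result and tells the caller (via `is_ref`)
// whether the value is a reference that may need to be handled as a GC root.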
JValue Instrumentation::GetReturnValue(ArtMethod* method,
                                       bool* is_ref,
                                       uint64_t* gpr_result,
                                       uint64_t* fpr_result) {
  uint32_t length;
  const PointerSize pointer_size = Runtime::Current()->GetClassLinker()->GetImagePointerSize();

  // A runtime method does not call into MethodExitEvent(), so there should not be a suspension
  // point below.
  ScopedAssertNoThreadSuspension ants(__FUNCTION__, method->IsRuntimeMethod());
  DCHECK(!method->IsRuntimeMethod());
  char return_shorty = method->GetInterfaceMethodIfProxy(pointer_size)->GetShorty(&length)[0];

  *is_ref = return_shorty == '[' || return_shorty == 'L';
  JValue return_value;
  if (return_shorty == 'V') {
    return_value.SetJ(0);
  } else if (return_shorty == 'F' || return_shorty == 'D') {
    return_value.SetJ(*fpr_result);
  } else {
    return_value.SetJ(*gpr_result);
  }
  return return_value;
}

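// If the caller of the current runtime frame needs to be deoptimized, records the pending return
// value in a deoptimization context on the thread and sets the special deoptimization exception,
// so that returning from the runtime method triggers the actual deoptimization. Returns true if
// a deoptimization was requested.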
bool Instrumentation::PushDeoptContextIfNeeded(Thread* self,
                                               DeoptimizationMethodType deopt_type,
                                               bool is_ref,
                                               const JValue& return_value)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  if (self->IsExceptionPending()) {
    return false;
  }

  ArtMethod** sp = self->GetManagedStack()->GetTopQuickFrame();
  DCHECK(sp != nullptr && (*sp)->IsRuntimeMethod());
  if (!ShouldDeoptimizeCaller(self, sp)) {
    return false;
  }

  // TODO(mythria): The current deopt behaviour is that we just re-execute the
  // alloc instruction, so we don't need the return value. For instrumentation
  // related deopts we don't actually need to re-execute and could reuse the
  // result we got here. Since this is a debug-only feature it is not very
  // important, but consider reusing the result in the future.
  self->PushDeoptimizationContext(
      return_value, is_ref, nullptr, /* from_code= */ false, deopt_type);
  self->SetException(Thread::GetDeoptimizationException());
  return true;
}

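// Eagerly deoptimizes the caller of the current runtime frame when an async exception is pending
// or the caller's frame requires it: pushes a deoptimization context with the given return value
// and transfers control to artDeoptimize(), which does not return.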
void Instrumentation::DeoptimizeIfNeeded(Thread* self,
                                         ArtMethod** sp,
                                         DeoptimizationMethodType type,
                                         JValue return_value,
                                         bool is_reference) {
  if (self->IsAsyncExceptionPending() || ShouldDeoptimizeCaller(self, sp)) {
    self->PushDeoptimizationContext(return_value,
                                    is_reference,
                                    nullptr,
                                    /* from_code= */ false,
                                    type);
    // This is requested from suspend points or when returning from runtime methods, so exit
    // callbacks wouldn't have run yet. Don't skip the method exit callbacks.
    artDeoptimize(self, /* skip_method_exit_callbacks= */ false);
  }
}

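// Returns true if `method` must be executed by the switch ("slow") interpreter, e.g. because
// interpreter stubs are installed globally, the method is individually deoptimized, the thread is
// forced into the interpreter, or the declaring class has become obsolete after a redefinition.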
bool Instrumentation::NeedsSlowInterpreterForMethod(Thread* self, ArtMethod* method) {
  return (method != nullptr) &&
         (InterpreterStubsInstalled() ||
          IsDeoptimized(method) ||
          self->IsForceInterpreter() ||
          // NB: Since structurally obsolete compiled methods might have the offsets of
          // methods/fields compiled in, we need to go back to the interpreter whenever we
          // hit them.
          method->GetDeclaringClass()->IsObsoleteObject() ||
          Dbg::IsForcedInterpreterNeededForUpcall(self, method));
}

bool Instrumentation::ShouldDeoptimizeCaller(Thread* self, ArtMethod** sp) {
  // When exit stubs aren't called we don't need to check for any instrumentation-related
  // deoptimizations.
  if (!RunExitHooks()) {
    return false;
  }

  ArtMethod* runtime_method = *sp;
  DCHECK(runtime_method->IsRuntimeMethod());
  QuickMethodFrameInfo frame_info = Runtime::Current()->GetRuntimeMethodFrameInfo(runtime_method);
  return ShouldDeoptimizeCaller(self, sp, frame_info.FrameSizeInBytes());
}

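// Checks whether the compiled frame that called into the runtime method at `sp` must be
// deoptimized. The caller's ArtMethod* is loaded from the stack slot just above the runtime
// method's frame (sp + frame_size) and the caller's return PC from the slot just below it; both
// are used to decide whether the caller can keep running compiled code.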
bool Instrumentation::ShouldDeoptimizeCaller(Thread* self, ArtMethod** sp, size_t frame_size) {
  uintptr_t caller_sp = reinterpret_cast<uintptr_t>(sp) + frame_size;
  ArtMethod* caller = *(reinterpret_cast<ArtMethod**>(caller_sp));
  uintptr_t caller_pc_addr = reinterpret_cast<uintptr_t>(sp) + (frame_size - sizeof(void*));
  uintptr_t caller_pc = *reinterpret_cast<uintptr_t*>(caller_pc_addr);

  if (caller == nullptr ||
      caller->IsNative() ||
      caller->IsRuntimeMethod()) {
    // We need to check for a deoptimization here because when a redefinition happens it is
    // not safe to use any compiled code because the field offsets might change. For native
    // methods, we don't embed any field offsets, so there is no need to check for a
    // deoptimization. If the caller is null we don't need to do anything. This can happen when
    // the caller is being interpreted by the switch interpreter (when called from
    // artQuickToInterpreterBridge) / during shutdown / early startup.
    return false;
  }

  bool needs_deopt = NeedsSlowInterpreterForMethod(self, caller);

  // Non java-debuggable apps don't support redefinition, so it isn't necessary to check if the
  // frame needs to be deoptimized. Even in debuggable apps, we only need this check when a
  // redefinition has actually happened, which is indicated by the IsDeoptCheckRequired flag. We
  // also want to avoid fetching the method header when we need a deopt anyway.
  if (Runtime::Current()->IsJavaDebuggable() && !needs_deopt && self->IsDeoptCheckRequired()) {
    const OatQuickMethodHeader* header = caller->GetOatQuickMethodHeader(caller_pc);
    if (header != nullptr && header->HasShouldDeoptimizeFlag()) {
      DCHECK(header->IsOptimized());
      uint8_t* should_deopt_flag_addr =
          reinterpret_cast<uint8_t*>(caller_sp) + header->GetShouldDeoptimizeFlagOffset();
      if ((*should_deopt_flag_addr &
           static_cast<uint8_t>(DeoptimizeFlagValue::kForceDeoptForRedefinition)) != 0) {
        needs_deopt = true;
      }
    }
  }

  if (needs_deopt) {
    if (!Runtime::Current()->IsAsyncDeoptimizeable(caller, caller_pc)) {
      LOG(WARNING) << "Got a deoptimization request on un-deoptimizable method "
                   << caller->PrettyMethod();
      return false;
    }
    return true;
  }

  return false;
}

}  // namespace instrumentation
}  // namespace art