/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "quick_exception_handler.h"

#include <ios>
#include <queue>
#include <sstream>

#include "arch/context.h"
#include "art_method-inl.h"
#include "base/array_ref.h"
#include "base/globals.h"
#include "base/logging.h"  // For VLOG_IS_ON.
#include "base/pointer_size.h"
#include "base/systrace.h"
#include "dex/dex_file_types.h"
#include "dex/dex_instruction.h"
#include "dex/dex_instruction-inl.h"
#include "entrypoints/entrypoint_utils.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "entrypoints/runtime_asm_entrypoints.h"
#include "handle_scope-inl.h"
#include "interpreter/shadow_frame-inl.h"
#include "jit/jit.h"
#include "jit/jit_code_cache.h"
#include "mirror/class-inl.h"
#include "mirror/class_loader.h"
#include "mirror/throwable.h"
#include "nterp_helpers.h"
#include "oat/oat_quick_method_header.h"
#include "oat/stack_map.h"
#include "stack.h"

namespace art HIDDEN {

static constexpr bool kDebugExceptionDelivery = false;
static constexpr size_t kInvalidFrameDepth = 0xffffffff;

QuickExceptionHandler::QuickExceptionHandler(Thread* self, bool is_deoptimization)
    : self_(self),
      context_(self->GetLongJumpContext()),
      is_deoptimization_(is_deoptimization),
      handler_quick_frame_(nullptr),
      handler_quick_frame_pc_(0),
      handler_method_header_(nullptr),
      handler_quick_arg0_(0),
      clear_exception_(false),
      handler_frame_depth_(kInvalidFrameDepth),
      full_fragment_done_(false) {}

// Finds catch handler.
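// For example, when a callee throws an exception that a caller's try/catch
// should handle, this visitor walks outward from the throwing frame, checking
// each method's exception table until a matching handler (or the upcall) is
// found.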
class CatchBlockStackVisitor final : public StackVisitor {
 public:
  CatchBlockStackVisitor(Thread* self,
                         Context* context,
                         Handle<mirror::Throwable>* exception,
                         QuickExceptionHandler* exception_handler,
                         uint32_t skip_frames,
                         bool skip_top_unwind_callback)
      REQUIRES_SHARED(Locks::mutator_lock_)
      : StackVisitor(self, context, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
        exception_(exception),
        exception_handler_(exception_handler),
        skip_frames_(skip_frames),
        skip_unwind_callback_(skip_top_unwind_callback) {
    DCHECK_IMPLIES(skip_unwind_callback_, skip_frames_ == 0);
  }

  bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
    ArtMethod* method = GetMethod();
    exception_handler_->SetHandlerFrameDepth(GetFrameDepth());
    if (method == nullptr) {
      DCHECK_EQ(skip_frames_, 0u)
          << "We tried to skip an upcall! We should have returned to the upcall to finish delivery";
      // This is the upcall, we remember the frame and last pc so that we may long jump to them.
      exception_handler_->SetHandlerQuickFramePc(GetCurrentQuickFramePc());
      exception_handler_->SetHandlerQuickFrame(GetCurrentQuickFrame());
      return false;  // End stack walk.
    }
    if (skip_frames_ != 0) {
      skip_frames_--;
      return true;
    }
    if (method->IsRuntimeMethod()) {
      // Ignore callee save method.
      DCHECK(method->IsCalleeSaveMethod());
      return true;
    }
    bool continue_stack_walk = HandleTryItems(method);
    // Collect methods for which the MethodUnwind callback needs to be invoked. The callback can
    // potentially throw, so we want to invoke these only after we have found the catch block.
    // We stop the stack walk when we find the catch block; if we are ending the stack walk we
    // don't have to unwind this method, so don't record it.
    if (continue_stack_walk && !skip_unwind_callback_) {
      // The unwind callback is only skipped when a method exit callback has thrown an exception.
      // In that case, the runtime method (artMethodExitHook) is on top of the stack and the
      // second frame is the method for which the method exit callback was called.
      DCHECK_IMPLIES(skip_unwind_callback_, GetFrameDepth() == 2);
      unwound_methods_.push(method);
    }
    skip_unwind_callback_ = false;
    return continue_stack_walk;
  }

  std::queue<ArtMethod*>& GetUnwoundMethods() {
    return unwound_methods_;
  }

 private:
  bool HandleTryItems(ArtMethod* method)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    uint32_t dex_pc = dex::kDexNoIndex;
    if (!method->IsNative()) {
      dex_pc = GetDexPc();
    }
    if (dex_pc != dex::kDexNoIndex) {
      bool clear_exception = false;
      StackHandleScope<1> hs(GetThread());
      Handle<mirror::Class> to_find(hs.NewHandle((*exception_)->GetClass()));
      uint32_t found_dex_pc = method->FindCatchBlock(to_find, dex_pc, &clear_exception);
      exception_handler_->SetClearException(clear_exception);
      if (found_dex_pc != dex::kDexNoIndex) {
        exception_handler_->SetHandlerDexPcList(ComputeDexPcList(found_dex_pc));
        uint32_t stack_map_row = -1;
        exception_handler_->SetHandlerQuickFramePc(
            GetCurrentOatQuickMethodHeader()->ToNativeQuickPcForCatchHandlers(
                method, exception_handler_->GetHandlerDexPcList(), &stack_map_row));
        exception_handler_->SetCatchStackMapRow(stack_map_row);
        exception_handler_->SetHandlerQuickFrame(GetCurrentQuickFrame());
        exception_handler_->SetHandlerMethodHeader(GetCurrentOatQuickMethodHeader());
        return false;  // End stack walk.
      } else if (UNLIKELY(GetThread()->HasDebuggerShadowFrames())) {
        // We are going to unwind this frame. Did we prepare a shadow frame for debugging?
        size_t frame_id = GetFrameId();
        ShadowFrame* frame = GetThread()->FindDebuggerShadowFrame(frame_id);
        if (frame != nullptr) {
          // We will not execute this shadow frame so we can safely deallocate it.
          GetThread()->RemoveDebuggerShadowFrameMapping(frame_id);
          ShadowFrame::DeleteDeoptimizedFrame(frame);
        }
      }
    }
    return true;  // Continue stack walk.
  }

  // The exception we're looking for the catch block of.
  Handle<mirror::Throwable>* exception_;
  // The quick exception handler we're visiting for.
  QuickExceptionHandler* const exception_handler_;
  // The number of frames to skip searching for catches in.
  uint32_t skip_frames_;
  // The list of methods we would skip to reach the catch block. We record these to call
  // MethodUnwind callbacks.
  std::queue<ArtMethod*> unwound_methods_;
  // Specifies if the unwind callback should be ignored for the method at the top of the stack.
  bool skip_unwind_callback_;

  DISALLOW_COPY_AND_ASSIGN(CatchBlockStackVisitor);
};

// Finds the appropriate exception catch after calling all method exit instrumentation functions.
// Note that this might change the exception being thrown. If is_method_exit_exception is true,
// skip the method unwind call for the method on top of the stack as the exception was thrown by
// a method exit callback.
void QuickExceptionHandler::FindCatch(ObjPtr<mirror::Throwable> exception,
                                      bool is_method_exit_exception) {
  DCHECK(!is_deoptimization_);
  instrumentation::Instrumentation* instr = Runtime::Current()->GetInstrumentation();
  // The number of total frames we have so far popped.
  uint32_t already_popped = 0;
  bool popped_to_top = true;
  StackHandleScope<1> hs(self_);
  MutableHandle<mirror::Throwable> exception_ref(hs.NewHandle(exception));
  bool skip_top_unwind_callback = is_method_exit_exception;
  // Sending the instrumentation events (done by the InstrumentationStackPopper) can cause new
  // exceptions to be thrown, which will override the current exception. Therefore we need to
  // perform the search for a catch in a loop until we have successfully popped all the way to a
  // catch or to the top of the stack.
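  // For example, if a MethodUnwind callback throws, the new exception replaces
  // exception_ref and the next iteration restarts the search, skipping the
  // already_popped frames that were unwound before the callback ran.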
  do {
    if (kDebugExceptionDelivery) {
      ObjPtr<mirror::String> msg = exception_ref->GetDetailMessage();
      std::string str_msg(msg != nullptr ? msg->ToModifiedUtf8() : "");
      self_->DumpStack(LOG_STREAM(INFO) << "Delivering exception: " << exception_ref->PrettyTypeOf()
                                        << ": " << str_msg << "\n");
    }

    // Walk the stack to find catch handler.
    CatchBlockStackVisitor visitor(self_,
                                   context_,
                                   &exception_ref,
                                   this,
                                   /*skip_frames=*/already_popped,
                                   skip_top_unwind_callback);
    visitor.WalkStack(true);
    skip_top_unwind_callback = false;

    uint32_t new_pop_count = handler_frame_depth_;
    DCHECK_GE(new_pop_count, already_popped);
    already_popped = new_pop_count;

    if (kDebugExceptionDelivery) {
      if (*handler_quick_frame_ == nullptr) {
        LOG(INFO) << "Handler is upcall";
      }
      if (GetHandlerMethod() != nullptr) {
        const DexFile* dex_file = GetHandlerMethod()->GetDexFile();
        DCHECK(handler_dex_pc_list_.has_value());
        DCHECK_GE(handler_dex_pc_list_->size(), 1u);
        int line_number = annotations::GetLineNumFromPC(
            dex_file, GetHandlerMethod(), handler_dex_pc_list_->front());

        // We may have an inlined method. If so, we can add some extra logging.
        std::stringstream ss;
        ArtMethod* maybe_inlined_method = visitor.GetMethod();
        if (maybe_inlined_method != GetHandlerMethod()) {
          const DexFile* inlined_dex_file = maybe_inlined_method->GetDexFile();
          DCHECK_GE(handler_dex_pc_list_->size(), 2u);
          int inlined_line_number = annotations::GetLineNumFromPC(
              inlined_dex_file, maybe_inlined_method, handler_dex_pc_list_->back());
          ss << " which ends up calling inlined method " << maybe_inlined_method->PrettyMethod()
             << " (line: " << inlined_line_number << ")";
        }

        LOG(INFO) << "Handler: " << GetHandlerMethod()->PrettyMethod() << " (line: "
                  << line_number << ")" << ss.str();
      }
    }
    // Exception was cleared as part of delivery.
    DCHECK(!self_->IsExceptionPending());
    // If the handler is in optimized code, we need to set the catch environment.
    if (*handler_quick_frame_ != nullptr &&
        handler_method_header_ != nullptr &&
        handler_method_header_->IsOptimized()) {
      SetCatchEnvironmentForOptimizedHandler(&visitor);
    }
    popped_to_top = instr->ProcessMethodUnwindCallbacks(self_,
                                                        visitor.GetUnwoundMethods(),
                                                        exception_ref);
  } while (!popped_to_top);

  if (!clear_exception_) {
    // Put exception back in root set with clear throw location.
    self_->SetException(exception_ref.Get());
  }
}

static VRegKind ToVRegKind(DexRegisterLocation::Kind kind) {
  // Slightly hacky since we cannot map DexRegisterLocationKind and VRegKind
  // one to one. However, StackVisitor::GetVRegFromOptimizedCode only needs to
  // distinguish between core/FPU registers and low/high bits on 64-bit.
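  // For example, a 64-bit value split across two 32-bit halves is fetched as
  // two separate reads, selected by the Lo/Hi kinds returned below.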
  switch (kind) {
    case DexRegisterLocation::Kind::kConstant:
    case DexRegisterLocation::Kind::kInStack:
      // VRegKind is ignored.
      return VRegKind::kUndefined;

    case DexRegisterLocation::Kind::kInRegister:
      // Selects core register. For 64-bit registers, selects low 32 bits.
      return VRegKind::kLongLoVReg;

    case DexRegisterLocation::Kind::kInRegisterHigh:
      // Selects core register. For 64-bit registers, selects high 32 bits.
      return VRegKind::kLongHiVReg;

    case DexRegisterLocation::Kind::kInFpuRegister:
      // Selects FPU register. For 64-bit registers, selects low 32 bits.
      return VRegKind::kDoubleLoVReg;

    case DexRegisterLocation::Kind::kInFpuRegisterHigh:
      // Selects FPU register. For 64-bit registers, selects high 32 bits.
      return VRegKind::kDoubleHiVReg;

    default:
      LOG(FATAL) << "Unexpected vreg location " << kind;
      UNREACHABLE();
  }
}

void QuickExceptionHandler::SetCatchEnvironmentForOptimizedHandler(StackVisitor* stack_visitor) {
  DCHECK(!is_deoptimization_);
  DCHECK(*handler_quick_frame_ != nullptr) << "Method should not be called on upcall exceptions";
  DCHECK(GetHandlerMethod() != nullptr && handler_method_header_->IsOptimized());

  if (kDebugExceptionDelivery) {
    self_->DumpStack(LOG_STREAM(INFO) << "Setting catch phis: ");
  }

  CodeInfo code_info(handler_method_header_);

  // Find stack map of the catch block.
  ArrayRef<const uint32_t> dex_pc_list = GetHandlerDexPcList();
  DCHECK_GE(dex_pc_list.size(), 1u);
  StackMap catch_stack_map = code_info.GetStackMapAt(GetCatchStackMapRow());
  DCHECK(catch_stack_map.IsValid());
  DCHECK_EQ(catch_stack_map.Row(), code_info.GetCatchStackMapForDexPc(dex_pc_list).Row());
  const uint32_t catch_depth = dex_pc_list.size() - 1;
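  // The dex pc list has one entry per inline level (outermost first), so its
  // length minus one is the inline depth of the catch handler.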
  const size_t number_of_registers = stack_visitor->GetNumberOfRegisters(&code_info, catch_depth);
  DexRegisterMap catch_vreg_map =
      code_info.GetDexRegisterMapOf(catch_stack_map, /* first= */ 0, number_of_registers);

  if (!catch_vreg_map.HasAnyLiveDexRegisters()) {
    return;
  }

  // Find stack map of the throwing instruction.
  StackMap throw_stack_map =
      code_info.GetStackMapForNativePcOffset(stack_visitor->GetNativePcOffset());
  DCHECK(throw_stack_map.IsValid());
  const uint32_t throw_depth = stack_visitor->InlineDepth();
  DCHECK_EQ(throw_depth, catch_depth);
  DexRegisterMap throw_vreg_map =
      code_info.GetDexRegisterMapOf(throw_stack_map, /* first= */ 0, number_of_registers);
  DCHECK_EQ(throw_vreg_map.size(), catch_vreg_map.size());

  // First vreg that is part of the catch's environment.
  const size_t catch_vreg_start = catch_depth == 0
    ? 0
    : stack_visitor->GetNumberOfRegisters(&code_info, catch_depth - 1);
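  // For example, with one level of inlining (catch_depth == 1), vregs
  // [0, catch_vreg_start) belong to the outer frame's environment and the
  // catch block's own vregs begin at catch_vreg_start.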

  // We don't need to copy anything in the parent's environment.
  for (size_t vreg = 0; vreg < catch_vreg_start; ++vreg) {
    DexRegisterLocation::Kind catch_location_kind = catch_vreg_map[vreg].GetKind();
    DCHECK(catch_location_kind == DexRegisterLocation::Kind::kNone ||
           catch_location_kind == DexRegisterLocation::Kind::kConstant ||
           catch_location_kind == DexRegisterLocation::Kind::kInStack)
        << "Unexpected catch_location_kind: " << catch_location_kind;
  }

  // Copy values between the throw and the catch.
  for (size_t vreg = catch_vreg_start; vreg < catch_vreg_map.size(); ++vreg) {
    DexRegisterLocation::Kind catch_location_kind = catch_vreg_map[vreg].GetKind();
    if (catch_location_kind == DexRegisterLocation::Kind::kNone) {
      continue;
    }

    // Consistency checks.
    DCHECK_EQ(catch_location_kind, DexRegisterLocation::Kind::kInStack);
    uint32_t vreg_value;
    VRegKind vreg_kind = ToVRegKind(throw_vreg_map[vreg].GetKind());
    DCHECK_NE(vreg_kind, kReferenceVReg)
        << "The fast path in GetVReg doesn't expect a kReferenceVReg.";

    // Get vreg value from its current location.
    bool get_vreg_success = stack_visitor->GetVReg(stack_visitor->GetMethod(),
                                                   vreg,
                                                   vreg_kind,
                                                   &vreg_value,
                                                   throw_vreg_map[vreg],
                                                   /* need_full_register_list= */ true);
    CHECK(get_vreg_success) << "VReg " << vreg << " was optimized out ("
                            << "method=" << ArtMethod::PrettyMethod(stack_visitor->GetMethod())
                            << ", dex_pc=" << stack_visitor->GetDexPc() << ", "
                            << "native_pc_offset=" << stack_visitor->GetNativePcOffset() << ")";

    // Copy value to the catch phi's stack slot.
    int32_t slot_offset = catch_vreg_map[vreg].GetStackOffsetInBytes();
    ArtMethod** frame_top = stack_visitor->GetCurrentQuickFrame();
    uint8_t* slot_address = reinterpret_cast<uint8_t*>(frame_top) + slot_offset;
    uint32_t* slot_ptr = reinterpret_cast<uint32_t*>(slot_address);
    *slot_ptr = vreg_value;
  }
}

// Prepares deoptimization.
class DeoptimizeStackVisitor final : public StackVisitor {
 public:
  DeoptimizeStackVisitor(Thread* self,
                         Context* context,
                         QuickExceptionHandler* exception_handler,
                         bool single_frame,
                         bool skip_method_exit_callbacks) REQUIRES_SHARED(Locks::mutator_lock_)
      : StackVisitor(self, context, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
        exception_handler_(exception_handler),
        prev_shadow_frame_(nullptr),
        bottom_shadow_frame_(nullptr),
        stacked_shadow_frame_pushed_(false),
        single_frame_deopt_(single_frame),
        single_frame_done_(false),
        single_frame_deopt_method_(nullptr),
        single_frame_deopt_quick_method_header_(nullptr),
        callee_method_(nullptr),
        skip_method_exit_callbacks_(skip_method_exit_callbacks) {}

  ArtMethod* GetSingleFrameDeoptMethod() const {
    return single_frame_deopt_method_;
  }

  const OatQuickMethodHeader* GetSingleFrameDeoptQuickMethodHeader() const {
    return single_frame_deopt_quick_method_header_;
  }

  ShadowFrame* GetBottomShadowFrame() const {
    return bottom_shadow_frame_;
  }

  const std::vector<uint32_t>& GetDexPcs() const {
    return dex_pcs_;
  }

  void FinishStackWalk() REQUIRES_SHARED(Locks::mutator_lock_) {
    // This is the upcall, or the next full frame in single-frame deopt, or the
    // code isn't deoptimizeable. We remember the frame and last pc so that we
    // may long jump to them.
    exception_handler_->SetHandlerQuickFramePc(GetCurrentQuickFramePc());
    exception_handler_->SetHandlerQuickFrame(GetCurrentQuickFrame());
    exception_handler_->SetHandlerMethodHeader(GetCurrentOatQuickMethodHeader());
    if (!stacked_shadow_frame_pushed_) {
      // In case there is no deoptimized shadow frame for this upcall, we still
      // need to push a nullptr to the stack since there is always a matching pop after
      // the long jump.
      GetThread()->PushStackedShadowFrame(nullptr,
                                          StackedShadowFrameType::kDeoptimizationShadowFrame);
      stacked_shadow_frame_pushed_ = true;
    }
    if (GetMethod() == nullptr) {
      exception_handler_->SetFullFragmentDone(true);
    } else {
      CHECK(callee_method_ != nullptr) << GetMethod()->PrettyMethod(false);
      exception_handler_->SetHandlerQuickArg0(reinterpret_cast<uintptr_t>(callee_method_));
    }
  }

  bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
    exception_handler_->SetHandlerFrameDepth(GetFrameDepth());
    ArtMethod* method = GetMethod();
    VLOG(deopt) << "Deoptimizing stack: depth: " << GetFrameDepth()
                << " at method " << ArtMethod::PrettyMethod(method);

    if (method == nullptr || single_frame_done_) {
      FinishStackWalk();
      return false;  // End stack walk.
    }

    // Update if the method exit event needs to be reported. We should report the exit event only
    // if we have reported an entry event, so tell the interpreter whether an entry event was
    // reported.
    bool supports_exit_events = Runtime::Current()->GetInstrumentation()->MethodSupportsExitEvents(
        method, GetCurrentOatQuickMethodHeader());

    if (method->IsRuntimeMethod()) {
      // Ignore callee save method.
      DCHECK(method->IsCalleeSaveMethod());
      return true;
    } else if (method->IsNative()) {
      // If we return from JNI with a pending exception and want to deoptimize, we need to skip
      // the native method. The top method is a runtime method, the native method comes next.
      // We also deoptimize due to method instrumentation reasons from method exit callbacks.
      // In these cases the native method is at the top of the stack.
      CHECK((GetFrameDepth() == 1U) || (GetFrameDepth() == 0U));
      // We see a native frame when:
      // 1. returning from JNI with a pending exception
      // 2. deopting from method exit callbacks (with or without a pending exception);
      //    skip_method_exit_callbacks_ is set in this case
      // 3. handling an async exception on suspend points for fast native methods.
      // We only need to call the method unwind event in the first case.
      if (supports_exit_events &&
          !skip_method_exit_callbacks_ &&
          GetThread()->IsExceptionPending()) {
        // An exception has occurred in a native method and we are deoptimizing past the native
        // method. So report the method unwind event here.
        Runtime::Current()->GetInstrumentation()->MethodUnwindEvent(
            GetThread(), method, dex::kDexNoIndex);
      }
      callee_method_ = method;
      return true;
    } else if (!single_frame_deopt_ &&
               !Runtime::Current()->IsAsyncDeoptimizeable(GetOuterMethod(),
                                                          GetCurrentQuickFramePc())) {
      // We hit some code that's not deoptimizeable. However, single-frame deoptimization triggered
      // from compiled code is always allowed since HDeoptimize always saves the full environment.
      LOG(WARNING) << "Got request to deoptimize un-deoptimizable method "
                   << method->PrettyMethod();
      FinishStackWalk();
      return false;  // End stack walk.
    } else {
      // Check if a shadow frame already exists for the debugger's set-local-value purpose.
      const size_t frame_id = GetFrameId();
      ShadowFrame* new_frame = GetThread()->FindDebuggerShadowFrame(frame_id);
      const bool* updated_vregs;
      CodeItemDataAccessor accessor(method->DexInstructionData());
      const size_t num_regs = accessor.RegistersSize();
      if (new_frame == nullptr) {
        new_frame = ShadowFrame::CreateDeoptimizedFrame(num_regs, method, GetDexPc());
        updated_vregs = nullptr;
      } else {
        updated_vregs = GetThread()->GetUpdatedVRegFlags(frame_id);
        DCHECK(updated_vregs != nullptr);
      }
      if (GetCurrentOatQuickMethodHeader()->IsNterpMethodHeader()) {
        HandleNterpDeoptimization(method, new_frame, updated_vregs);
      } else {
        HandleOptimizingDeoptimization(method, new_frame, updated_vregs);
      }
      new_frame->SetSkipMethodExitEvents(!supports_exit_events);
      // If we are deoptimizing after a method exit callback we shouldn't call the method exit
      // callbacks again for the top frame. We may have to deopt after the callback if the callback
      // either throws or performs other actions that require a deopt.
      // We only need to skip for the top frame and the rest of the frames should still run the
      // callbacks. So only do this check for the top frame.
      if (GetFrameDepth() == 0U && skip_method_exit_callbacks_) {
        new_frame->SetSkipMethodExitEvents(true);
        // This exception was raised by method exit callbacks and we shouldn't report it to
        // listeners for these exceptions.
        if (GetThread()->IsExceptionPending()) {
          new_frame->SetSkipNextExceptionEvent(true);
        }
      }
      if (updated_vregs != nullptr) {
        // Calling Thread::RemoveDebuggerShadowFrameMapping will also delete the updated_vregs
        // array so this must come after we processed the frame.
        GetThread()->RemoveDebuggerShadowFrameMapping(frame_id);
        DCHECK(GetThread()->FindDebuggerShadowFrame(frame_id) == nullptr);
      }
      if (prev_shadow_frame_ != nullptr) {
        prev_shadow_frame_->SetLink(new_frame);
      } else {
        // Will be popped after the long jump after DeoptimizeStack(),
        // right before interpreter::EnterInterpreterFromDeoptimize().
        stacked_shadow_frame_pushed_ = true;
        bottom_shadow_frame_ = new_frame;
        GetThread()->PushStackedShadowFrame(
            new_frame, StackedShadowFrameType::kDeoptimizationShadowFrame);
      }
      prev_shadow_frame_ = new_frame;

      if (single_frame_deopt_) {
        dex_pcs_.push_back(GetDexPc());
        if (!IsInInlinedFrame()) {
          // Single-frame deopt ends at the first non-inlined frame and needs to store that method.
          single_frame_done_ = true;
          single_frame_deopt_method_ = method;
          single_frame_deopt_quick_method_header_ = GetCurrentOatQuickMethodHeader();
        }
      }
      callee_method_ = method;
      return true;
    }
  }

 private:
  void HandleNterpDeoptimization(ArtMethod* m,
                                 ShadowFrame* new_frame,
                                 const bool* updated_vregs)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    ArtMethod** cur_quick_frame = GetCurrentQuickFrame();
    StackReference<mirror::Object>* vreg_ref_base =
        reinterpret_cast<StackReference<mirror::Object>*>(NterpGetReferenceArray(cur_quick_frame));
    int32_t* vreg_int_base =
        reinterpret_cast<int32_t*>(NterpGetRegistersArray(cur_quick_frame));
    CodeItemDataAccessor accessor(m->DexInstructionData());
    const uint16_t num_regs = accessor.RegistersSize();
    // An nterp frame has two arrays: a dex register array and a reference array
    // that shadows the dex register array but contains only references
    // (non-reference dex registers have nulls). See nterp_helpers.cc.
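    // For example, with v0 holding an object and v1 holding the int 42, the
    // registers array is {<v0 bits>, 42} while the reference array is
    // {v0, null}; the loop below prefers the reference array entry whenever
    // it is non-null.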
    for (size_t reg = 0; reg < num_regs; ++reg) {
      if (updated_vregs != nullptr && updated_vregs[reg]) {
        // Keep the value set by debugger.
        continue;
      }
      StackReference<mirror::Object>* ref_addr = vreg_ref_base + reg;
      mirror::Object* ref = ref_addr->AsMirrorPtr();
      if (ref != nullptr) {
        new_frame->SetVRegReference(reg, ref);
      } else {
        new_frame->SetVReg(reg, vreg_int_base[reg]);
      }
    }
  }

  void HandleOptimizingDeoptimization(ArtMethod* m,
                                      ShadowFrame* new_frame,
                                      const bool* updated_vregs)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    const OatQuickMethodHeader* method_header = GetCurrentOatQuickMethodHeader();
    CodeInfo code_info(method_header);
    uintptr_t native_pc_offset = method_header->NativeQuickPcOffset(GetCurrentQuickFramePc());
    StackMap stack_map = code_info.GetStackMapForNativePcOffset(native_pc_offset);
    CodeItemDataAccessor accessor(m->DexInstructionData());
    const size_t number_of_vregs = accessor.RegistersSize();
    uint32_t register_mask = code_info.GetRegisterMaskOf(stack_map);
    BitMemoryRegion stack_mask = code_info.GetStackMaskOf(stack_map);
    DexRegisterMap vreg_map = IsInInlinedFrame()
        ? code_info.GetInlineDexRegisterMapOf(stack_map, GetCurrentInlinedFrame())
        : code_info.GetDexRegisterMapOf(stack_map);

    if (kIsDebugBuild || UNLIKELY(Runtime::Current()->IsJavaDebuggable())) {
      CHECK_EQ(vreg_map.size(), number_of_vregs) << *Thread::Current()
                                                 << "Deopting: " << m->PrettyMethod()
                                                 << " inlined? "
                                                 << std::boolalpha << IsInInlinedFrame();
    }
    if (vreg_map.empty()) {
      return;
    }

    for (uint16_t vreg = 0; vreg < number_of_vregs; ++vreg) {
      if (updated_vregs != nullptr && updated_vregs[vreg]) {
        // Keep the value set by debugger.
        continue;
      }

      DexRegisterLocation::Kind location = vreg_map[vreg].GetKind();
      static constexpr uint32_t kDeadValue = 0xEBADDE09;
      uint32_t value = kDeadValue;
      bool is_reference = false;

      switch (location) {
        case DexRegisterLocation::Kind::kInStack: {
          const int32_t offset = vreg_map[vreg].GetStackOffsetInBytes();
          const uint8_t* addr = reinterpret_cast<const uint8_t*>(GetCurrentQuickFrame()) + offset;
          value = *reinterpret_cast<const uint32_t*>(addr);
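          // The stack mask has one bit per 32-bit stack slot, so convert the
          // byte offset to a slot index before checking it.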
          uint32_t bit = (offset >> 2);
          if (bit < stack_mask.size_in_bits() && stack_mask.LoadBit(bit)) {
            is_reference = true;
          }
          break;
        }
        case DexRegisterLocation::Kind::kInRegister:
        case DexRegisterLocation::Kind::kInRegisterHigh:
        case DexRegisterLocation::Kind::kInFpuRegister:
        case DexRegisterLocation::Kind::kInFpuRegisterHigh: {
          uint32_t reg = vreg_map[vreg].GetMachineRegister();
          bool result = GetRegisterIfAccessible(reg, location, &value);
          CHECK(result);
          if (location == DexRegisterLocation::Kind::kInRegister) {
            if (((1u << reg) & register_mask) != 0) {
              is_reference = true;
            }
          }
          break;
        }
        case DexRegisterLocation::Kind::kConstant: {
          value = vreg_map[vreg].GetConstant();
          if (value == 0) {
            // Make it a reference for extra safety.
            is_reference = true;
          }
          break;
        }
        case DexRegisterLocation::Kind::kNone: {
          break;
        }
        default: {
          LOG(FATAL) << "Unexpected location kind " << vreg_map[vreg].GetKind();
          UNREACHABLE();
        }
      }
      if (is_reference) {
        new_frame->SetVRegReference(vreg, reinterpret_cast<mirror::Object*>(value));
      } else {
        new_frame->SetVReg(vreg, value);
      }
    }
  }

  static VRegKind GetVRegKind(uint16_t reg, const std::vector<int32_t>& kinds) {
    return static_cast<VRegKind>(kinds[reg * 2]);
  }

  QuickExceptionHandler* const exception_handler_;
  ShadowFrame* prev_shadow_frame_;
  ShadowFrame* bottom_shadow_frame_;
  bool stacked_shadow_frame_pushed_;
  const bool single_frame_deopt_;
  bool single_frame_done_;
  ArtMethod* single_frame_deopt_method_;
  const OatQuickMethodHeader* single_frame_deopt_quick_method_header_;
  ArtMethod* callee_method_;
  // This specifies if method exit callbacks should be skipped for the top frame. We may request
  // a deopt after running method exit callbacks if the callback throws or requests events that
  // need a deopt.
  bool skip_method_exit_callbacks_;
  std::vector<uint32_t> dex_pcs_;

  DISALLOW_COPY_AND_ASSIGN(DeoptimizeStackVisitor);
};

void QuickExceptionHandler::PrepareForLongJumpToInvokeStubOrInterpreterBridge() {
  if (full_fragment_done_) {
    // Restore deoptimization exception. When returning from the invoke stub,
    // ArtMethod::Invoke() will see the special exception to know deoptimization
    // is needed.
    self_->SetException(Thread::GetDeoptimizationException());
  } else {
    // PC needs to be of the quick-to-interpreter bridge.
    int32_t offset;
    offset = GetThreadOffset<kRuntimePointerSize>(kQuickQuickToInterpreterBridge).Int32Value();
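    // The bridge address is stored in this thread's entrypoint table, so it is
    // read at the computed offset from the Thread object.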
    handler_quick_frame_pc_ = *reinterpret_cast<uintptr_t*>(
        reinterpret_cast<uint8_t*>(self_) + offset);
  }
}

void QuickExceptionHandler::DeoptimizeStack(bool skip_method_exit_callbacks) {
  DCHECK(is_deoptimization_);
  if (kDebugExceptionDelivery) {
    self_->DumpStack(LOG_STREAM(INFO) << "Deoptimizing: ");
  }

  DeoptimizeStackVisitor visitor(self_, context_, this, false, skip_method_exit_callbacks);
  visitor.WalkStack(true);
  PrepareForLongJumpToInvokeStubOrInterpreterBridge();
}

void QuickExceptionHandler::DeoptimizeSingleFrame(DeoptimizationKind kind) {
  DCHECK(is_deoptimization_);

  // This deopt is requested while still executing the method. We haven't run method exit callbacks
  // yet, so don't skip them.
  DeoptimizeStackVisitor visitor(
      self_, context_, this, true, /* skip_method_exit_callbacks= */ false);
  visitor.WalkStack(true);

  // Compiled code made an explicit deoptimization.
  ArtMethod* deopt_method = visitor.GetSingleFrameDeoptMethod();
  SCOPED_TRACE << "Deoptimizing "
               <<  deopt_method->PrettyMethod()
               << ": " << GetDeoptimizationKindName(kind);

  DCHECK(deopt_method != nullptr);
  if (VLOG_IS_ON(deopt) || kDebugExceptionDelivery) {
    LOG(INFO) << "Single-frame deopting: "
              << deopt_method->PrettyMethod()
              << " due to "
              << GetDeoptimizationKindName(kind);
    DumpFramesWithType(self_, /* details= */ true);
  }
  // When deoptimizing for debug support, the optimized code is still valid and
  // can be reused when debugging support (like breakpoints) is no longer
  // needed for this method.
  Runtime* runtime = Runtime::Current();
  if (runtime->UseJitCompilation() && (kind != DeoptimizationKind::kDebugging)) {
    runtime->GetJit()->GetCodeCache()->InvalidateCompiledCodeFor(
        deopt_method, visitor.GetSingleFrameDeoptQuickMethodHeader());
  } else {
    runtime->GetInstrumentation()->InitializeMethodsCode(
        deopt_method, /*aot_code=*/ nullptr);
  }

  // If the deoptimization is due to an inline cache, update it with the type
  // that made us deoptimize. This avoids pathological cases of never seeing
  // that type while executing baseline generated code.
  if (kind == DeoptimizationKind::kJitInlineCache || kind == DeoptimizationKind::kJitSameTarget) {
    DCHECK(runtime->UseJitCompilation());
    ShadowFrame* shadow_frame = visitor.GetBottomShadowFrame();
    uint32_t dex_pc = shadow_frame->GetDexPC();
    CodeItemDataAccessor accessor(shadow_frame->GetMethod()->DexInstructionData());
    const uint16_t* const insns = accessor.Insns();
    const Instruction* inst = Instruction::At(insns + dex_pc);
    switch (inst->Opcode()) {
      case Instruction::INVOKE_INTERFACE:
      case Instruction::INVOKE_VIRTUAL:
      case Instruction::INVOKE_INTERFACE_RANGE:
      case Instruction::INVOKE_VIRTUAL_RANGE: {
        uint32_t encoded_dex_pc = InlineCache::EncodeDexPc(
            visitor.GetSingleFrameDeoptMethod(),
            visitor.GetDexPcs(),
            runtime->GetJit()->GetJitCompiler()->GetInlineMaxCodeUnits());
        if (encoded_dex_pc != static_cast<uint32_t>(-1)) {
          // The inline cache comes from the top-level method.
          runtime->GetJit()->GetCodeCache()->MaybeUpdateInlineCache(
              visitor.GetSingleFrameDeoptMethod(),
              encoded_dex_pc,
              shadow_frame->GetVRegReference(inst->VRegC())->GetClass(),
              self_);
        } else {
          // If the top-level inline cache did not exist, update the one for the
          // bottom method, we know it's the one that was used for compilation.
          runtime->GetJit()->GetCodeCache()->MaybeUpdateInlineCache(
              shadow_frame->GetMethod(),
              dex_pc,
              shadow_frame->GetVRegReference(inst->VRegC())->GetClass(),
              self_);
        }
        break;
      }
      default: {
        LOG(FATAL) << "Unexpected instruction for inline cache: " << inst->Name();
      }
    }
  }

  PrepareForLongJumpToInvokeStubOrInterpreterBridge();
}

void QuickExceptionHandler::DeoptimizePartialFragmentFixup() {
  CHECK(handler_quick_frame_ != nullptr);
  // Architecture-dependent work. This is to get the LR right for x86 and x86-64.
  if (kRuntimeISA == InstructionSet::kX86 || kRuntimeISA == InstructionSet::kX86_64) {
    // On x86, the return address is on the stack, so just reuse it. Otherwise we would have to
    // change how longjump works.
    handler_quick_frame_ = reinterpret_cast<ArtMethod**>(
        reinterpret_cast<uintptr_t>(handler_quick_frame_) - sizeof(void*));
  }
}

void QuickExceptionHandler::DoLongJump(bool smash_caller_saves) {
  // Place context back on thread so it will be available when we continue.
  self_->ReleaseLongJumpContext(context_);
  context_->SetSP(reinterpret_cast<uintptr_t>(handler_quick_frame_));
  CHECK_NE(handler_quick_frame_pc_, 0u);
  context_->SetPC(handler_quick_frame_pc_);
  context_->SetArg0(handler_quick_arg0_);
  if (smash_caller_saves) {
    context_->SmashCallerSaves();
  }
  if (!is_deoptimization_ &&
      handler_method_header_ != nullptr &&
      handler_method_header_->IsNterpMethodHeader()) {
    // The interpreter processes one method at a time, i.e. it does not inline.
    DCHECK(handler_dex_pc_list_.has_value());
    DCHECK_EQ(handler_dex_pc_list_->size(), 1u) << "We shouldn't have any inlined frames.";
    context_->SetNterpDexPC(reinterpret_cast<uintptr_t>(
        GetHandlerMethod()->DexInstructions().Insns() + handler_dex_pc_list_->front()));
  }
  // Clear the dex_pc list so as not to leak memory.
  handler_dex_pc_list_.reset();
  context_->DoLongJump();
  UNREACHABLE();
}

void QuickExceptionHandler::DumpFramesWithType(Thread* self, bool details) {
  StackVisitor::WalkStack(
      [&](const art::StackVisitor* stack_visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
        ArtMethod* method = stack_visitor->GetMethod();
        if (details) {
          LOG(INFO) << "|> pc   = " << std::hex << stack_visitor->GetCurrentQuickFramePc();
          LOG(INFO) << "|> addr = " << std::hex
              << reinterpret_cast<uintptr_t>(stack_visitor->GetCurrentQuickFrame());
          if (stack_visitor->GetCurrentQuickFrame() != nullptr && method != nullptr) {
            LOG(INFO) << "|> ret  = " << std::hex << stack_visitor->GetReturnPc();
          }
        }
        if (method == nullptr) {
          // Transition, do go on, we want to unwind over bridges, all the way.
          if (details) {
            LOG(INFO) << "N  <transition>";
          }
          return true;
        } else if (method->IsRuntimeMethod()) {
          if (details) {
            LOG(INFO) << "R  " << method->PrettyMethod(true);
          }
          return true;
        } else {
          bool is_shadow = stack_visitor->GetCurrentShadowFrame() != nullptr;
          LOG(INFO) << (is_shadow ? "S" : "Q")
                    << ((!is_shadow && stack_visitor->IsInInlinedFrame()) ? "i" : " ")
                    << " "
                    << method->PrettyMethod(true);
          return true;  // Go on.
        }
      },
      self,
      /* context= */ nullptr,
      art::StackVisitor::StackWalkKind::kIncludeInlinedFrames);
}

}  // namespace art