/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "quick_exception_handler.h"

#include "arch/context.h"
#include "art_method-inl.h"
#include "dex_instruction.h"
#include "entrypoints/entrypoint_utils.h"
#include "entrypoints/runtime_asm_entrypoints.h"
#include "handle_scope-inl.h"
#include "mirror/class-inl.h"
#include "mirror/class_loader.h"
#include "mirror/throwable.h"
#include "verifier/method_verifier.h"

namespace art {

static constexpr bool kDebugExceptionDelivery = false;
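// Sentinel for handler_frame_depth_ before a handler frame has been located.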
static constexpr size_t kInvalidFrameDepth = 0xffffffff;

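// Grabs the thread's long jump context; method_tracing_active_ records whether instrumentation
// exit stubs may be on the stack and will need to be popped before the long jump (see
// UpdateInstrumentationStack).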
QuickExceptionHandler::QuickExceptionHandler(Thread* self, bool is_deoptimization)
  : self_(self), context_(self->GetLongJumpContext()), is_deoptimization_(is_deoptimization),
    method_tracing_active_(is_deoptimization ||
                           Runtime::Current()->GetInstrumentation()->AreExitStubsInstalled()),
    handler_quick_frame_(nullptr), handler_quick_frame_pc_(0), handler_method_(nullptr),
    handler_dex_pc_(0), clear_exception_(false), handler_frame_depth_(kInvalidFrameDepth) {
}

// Finds catch handler or prepares for deoptimization.
class CatchBlockStackVisitor FINAL : public StackVisitor {
 public:
  CatchBlockStackVisitor(Thread* self, Context* context, Handle<mirror::Throwable>* exception,
                         QuickExceptionHandler* exception_handler)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      : StackVisitor(self, context, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
        self_(self),
        exception_(exception),
        exception_handler_(exception_handler) {
  }

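  // Called for each frame during the walk; ends the walk once a catch handler is found or the
  // upcall frame is reached.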
  bool VisitFrame() OVERRIDE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    ArtMethod* method = GetMethod();
    exception_handler_->SetHandlerFrameDepth(GetFrameDepth());
    if (method == nullptr) {
      // This is the upcall; we remember the frame and last pc so that we may long jump to them.
      exception_handler_->SetHandlerQuickFramePc(GetCurrentQuickFramePc());
      exception_handler_->SetHandlerQuickFrame(GetCurrentQuickFrame());
      uint32_t next_dex_pc;
      ArtMethod* next_art_method;
      bool has_next = GetNextMethodAndDexPc(&next_art_method, &next_dex_pc);
      // Report the method that did the down call as the handler.
      exception_handler_->SetHandlerDexPc(next_dex_pc);
      exception_handler_->SetHandlerMethod(next_art_method);
      if (!has_next) {
        // No next method? Check that the exception handler is set up for the unhandled exception
        // case.
        DCHECK_EQ(0U, exception_handler_->GetHandlerDexPc());
        DCHECK(nullptr == exception_handler_->GetHandlerMethod());
      }
      return false;  // End stack walk.
    }
    if (method->IsRuntimeMethod()) {
      // Ignore callee save method.
      DCHECK(method->IsCalleeSaveMethod());
      return true;
    }
    return HandleTryItems(method);
  }

 private:
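  // Looks for a catch block matching the exception's type at the current dex pc. On a match,
  // records the handler method, dex pc and quick frame and ends the walk; otherwise the walk
  // continues into the caller.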
  bool HandleTryItems(ArtMethod* method)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    uint32_t dex_pc = DexFile::kDexNoIndex;
    if (!method->IsNative()) {
      dex_pc = GetDexPc();
    }
    if (dex_pc != DexFile::kDexNoIndex) {
      bool clear_exception = false;
      StackHandleScope<1> hs(self_);
      Handle<mirror::Class> to_find(hs.NewHandle((*exception_)->GetClass()));
      uint32_t found_dex_pc = method->FindCatchBlock(to_find, dex_pc, &clear_exception);
      exception_handler_->SetClearException(clear_exception);
      if (found_dex_pc != DexFile::kDexNoIndex) {
        exception_handler_->SetHandlerMethod(method);
        exception_handler_->SetHandlerDexPc(found_dex_pc);
        exception_handler_->SetHandlerQuickFramePc(method->ToNativeQuickPc(found_dex_pc));
        exception_handler_->SetHandlerQuickFrame(GetCurrentQuickFrame());
        return false;  // End stack walk.
      }
    }
    return true;  // Continue stack walk.
  }

  Thread* const self_;
  // The exception whose catch block we are looking for.
  Handle<mirror::Throwable>* exception_;
  // The quick exception handler we're visiting for.
  QuickExceptionHandler* const exception_handler_;

  DISALLOW_COPY_AND_ASSIGN(CatchBlockStackVisitor);
};

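// Walks the stack to locate a catch handler for the given exception and records the frame and pc
// to long jump to. Unless delivery cleared it, the exception is put back on the thread afterwards.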
void QuickExceptionHandler::FindCatch(mirror::Throwable* exception) {
  DCHECK(!is_deoptimization_);
  if (kDebugExceptionDelivery) {
    mirror::String* msg = exception->GetDetailMessage();
    std::string str_msg(msg != nullptr ? msg->ToModifiedUtf8() : "");
    self_->DumpStack(LOG(INFO) << "Delivering exception: " << PrettyTypeOf(exception)
                     << ": " << str_msg << "\n");
  }
  StackHandleScope<1> hs(self_);
  Handle<mirror::Throwable> exception_ref(hs.NewHandle(exception));

  // Walk the stack to find catch handler or prepare for deoptimization.
  CatchBlockStackVisitor visitor(self_, context_, &exception_ref, this);
  visitor.WalkStack(true);

  if (kDebugExceptionDelivery) {
    if (*handler_quick_frame_ == nullptr) {
      LOG(INFO) << "Handler is upcall";
    }
    if (handler_method_ != nullptr) {
      const DexFile& dex_file = *handler_method_->GetDeclaringClass()->GetDexCache()->GetDexFile();
      int line_number = dex_file.GetLineNumFromPC(handler_method_, handler_dex_pc_);
      LOG(INFO) << "Handler: " << PrettyMethod(handler_method_) << " (line: " << line_number << ")";
    }
  }
  if (clear_exception_) {
    // Exception was cleared as part of delivery.
    DCHECK(!self_->IsExceptionPending());
  } else {
    // Put exception back in root set with clear throw location.
    self_->SetException(exception_ref.Get());
  }
  // The debugger may suspend this thread and walk its stack. Let's do this before popping
  // instrumentation frames.
  instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation();
  if (instrumentation->HasExceptionCaughtListeners() &&
      self_->IsExceptionThrownByCurrentMethod(exception)) {
    instrumentation->ExceptionCaughtEvent(self_, exception_ref.Get());
  }
}

// Prepares deoptimization.
class DeoptimizeStackVisitor FINAL : public StackVisitor {
 public:
  DeoptimizeStackVisitor(Thread* self, Context* context, QuickExceptionHandler* exception_handler)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      : StackVisitor(self, context, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
        self_(self),
        exception_handler_(exception_handler),
        prev_shadow_frame_(nullptr),
        stacked_shadow_frame_pushed_(false) {
  }

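  // Converts each compiled Java frame into a shadow frame via HandleDeoptimization; stops at the
  // upcall frame, which becomes the target of the long jump.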
  bool VisitFrame() OVERRIDE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    exception_handler_->SetHandlerFrameDepth(GetFrameDepth());
    ArtMethod* method = GetMethod();
    if (method == nullptr) {
      // This is the upcall; we remember the frame and last pc so that we may long jump to them.
      exception_handler_->SetHandlerQuickFramePc(GetCurrentQuickFramePc());
      exception_handler_->SetHandlerQuickFrame(GetCurrentQuickFrame());
      if (!stacked_shadow_frame_pushed_) {
        // In case there is no deoptimized shadow frame for this upcall, we still
        // need to push a nullptr to the stack since there is always a matching pop after
        // the long jump.
        self_->PushStackedShadowFrame(nullptr, StackedShadowFrameType::kDeoptimizationShadowFrame);
        stacked_shadow_frame_pushed_ = true;
      }
      return false;  // End stack walk.
    } else if (method->IsRuntimeMethod()) {
      // Ignore callee save method.
      DCHECK(method->IsCalleeSaveMethod());
      return true;
    } else {
      return HandleDeoptimization(method);
    }
  }

 private:
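  // DescribeVRegs() reports (kind, payload) pairs per register; this returns the kind for `reg`.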
  static VRegKind GetVRegKind(uint16_t reg, const std::vector<int32_t>& kinds) {
    return static_cast<VRegKind>(kinds.at(reg * 2));
  }

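  // Builds a shadow frame for `m` at the current dex pc. The method is re-verified to learn the
  // kind of each dex register so that its value can be copied out of the quick frame into the
  // shadow frame for the interpreter.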
  bool HandleDeoptimization(ArtMethod* m) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    const DexFile::CodeItem* code_item = m->GetCodeItem();
    CHECK(code_item != nullptr);
    uint16_t num_regs = code_item->registers_size_;
    uint32_t dex_pc = GetDexPc();
    StackHandleScope<2> hs(self_);  // Dex cache and class loader.
    mirror::Class* declaring_class = m->GetDeclaringClass();
    Handle<mirror::DexCache> h_dex_cache(hs.NewHandle(declaring_class->GetDexCache()));
    Handle<mirror::ClassLoader> h_class_loader(hs.NewHandle(declaring_class->GetClassLoader()));
    verifier::MethodVerifier verifier(self_, h_dex_cache->GetDexFile(), h_dex_cache, h_class_loader,
                                      &m->GetClassDef(), code_item, m->GetDexMethodIndex(),
                                      m, m->GetAccessFlags(), true, true, true, true);
    bool verifier_success = verifier.Verify();
    CHECK(verifier_success) << PrettyMethod(m);
    ShadowFrame* new_frame = ShadowFrame::CreateDeoptimizedFrame(num_regs, nullptr, m, dex_pc);
    {
      ScopedStackedShadowFramePusher pusher(self_, new_frame,
                                            StackedShadowFrameType::kShadowFrameUnderConstruction);
      const std::vector<int32_t> kinds(verifier.DescribeVRegs(dex_pc));

      // Markers for dead values, used when the verifier knows a Dex register is undefined,
      // or when the compiler knows the register has not been initialized, or is not used
      // anymore in the method.
      static constexpr uint32_t kDeadValue = 0xEBADDE09;
      static constexpr uint64_t kLongDeadValue = 0xEBADDE09EBADDE09;
      for (uint16_t reg = 0; reg < num_regs; ++reg) {
        VRegKind kind = GetVRegKind(reg, kinds);
        switch (kind) {
          case kUndefined:
            new_frame->SetVReg(reg, kDeadValue);
            break;
          case kConstant:
            new_frame->SetVReg(reg, kinds.at((reg * 2) + 1));
            break;
          case kReferenceVReg: {
            uint32_t value = 0;
            // Check IsReferenceVReg in case the compiled GC map doesn't agree with the verifier.
            // We don't want to copy a stale reference into the shadow frame as a reference.
            // b/20736048
            if (GetVReg(m, reg, kind, &value) && IsReferenceVReg(m, reg)) {
              new_frame->SetVRegReference(reg, reinterpret_cast<mirror::Object*>(value));
            } else {
              new_frame->SetVReg(reg, kDeadValue);
            }
            break;
          }
          case kLongLoVReg:
            if (GetVRegKind(reg + 1, kinds) == kLongHiVReg) {
              // Treat it as a "long" register pair.
              uint64_t value = 0;
              if (GetVRegPair(m, reg, kLongLoVReg, kLongHiVReg, &value)) {
                new_frame->SetVRegLong(reg, value);
              } else {
                new_frame->SetVRegLong(reg, kLongDeadValue);
              }
            } else {
              uint32_t value = 0;
              if (GetVReg(m, reg, kind, &value)) {
                new_frame->SetVReg(reg, value);
              } else {
                new_frame->SetVReg(reg, kDeadValue);
              }
            }
            break;
          case kLongHiVReg:
            if (GetVRegKind(reg - 1, kinds) == kLongLoVReg) {
              // Nothing to do: we treated it as a "long" register pair.
            } else {
              uint32_t value = 0;
              if (GetVReg(m, reg, kind, &value)) {
                new_frame->SetVReg(reg, value);
              } else {
                new_frame->SetVReg(reg, kDeadValue);
              }
            }
            break;
          case kDoubleLoVReg:
            if (GetVRegKind(reg + 1, kinds) == kDoubleHiVReg) {
              uint64_t value = 0;
              if (GetVRegPair(m, reg, kDoubleLoVReg, kDoubleHiVReg, &value)) {
                // Treat it as a "double" register pair.
                new_frame->SetVRegLong(reg, value);
              } else {
                new_frame->SetVRegLong(reg, kLongDeadValue);
              }
            } else {
              uint32_t value = 0;
              if (GetVReg(m, reg, kind, &value)) {
                new_frame->SetVReg(reg, value);
              } else {
                new_frame->SetVReg(reg, kDeadValue);
              }
            }
            break;
          case kDoubleHiVReg:
            if (GetVRegKind(reg - 1, kinds) == kDoubleLoVReg) {
              // Nothing to do: we treated it as a "double" register pair.
            } else {
              uint32_t value = 0;
              if (GetVReg(m, reg, kind, &value)) {
                new_frame->SetVReg(reg, value);
              } else {
                new_frame->SetVReg(reg, kDeadValue);
              }
            }
            break;
          default:
            uint32_t value = 0;
            if (GetVReg(m, reg, kind, &value)) {
              new_frame->SetVReg(reg, value);
            } else {
              new_frame->SetVReg(reg, kDeadValue);
            }
            break;
        }
      }
    }
    if (prev_shadow_frame_ != nullptr) {
      prev_shadow_frame_->SetLink(new_frame);
    } else {
      // Will be popped after the long jump after DeoptimizeStack(),
      // right before interpreter::EnterInterpreterFromDeoptimize().
      stacked_shadow_frame_pushed_ = true;
      self_->PushStackedShadowFrame(new_frame, StackedShadowFrameType::kDeoptimizationShadowFrame);
    }
    prev_shadow_frame_ = new_frame;
    return true;
  }

  Thread* const self_;
  QuickExceptionHandler* const exception_handler_;
  ShadowFrame* prev_shadow_frame_;
  bool stacked_shadow_frame_pushed_;

  DISALLOW_COPY_AND_ASSIGN(DeoptimizeStackVisitor);
};

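// Builds shadow frames for the compiled frames on the stack, then restores the reserved
// deoptimization exception on the thread.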
void QuickExceptionHandler::DeoptimizeStack() {
  DCHECK(is_deoptimization_);
  if (kDebugExceptionDelivery) {
    self_->DumpStack(LOG(INFO) << "Deoptimizing: ");
  }

  DeoptimizeStackVisitor visitor(self_, context_, this);
  visitor.WalkStack(true);

  // Restore deoptimization exception.
  self_->SetException(Thread::GetDeoptimizationException());
}

// Unwinds all instrumentation stack frames prior to the catch handler or upcall.
class InstrumentationStackVisitor : public StackVisitor {
 public:
  InstrumentationStackVisitor(Thread* self, size_t frame_depth)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      : StackVisitor(self, nullptr, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
        frame_depth_(frame_depth),
        instrumentation_frames_to_pop_(0) {
    CHECK_NE(frame_depth_, kInvalidFrameDepth);
  }

  bool VisitFrame() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    size_t current_frame_depth = GetFrameDepth();
    if (current_frame_depth < frame_depth_) {
      CHECK(GetMethod() != nullptr);
      if (UNLIKELY(reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc()) == GetReturnPc())) {
        if (!IsInInlinedFrame()) {
          // We do not count inlined frames, because we do not instrument them. The reason we
          // include them in the stack walking is the check against `frame_depth_`, which is
          // given to us by a visitor that visits inlined frames.
          ++instrumentation_frames_to_pop_;
        }
      }
      return true;
    } else {
      // We reached the frame of the catch handler or the upcall.
      return false;
    }
  }

  size_t GetInstrumentationFramesToPop() const {
    return instrumentation_frames_to_pop_;
  }

 private:
  const size_t frame_depth_;
  size_t instrumentation_frames_to_pop_;

  DISALLOW_COPY_AND_ASSIGN(InstrumentationStackVisitor);
};

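// Pops instrumentation stack entries for every instrumented frame above the handler's frame
// depth, keeping the instrumentation stack consistent with the frames removed by the long jump.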
void QuickExceptionHandler::UpdateInstrumentationStack() {
  if (method_tracing_active_) {
    InstrumentationStackVisitor visitor(self_, handler_frame_depth_);
    visitor.WalkStack(true);

    size_t instrumentation_frames_to_pop = visitor.GetInstrumentationFramesToPop();
    instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation();
    for (size_t i = 0; i < instrumentation_frames_to_pop; ++i) {
      instrumentation->PopMethodForUnwind(self_, is_deoptimization_);
    }
  }
}

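// Installs the handler frame and pc into the long jump context and transfers control there.
// Never returns.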
void QuickExceptionHandler::DoLongJump() {
  // Place context back on thread so it will be available when we continue.
  self_->ReleaseLongJumpContext(context_);
  context_->SetSP(reinterpret_cast<uintptr_t>(handler_quick_frame_));
  CHECK_NE(handler_quick_frame_pc_, 0u);
  context_->SetPC(handler_quick_frame_pc_);
  context_->SmashCallerSaves();
  context_->DoLongJump();
  UNREACHABLE();
}

}  // namespace art