1 /*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #include "quick_exception_handler.h"
18
19 #include "arch/context.h"
20 #include "art_method-inl.h"
21 #include "base/enums.h"
22 #include "base/logging.h" // For VLOG_IS_ON.
23 #include "base/systrace.h"
24 #include "dex/dex_file_types.h"
25 #include "dex/dex_instruction.h"
26 #include "entrypoints/entrypoint_utils.h"
27 #include "entrypoints/quick/quick_entrypoints_enum.h"
28 #include "entrypoints/runtime_asm_entrypoints.h"
29 #include "handle_scope-inl.h"
30 #include "interpreter/shadow_frame-inl.h"
31 #include "jit/jit.h"
32 #include "jit/jit_code_cache.h"
33 #include "mirror/class-inl.h"
34 #include "mirror/class_loader.h"
35 #include "mirror/throwable.h"
36 #include "nterp_helpers.h"
37 #include "oat_quick_method_header.h"
38 #include "stack.h"
39 #include "stack_map.h"
40
41 namespace art {
42
43 static constexpr bool kDebugExceptionDelivery = false;
44 static constexpr size_t kInvalidFrameDepth = 0xffffffff;
45
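// Note: the handler borrows the thread's long jump context here; DoLongJump()
// hands it back via Thread::ReleaseLongJumpContext() before jumping.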
QuickExceptionHandler::QuickExceptionHandler(Thread* self, bool is_deoptimization)
    : self_(self),
      context_(self->GetLongJumpContext()),
      is_deoptimization_(is_deoptimization),
      method_tracing_active_(is_deoptimization ||
                             Runtime::Current()->GetInstrumentation()->AreExitStubsInstalled()),
      handler_quick_frame_(nullptr),
      handler_quick_frame_pc_(0),
      handler_method_header_(nullptr),
      handler_quick_arg0_(0),
      handler_dex_pc_(0),
      clear_exception_(false),
      handler_frame_depth_(kInvalidFrameDepth),
      full_fragment_done_(false) {}

// Finds catch handler.
class CatchBlockStackVisitor final : public StackVisitor {
 public:
  CatchBlockStackVisitor(Thread* self,
                         Context* context,
                         Handle<mirror::Throwable>* exception,
                         QuickExceptionHandler* exception_handler,
                         uint32_t skip_frames)
      REQUIRES_SHARED(Locks::mutator_lock_)
      : StackVisitor(self, context, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
        exception_(exception),
        exception_handler_(exception_handler),
        skip_frames_(skip_frames) {
  }

  bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
    ArtMethod* method = GetMethod();
    exception_handler_->SetHandlerFrameDepth(GetFrameDepth());
    if (method == nullptr) {
      DCHECK_EQ(skip_frames_, 0u)
          << "We tried to skip an upcall! We should have returned to the upcall to finish delivery";
      // This is the upcall, we remember the frame and last pc so that we may long jump to them.
      exception_handler_->SetHandlerQuickFramePc(GetCurrentQuickFramePc());
      exception_handler_->SetHandlerQuickFrame(GetCurrentQuickFrame());
      return false;  // End stack walk.
    }
    if (skip_frames_ != 0) {
      skip_frames_--;
      return true;
    }
    if (method->IsRuntimeMethod()) {
      // Ignore callee save method.
      DCHECK(method->IsCalleeSaveMethod());
      return true;
    }
    return HandleTryItems(method);
  }

 private:
  bool HandleTryItems(ArtMethod* method)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    uint32_t dex_pc = dex::kDexNoIndex;
    if (!method->IsNative()) {
      dex_pc = GetDexPc();
    }
    if (dex_pc != dex::kDexNoIndex) {
      bool clear_exception = false;
      StackHandleScope<1> hs(GetThread());
      Handle<mirror::Class> to_find(hs.NewHandle((*exception_)->GetClass()));
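      // FindCatchBlock scans this method's try items for a handler matching the
      // exception type. If the found handler does not begin with a move-exception
      // instruction it never consumes the exception object, which FindCatchBlock
      // reports through clear_exception so the pending exception can be dropped.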
      uint32_t found_dex_pc = method->FindCatchBlock(to_find, dex_pc, &clear_exception);
      exception_handler_->SetClearException(clear_exception);
      if (found_dex_pc != dex::kDexNoIndex) {
        exception_handler_->SetHandlerDexPc(found_dex_pc);
        exception_handler_->SetHandlerQuickFramePc(
            GetCurrentOatQuickMethodHeader()->ToNativeQuickPc(
                method, found_dex_pc, /* is_for_catch_handler= */ true));
        exception_handler_->SetHandlerQuickFrame(GetCurrentQuickFrame());
        exception_handler_->SetHandlerMethodHeader(GetCurrentOatQuickMethodHeader());
        return false;  // End stack walk.
      } else if (UNLIKELY(GetThread()->HasDebuggerShadowFrames())) {
        // We are going to unwind this frame. Did we prepare a shadow frame for debugging?
        size_t frame_id = GetFrameId();
        ShadowFrame* frame = GetThread()->FindDebuggerShadowFrame(frame_id);
        if (frame != nullptr) {
          // We will not execute this shadow frame so we can safely deallocate it.
          GetThread()->RemoveDebuggerShadowFrameMapping(frame_id);
          ShadowFrame::DeleteDeoptimizedFrame(frame);
        }
      }
    }
    return true;  // Continue stack walk.
  }

  // The exception we're looking for the catch block of.
  Handle<mirror::Throwable>* exception_;
  // The quick exception handler we're visiting for.
  QuickExceptionHandler* const exception_handler_;
  // The number of frames to skip searching for catches in.
  uint32_t skip_frames_;

  DISALLOW_COPY_AND_ASSIGN(CatchBlockStackVisitor);
};

// Finds the appropriate exception catch after calling all method exit instrumentation functions.
// Note that this might change the exception being thrown.
void QuickExceptionHandler::FindCatch(ObjPtr<mirror::Throwable> exception) {
  DCHECK(!is_deoptimization_);
  instrumentation::InstrumentationStackPopper popper(self_);
  // The number of total frames we have so far popped.
  uint32_t already_popped = 0;
  bool popped_to_top = true;
  StackHandleScope<1> hs(self_);
  MutableHandle<mirror::Throwable> exception_ref(hs.NewHandle(exception));
  // Sending the instrumentation events (done by the InstrumentationStackPopper) can cause new
  // exceptions to be thrown which will override the current exception. Therefore we need to
  // perform the search for a catch in a loop until we have successfully popped all the way to a
  // catch or the top of the stack.
  do {
    if (kDebugExceptionDelivery) {
      ObjPtr<mirror::String> msg = exception_ref->GetDetailMessage();
      std::string str_msg(msg != nullptr ? msg->ToModifiedUtf8() : "");
      self_->DumpStack(LOG_STREAM(INFO) << "Delivering exception: " << exception_ref->PrettyTypeOf()
                       << ": " << str_msg << "\n");
    }

    // Walk the stack to find catch handler.
    CatchBlockStackVisitor visitor(self_,
                                   context_,
                                   &exception_ref,
                                   this,
                                   /* skip_frames= */ already_popped);
    visitor.WalkStack(true);
    uint32_t new_pop_count = handler_frame_depth_;
    DCHECK_GE(new_pop_count, already_popped);
    already_popped = new_pop_count;

    if (kDebugExceptionDelivery) {
      if (*handler_quick_frame_ == nullptr) {
        LOG(INFO) << "Handler is upcall";
      }
      if (GetHandlerMethod() != nullptr) {
        const DexFile* dex_file = GetHandlerMethod()->GetDexFile();
        int line_number =
            annotations::GetLineNumFromPC(dex_file, GetHandlerMethod(), handler_dex_pc_);
        LOG(INFO) << "Handler: " << GetHandlerMethod()->PrettyMethod() << " (line: "
                  << line_number << ")";
      }
    }
    // Exception was cleared as part of delivery.
    DCHECK(!self_->IsExceptionPending());
    // If the handler is in optimized code, we need to set the catch environment.
    if (*handler_quick_frame_ != nullptr &&
        handler_method_header_ != nullptr &&
        handler_method_header_->IsOptimized()) {
      SetCatchEnvironmentForOptimizedHandler(&visitor);
    }
    popped_to_top =
        popper.PopFramesTo(reinterpret_cast<uintptr_t>(handler_quick_frame_), exception_ref);
  } while (!popped_to_top);
  if (!clear_exception_) {
    // Put exception back in root set with clear throw location.
    self_->SetException(exception_ref.Get());
  }
}
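
// A minimal sketch of how the runtime drives exception delivery, paraphrasing
// Thread::QuickDeliverException() in thread.cc (details elided; that file is
// the authoritative call site):
//
//   QuickExceptionHandler exception_handler(self, /* is_deoptimization= */ false);
//   exception_handler.FindCatch(exception);  // May run instrumentation exit events.
//   exception_handler.DoLongJump();          // Never returns.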

static VRegKind ToVRegKind(DexRegisterLocation::Kind kind) {
  // Slightly hacky since we cannot map DexRegisterLocationKind and VRegKind
  // one to one. However, StackVisitor::GetVRegFromOptimizedCode only needs to
  // distinguish between core/FPU registers and low/high bits on 64-bit.
  switch (kind) {
    case DexRegisterLocation::Kind::kConstant:
    case DexRegisterLocation::Kind::kInStack:
      // VRegKind is ignored.
      return VRegKind::kUndefined;

    case DexRegisterLocation::Kind::kInRegister:
      // Selects core register. For 64-bit registers, selects low 32 bits.
      return VRegKind::kLongLoVReg;

    case DexRegisterLocation::Kind::kInRegisterHigh:
      // Selects core register. For 64-bit registers, selects high 32 bits.
      return VRegKind::kLongHiVReg;

    case DexRegisterLocation::Kind::kInFpuRegister:
      // Selects FPU register. For 64-bit registers, selects low 32 bits.
      return VRegKind::kDoubleLoVReg;

    case DexRegisterLocation::Kind::kInFpuRegisterHigh:
      // Selects FPU register. For 64-bit registers, selects high 32 bits.
      return VRegKind::kDoubleHiVReg;

    default:
      LOG(FATAL) << "Unexpected vreg location " << kind;
      UNREACHABLE();
  }
}

void QuickExceptionHandler::SetCatchEnvironmentForOptimizedHandler(StackVisitor* stack_visitor) {
  DCHECK(!is_deoptimization_);
  DCHECK(*handler_quick_frame_ != nullptr) << "Method should not be called on upcall exceptions";
  DCHECK(GetHandlerMethod() != nullptr && handler_method_header_->IsOptimized());

  if (kDebugExceptionDelivery) {
    self_->DumpStack(LOG_STREAM(INFO) << "Setting catch phis: ");
  }

  CodeItemDataAccessor accessor(GetHandlerMethod()->DexInstructionData());
  const size_t number_of_vregs = accessor.RegistersSize();
  CodeInfo code_info(handler_method_header_);

  // Find stack map of the catch block.
  StackMap catch_stack_map = code_info.GetCatchStackMapForDexPc(GetHandlerDexPc());
  DCHECK(catch_stack_map.IsValid());
  DexRegisterMap catch_vreg_map = code_info.GetDexRegisterMapOf(catch_stack_map);
  DCHECK_EQ(catch_vreg_map.size(), number_of_vregs);

  if (!catch_vreg_map.HasAnyLiveDexRegisters()) {
    return;
  }

  // Find stack map of the throwing instruction.
  StackMap throw_stack_map =
      code_info.GetStackMapForNativePcOffset(stack_visitor->GetNativePcOffset());
  DCHECK(throw_stack_map.IsValid());
  DexRegisterMap throw_vreg_map = code_info.GetDexRegisterMapOf(throw_stack_map);
  DCHECK_EQ(throw_vreg_map.size(), number_of_vregs);

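  // The optimizing compiler assigns each catch phi a stack slot, so every live
  // catch-side location handled below is expected to be kInStack (hence the
  // DCHECK inside the loop).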
  // Copy values between them.
  for (uint16_t vreg = 0; vreg < number_of_vregs; ++vreg) {
    DexRegisterLocation::Kind catch_location = catch_vreg_map[vreg].GetKind();
    if (catch_location == DexRegisterLocation::Kind::kNone) {
      continue;
    }
    DCHECK(catch_location == DexRegisterLocation::Kind::kInStack);

    // Get vreg value from its current location.
    uint32_t vreg_value;
    VRegKind vreg_kind = ToVRegKind(throw_vreg_map[vreg].GetKind());
    bool get_vreg_success =
        stack_visitor->GetVReg(stack_visitor->GetMethod(),
                               vreg,
                               vreg_kind,
                               &vreg_value,
                               throw_vreg_map[vreg]);
    CHECK(get_vreg_success) << "VReg " << vreg << " was optimized out ("
                            << "method=" << ArtMethod::PrettyMethod(stack_visitor->GetMethod())
                            << ", dex_pc=" << stack_visitor->GetDexPc() << ", "
                            << "native_pc_offset=" << stack_visitor->GetNativePcOffset() << ")";

    // Copy value to the catch phi's stack slot.
    int32_t slot_offset = catch_vreg_map[vreg].GetStackOffsetInBytes();
    ArtMethod** frame_top = stack_visitor->GetCurrentQuickFrame();
    uint8_t* slot_address = reinterpret_cast<uint8_t*>(frame_top) + slot_offset;
    uint32_t* slot_ptr = reinterpret_cast<uint32_t*>(slot_address);
    *slot_ptr = vreg_value;
  }
}

// Prepares deoptimization.
class DeoptimizeStackVisitor final : public StackVisitor {
 public:
  DeoptimizeStackVisitor(Thread* self,
                         Context* context,
                         QuickExceptionHandler* exception_handler,
                         bool single_frame)
      REQUIRES_SHARED(Locks::mutator_lock_)
      : StackVisitor(self, context, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
        exception_handler_(exception_handler),
        prev_shadow_frame_(nullptr),
        stacked_shadow_frame_pushed_(false),
        single_frame_deopt_(single_frame),
        single_frame_done_(false),
        single_frame_deopt_method_(nullptr),
        single_frame_deopt_quick_method_header_(nullptr),
        callee_method_(nullptr) {
  }

  ArtMethod* GetSingleFrameDeoptMethod() const {
    return single_frame_deopt_method_;
  }

  const OatQuickMethodHeader* GetSingleFrameDeoptQuickMethodHeader() const {
    return single_frame_deopt_quick_method_header_;
  }

  void FinishStackWalk() REQUIRES_SHARED(Locks::mutator_lock_) {
    // This is the upcall, or the next full frame in single-frame deopt, or the
    // code isn't deoptimizeable. We remember the frame and last pc so that we
    // may long jump to them.
    exception_handler_->SetHandlerQuickFramePc(GetCurrentQuickFramePc());
    exception_handler_->SetHandlerQuickFrame(GetCurrentQuickFrame());
    exception_handler_->SetHandlerMethodHeader(GetCurrentOatQuickMethodHeader());
    if (!stacked_shadow_frame_pushed_) {
      // In case there is no deoptimized shadow frame for this upcall, we still
      // need to push a nullptr to the stack since there is always a matching pop after
      // the long jump.
      GetThread()->PushStackedShadowFrame(nullptr,
                                          StackedShadowFrameType::kDeoptimizationShadowFrame);
      stacked_shadow_frame_pushed_ = true;
    }
    if (GetMethod() == nullptr) {
      exception_handler_->SetFullFragmentDone(true);
    } else {
      CHECK(callee_method_ != nullptr) << GetMethod()->PrettyMethod(false);
      exception_handler_->SetHandlerQuickArg0(reinterpret_cast<uintptr_t>(callee_method_));
    }
  }

  bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
    exception_handler_->SetHandlerFrameDepth(GetFrameDepth());
    ArtMethod* method = GetMethod();
    VLOG(deopt) << "Deoptimizing stack: depth: " << GetFrameDepth()
                << " at method " << ArtMethod::PrettyMethod(method);
    if (method == nullptr || single_frame_done_) {
      FinishStackWalk();
      return false;  // End stack walk.
    } else if (method->IsRuntimeMethod()) {
      // Ignore callee save method.
      DCHECK(method->IsCalleeSaveMethod());
      return true;
    } else if (method->IsNative()) {
      // If we return from JNI with a pending exception and want to deoptimize, we need to skip
      // the native method.
      // The top method is a runtime method, the native method comes next.
      CHECK_EQ(GetFrameDepth(), 1U);
      callee_method_ = method;
      return true;
    } else if (!single_frame_deopt_ &&
               !Runtime::Current()->IsAsyncDeoptimizeable(GetCurrentQuickFramePc())) {
      // We hit some code that's not deoptimizeable. However, single-frame deoptimization
      // triggered from compiled code is always allowed, since HDeoptimize always saves the
      // full environment.
      LOG(WARNING) << "Got request to deoptimize un-deoptimizable method "
                   << method->PrettyMethod();
      FinishStackWalk();
      return false;  // End stack walk.
    } else {
      // Check if a shadow frame already exists for debugger's set-local-value purpose.
      const size_t frame_id = GetFrameId();
      ShadowFrame* new_frame = GetThread()->FindDebuggerShadowFrame(frame_id);
      const bool* updated_vregs;
      CodeItemDataAccessor accessor(method->DexInstructionData());
      const size_t num_regs = accessor.RegistersSize();
      if (new_frame == nullptr) {
        new_frame = ShadowFrame::CreateDeoptimizedFrame(num_regs, nullptr, method, GetDexPc());
        updated_vregs = nullptr;
      } else {
        updated_vregs = GetThread()->GetUpdatedVRegFlags(frame_id);
        DCHECK(updated_vregs != nullptr);
      }
      if (GetCurrentOatQuickMethodHeader()->IsNterpMethodHeader()) {
        HandleNterpDeoptimization(method, new_frame, updated_vregs);
      } else {
        HandleOptimizingDeoptimization(method, new_frame, updated_vregs);
      }
      if (updated_vregs != nullptr) {
        // Calling Thread::RemoveDebuggerShadowFrameMapping will also delete the updated_vregs
        // array so this must come after we processed the frame.
        GetThread()->RemoveDebuggerShadowFrameMapping(frame_id);
        DCHECK(GetThread()->FindDebuggerShadowFrame(frame_id) == nullptr);
      }
      if (prev_shadow_frame_ != nullptr) {
        prev_shadow_frame_->SetLink(new_frame);
      } else {
        // Will be popped after the long jump after DeoptimizeStack(),
        // right before interpreter::EnterInterpreterFromDeoptimize().
        stacked_shadow_frame_pushed_ = true;
        GetThread()->PushStackedShadowFrame(
            new_frame, StackedShadowFrameType::kDeoptimizationShadowFrame);
      }
      prev_shadow_frame_ = new_frame;

      if (single_frame_deopt_ && !IsInInlinedFrame()) {
        // Single-frame deopt ends at the first non-inlined frame and needs to store that method.
        single_frame_done_ = true;
        single_frame_deopt_method_ = method;
        single_frame_deopt_quick_method_header_ = GetCurrentOatQuickMethodHeader();
      }
      callee_method_ = method;
      return true;
    }
  }

 private:
  void HandleNterpDeoptimization(ArtMethod* m,
                                 ShadowFrame* new_frame,
                                 const bool* updated_vregs)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    ArtMethod** cur_quick_frame = GetCurrentQuickFrame();
    StackReference<mirror::Object>* vreg_ref_base =
        reinterpret_cast<StackReference<mirror::Object>*>(NterpGetReferenceArray(cur_quick_frame));
    int32_t* vreg_int_base =
        reinterpret_cast<int32_t*>(NterpGetRegistersArray(cur_quick_frame));
    CodeItemDataAccessor accessor(m->DexInstructionData());
    const uint16_t num_regs = accessor.RegistersSize();
    // An nterp frame has two arrays: a dex register array and a reference array
    // that shadows the dex register array but contains only references
    // (non-reference dex registers have nulls). See nterp_helpers.cc.
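    // For example, if v0 holds a reference and v1 holds an int:
    //   registers array: [ v0 bits, v1 bits ]
    //   reference array: [ v0 ref,  null    ]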
    for (size_t reg = 0; reg < num_regs; ++reg) {
      if (updated_vregs != nullptr && updated_vregs[reg]) {
        // Keep the value set by debugger.
        continue;
      }
      StackReference<mirror::Object>* ref_addr = vreg_ref_base + reg;
      mirror::Object* ref = ref_addr->AsMirrorPtr();
      if (ref != nullptr) {
        new_frame->SetVRegReference(reg, ref);
      } else {
        new_frame->SetVReg(reg, vreg_int_base[reg]);
      }
    }
  }

  void HandleOptimizingDeoptimization(ArtMethod* m,
                                      ShadowFrame* new_frame,
                                      const bool* updated_vregs)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    const OatQuickMethodHeader* method_header = GetCurrentOatQuickMethodHeader();
    CodeInfo code_info(method_header);
    uintptr_t native_pc_offset = method_header->NativeQuickPcOffset(GetCurrentQuickFramePc());
    StackMap stack_map = code_info.GetStackMapForNativePcOffset(native_pc_offset);
    CodeItemDataAccessor accessor(m->DexInstructionData());
    const size_t number_of_vregs = accessor.RegistersSize();
    uint32_t register_mask = code_info.GetRegisterMaskOf(stack_map);
    BitMemoryRegion stack_mask = code_info.GetStackMaskOf(stack_map);
    DexRegisterMap vreg_map = IsInInlinedFrame()
        ? code_info.GetInlineDexRegisterMapOf(stack_map, GetCurrentInlinedFrame())
        : code_info.GetDexRegisterMapOf(stack_map);

    DCHECK_EQ(vreg_map.size(), number_of_vregs);
    if (vreg_map.empty()) {
      return;
    }

    for (uint16_t vreg = 0; vreg < number_of_vregs; ++vreg) {
      if (updated_vregs != nullptr && updated_vregs[vreg]) {
        // Keep the value set by debugger.
        continue;
      }

      DexRegisterLocation::Kind location = vreg_map[vreg].GetKind();
      static constexpr uint32_t kDeadValue = 0xEBADDE09;
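      // (Poison marker for registers the map declares dead; if one is ever
      // read after deopt, the bogus value makes the failure conspicuous.)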
      uint32_t value = kDeadValue;
      bool is_reference = false;

      switch (location) {
        case DexRegisterLocation::Kind::kInStack: {
          const int32_t offset = vreg_map[vreg].GetStackOffsetInBytes();
          const uint8_t* addr = reinterpret_cast<const uint8_t*>(GetCurrentQuickFrame()) + offset;
          value = *reinterpret_cast<const uint32_t*>(addr);
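          // The stack mask has one bit per 32-bit stack slot, so convert the
          // byte offset to a slot index before testing it.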
          uint32_t bit = (offset >> 2);
          if (bit < stack_mask.size_in_bits() && stack_mask.LoadBit(bit)) {
            is_reference = true;
          }
          break;
        }
        case DexRegisterLocation::Kind::kInRegister:
        case DexRegisterLocation::Kind::kInRegisterHigh:
        case DexRegisterLocation::Kind::kInFpuRegister:
        case DexRegisterLocation::Kind::kInFpuRegisterHigh: {
          uint32_t reg = vreg_map[vreg].GetMachineRegister();
          bool result = GetRegisterIfAccessible(reg, ToVRegKind(location), &value);
          CHECK(result);
          if (location == DexRegisterLocation::Kind::kInRegister) {
            if (((1u << reg) & register_mask) != 0) {
              is_reference = true;
            }
          }
          break;
        }
        case DexRegisterLocation::Kind::kConstant: {
          value = vreg_map[vreg].GetConstant();
          if (value == 0) {
            // Make it a reference for extra safety.
            is_reference = true;
          }
          break;
        }
        case DexRegisterLocation::Kind::kNone: {
          break;
        }
        default: {
          LOG(FATAL) << "Unexpected location kind " << vreg_map[vreg].GetKind();
          UNREACHABLE();
        }
      }
      if (is_reference) {
        new_frame->SetVRegReference(vreg, reinterpret_cast<mirror::Object*>(value));
      } else {
        new_frame->SetVReg(vreg, value);
      }
    }
  }

  static VRegKind GetVRegKind(uint16_t reg, const std::vector<int32_t>& kinds) {
    return static_cast<VRegKind>(kinds[reg * 2]);
  }

  QuickExceptionHandler* const exception_handler_;
  ShadowFrame* prev_shadow_frame_;
  bool stacked_shadow_frame_pushed_;
  const bool single_frame_deopt_;
  bool single_frame_done_;
  ArtMethod* single_frame_deopt_method_;
  const OatQuickMethodHeader* single_frame_deopt_quick_method_header_;
  ArtMethod* callee_method_;

  DISALLOW_COPY_AND_ASSIGN(DeoptimizeStackVisitor);
};

void QuickExceptionHandler::PrepareForLongJumpToInvokeStubOrInterpreterBridge() {
  if (full_fragment_done_) {
    // Restore deoptimization exception. When returning from the invoke stub,
    // ArtMethod::Invoke() will see the special exception to know deoptimization
    // is needed.
    self_->SetException(Thread::GetDeoptimizationException());
  } else {
    // The PC needs to point to the quick-to-interpreter bridge, which we read
    // out of this thread's entrypoint table.
    int32_t offset =
        GetThreadOffset<kRuntimePointerSize>(kQuickQuickToInterpreterBridge).Int32Value();
    handler_quick_frame_pc_ = *reinterpret_cast<uintptr_t*>(
        reinterpret_cast<uint8_t*>(self_) + offset);
  }
}
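
// Note: full_fragment_done_ distinguishes the two resume points above. Roughly,
// a full-fragment deopt has unwound all the way to the upcall and resumes in
// the invoke stub (ArtMethod::Invoke() sees the special exception), while a
// partial one resumes via the quick-to-interpreter bridge.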

void QuickExceptionHandler::DeoptimizeStack() {
  DCHECK(is_deoptimization_);
  if (kDebugExceptionDelivery) {
    self_->DumpStack(LOG_STREAM(INFO) << "Deoptimizing: ");
  }

  DeoptimizeStackVisitor visitor(self_, context_, this, false);
  visitor.WalkStack(true);
  PrepareForLongJumpToInvokeStubOrInterpreterBridge();
}

void QuickExceptionHandler::DeoptimizeSingleFrame(DeoptimizationKind kind) {
  DCHECK(is_deoptimization_);

  DeoptimizeStackVisitor visitor(self_, context_, this, true);
  visitor.WalkStack(true);

  // Compiled code made an explicit deoptimization.
  ArtMethod* deopt_method = visitor.GetSingleFrameDeoptMethod();
  DCHECK(deopt_method != nullptr);
  SCOPED_TRACE << "Deoptimizing "
               << deopt_method->PrettyMethod()
               << ": " << GetDeoptimizationKindName(kind);

  if (VLOG_IS_ON(deopt) || kDebugExceptionDelivery) {
    LOG(INFO) << "Single-frame deopting: "
              << deopt_method->PrettyMethod()
              << " due to "
              << GetDeoptimizationKindName(kind);
    DumpFramesWithType(self_, /* details= */ true);
  }
  if (Runtime::Current()->UseJitCompilation()) {
    Runtime::Current()->GetJit()->GetCodeCache()->InvalidateCompiledCodeFor(
        deopt_method, visitor.GetSingleFrameDeoptQuickMethodHeader());
  } else {
    // Transfer the code to interpreter.
    Runtime::Current()->GetInstrumentation()->UpdateMethodsCode(
        deopt_method, GetQuickToInterpreterBridge());
  }

  PrepareForLongJumpToInvokeStubOrInterpreterBridge();
}
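
// A rough sketch of how the runtime drives deoptimization, paraphrasing the
// artDeoptimize* entry points in quick_deoptimization_entrypoints.cc (the
// exact control flow there, e.g. the full- vs partial-fragment branch, is
// authoritative):
//
//   QuickExceptionHandler handler(self, /* is_deoptimization= */ true);
//   handler.DeoptimizeStack();  // Or DeoptimizeSingleFrame(kind).
//   uintptr_t return_pc = handler.UpdateInstrumentationStack();
//   handler.DeoptimizePartialFragmentFixup(return_pc);
//   handler.DoLongJump(/* smash_caller_saves= */ true);  // Never returns.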

void QuickExceptionHandler::DeoptimizePartialFragmentFixup(uintptr_t return_pc) {
  // At this point, the instrumentation stack has been updated. We need to install
  // the real return pc on the stack, in case an instrumentation stub is stored
  // there, so that the interpreter bridge code can return to the right place.
  if (return_pc != 0) {
    uintptr_t* pc_addr = reinterpret_cast<uintptr_t*>(handler_quick_frame_);
    CHECK(pc_addr != nullptr);
    pc_addr--;
    *reinterpret_cast<uintptr_t*>(pc_addr) = return_pc;
  }

  // Architecture-dependent work. This is to get the return address right for x86 and x86-64.
  if (kRuntimeISA == InstructionSet::kX86 || kRuntimeISA == InstructionSet::kX86_64) {
    // On x86, the return address is on the stack, so just reuse it. Otherwise we would have to
    // change how longjump works.
    handler_quick_frame_ = reinterpret_cast<ArtMethod**>(
        reinterpret_cast<uintptr_t>(handler_quick_frame_) - sizeof(void*));
  }
}

uintptr_t QuickExceptionHandler::UpdateInstrumentationStack() {
  DCHECK(is_deoptimization_) << "Non-deoptimization handlers should use FindCatch";
  uintptr_t return_pc = 0;
  if (method_tracing_active_) {
    instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation();
    return_pc = instrumentation->PopFramesForDeoptimization(
        self_, reinterpret_cast<uintptr_t>(handler_quick_frame_));
  }
  return return_pc;
}

void QuickExceptionHandler::DoLongJump(bool smash_caller_saves) {
  // Place context back on thread so it will be available when we continue.
  self_->ReleaseLongJumpContext(context_);
  context_->SetSP(reinterpret_cast<uintptr_t>(handler_quick_frame_));
  CHECK_NE(handler_quick_frame_pc_, 0u);
  context_->SetPC(handler_quick_frame_pc_);
  context_->SetArg0(handler_quick_arg0_);
  if (smash_caller_saves) {
    context_->SmashCallerSaves();
  }
  if (!is_deoptimization_ &&
      handler_method_header_ != nullptr &&
      handler_method_header_->IsNterpMethodHeader()) {
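    // Nterp resumes interpreting dex bytecode directly, so the context also
    // needs the address of the catch handler's first dex instruction.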
    context_->SetNterpDexPC(reinterpret_cast<uintptr_t>(
        GetHandlerMethod()->DexInstructions().Insns() + handler_dex_pc_));
  }
  context_->DoLongJump();
  UNREACHABLE();
}

void QuickExceptionHandler::DumpFramesWithType(Thread* self, bool details) {
  StackVisitor::WalkStack(
      [&](const art::StackVisitor* stack_visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
        ArtMethod* method = stack_visitor->GetMethod();
        if (details) {
          LOG(INFO) << "|> pc = " << std::hex << stack_visitor->GetCurrentQuickFramePc();
          LOG(INFO) << "|> addr = " << std::hex
                    << reinterpret_cast<uintptr_t>(stack_visitor->GetCurrentQuickFrame());
          if (stack_visitor->GetCurrentQuickFrame() != nullptr && method != nullptr) {
            LOG(INFO) << "|> ret = " << std::hex << stack_visitor->GetReturnPc();
          }
        }
        if (method == nullptr) {
          // Transition frame; keep going, we want to unwind over bridges all the way.
          if (details) {
            LOG(INFO) << "N <transition>";
          }
          return true;
        } else if (method->IsRuntimeMethod()) {
          if (details) {
            LOG(INFO) << "R " << method->PrettyMethod(true);
          }
          return true;
        } else {
          bool is_shadow = stack_visitor->GetCurrentShadowFrame() != nullptr;
          LOG(INFO) << (is_shadow ? "S" : "Q")
                    << ((!is_shadow && stack_visitor->IsInInlinedFrame()) ? "i" : " ")
                    << " "
                    << method->PrettyMethod(true);
          return true;  // Go on.
        }
      },
      self,
      /* context= */ nullptr,
      art::StackVisitor::StackWalkKind::kIncludeInlinedFrames);
}

}  // namespace art